diff --git a/front/src/config/i18n/de.json b/front/src/config/i18n/de.json
index ecb6580da5..7aa907b04e 100644
--- a/front/src/config/i18n/de.json
+++ b/front/src/config/i18n/de.json
@@ -1792,6 +1792,14 @@
"textPlaceholder": "Nachrichtentext",
"explanationText": "Um eine Variable in den Text einzufügen, gib \"{{\" ein. Um einen Variablenwert festzulegen, musst du zuerst das Feld \"Gerätewert abrufen\" verwenden."
},
+ "askAi": {
+ "description": "Diese Aktion sendet eine Nachricht über Gladys Plus an die KI (ChatGPT), und die KI antwortet dir mit einer Nachricht. Wenn du ein Kamerabild anhängst, kann die KI sehen, was auf dem Bild ist, und sogar entsprechend handeln.",
+ "userLabel": "Gladys wird antworten an",
+ "cameraLabel": "Ein Kamerabild anhängen (optional)",
+ "textLabel": "Meine Frage an die KI",
+ "textPlaceholder": "Nachrichtentext",
+ "explanationText": "Um eine Variable einzufügen, gib '{{' ein. Achte darauf, dass du zuvor eine Variable in einer Aktion 'Letzten Zustand abrufen' definiert hast, die vor diesem Nachrichtenblock platziert ist."
+ },
"turnOnLights": {
"label": "Wähle die Lichter aus, die eingeschaltet werden sollen"
},
@@ -2018,6 +2026,9 @@
},
"music": {
"play-notification": "Auf einem Lautsprecher sprechen"
+ },
+ "ai": {
+ "ask": "Die KI fragen"
}
},
"variables": {
diff --git a/front/src/config/i18n/en.json b/front/src/config/i18n/en.json
index 85f1b09322..10acaf6e99 100644
--- a/front/src/config/i18n/en.json
+++ b/front/src/config/i18n/en.json
@@ -1792,6 +1792,14 @@
"textPlaceholder": "Message text",
"explanationText": "To inject a variable in the text, press '{{'. To set a variable value, you need to use the 'Get device value' box before this one."
},
+ "askAi": {
+ "description": "This action sends a message to the AI (ChatGPT) via Gladys Plus, and the AI replies to you with a message. If you attach a camera image, the AI will be able to see what is on the image and even take action accordingly.",
+ "userLabel": "Gladys will respond to",
+ "cameraLabel": "Attach a camera image (optional)",
+ "textLabel": "My question for the AI",
+ "textPlaceholder": "Message text",
+ "explanationText": "To insert a variable, type '{{'. Note that you must have defined a variable beforehand, in a 'Retrieve the last state' action placed before this message block."
+ },
"turnOnLights": {
"label": "Select the lights you want to turn on"
},
@@ -2018,6 +2026,9 @@
},
"music": {
"play-notification": "Talk on a speaker"
+ },
+ "ai": {
+ "ask": "Ask AI"
}
},
"variables": {
diff --git a/front/src/config/i18n/fr.json b/front/src/config/i18n/fr.json
index dba32fdf4a..47dea0ad50 100644
--- a/front/src/config/i18n/fr.json
+++ b/front/src/config/i18n/fr.json
@@ -1792,6 +1792,14 @@
"textPlaceholder": "Texte du message",
"explanationText": "Pour injecter une variable, tapez '{{'. Attention, vous devez avoir défini une variable auparavant dans une action 'Récupérer le dernier état' placé avant ce bloc message."
},
+ "askAi": {
+ "description": "Cette action enverra un message à l'IA (ChatGPT) via Gladys Plus et vous répondra par message. Si vous joignez une image de caméra, l'IA pourra voir ce qu'il y a sur l'image et même faire une action en conséquence.",
+ "userLabel": "Gladys répondra à",
+ "cameraLabel": "Joindre une image de caméra (optionnel)",
+ "textLabel": "Ma question pour l'IA",
+ "textPlaceholder": "Texte du message",
+ "explanationText": "Pour injecter une variable, tapez '{{'. Attention, vous devez avoir défini une variable auparavant dans une action 'Récupérer le dernier état' placé avant ce bloc message."
+ },
"turnOnLights": {
"label": "Sélectionnez les lumières que vous souhaitez allumer"
},
@@ -2018,6 +2026,9 @@
},
"music": {
"play-notification": "Parler sur une enceinte"
+ },
+ "ai": {
+ "ask": "Demander à l'IA"
}
},
"variables": {
diff --git a/front/src/routes/scene/edit-scene/ActionCard.jsx b/front/src/routes/scene/edit-scene/ActionCard.jsx
index 79cb1f62bc..7fae038dba 100644
--- a/front/src/routes/scene/edit-scene/ActionCard.jsx
+++ b/front/src/routes/scene/edit-scene/ActionCard.jsx
@@ -31,6 +31,7 @@ import SetAlarmMode from './actions/SetAlarmMode';
import SendMqttMessage from './actions/SendMqttMessage';
import PlayNotification from './actions/PlayNotification';
import EdfTempoCondition from './actions/EdfTempoCondition';
+import AskAI from './actions/AskAI';
const deleteActionFromColumn = (columnIndex, rowIndex, deleteAction) => () => {
deleteAction(columnIndex, rowIndex);
@@ -64,7 +65,8 @@ const ACTION_ICON = {
[ACTIONS.ALARM.CHECK_ALARM_MODE]: 'fe fe-bell',
[ACTIONS.ALARM.SET_ALARM_MODE]: 'fe fe-bell',
[ACTIONS.MQTT.SEND]: 'fe fe-message-square',
- [ACTIONS.MUSIC.PLAY_NOTIFICATION]: 'fe fe-speaker'
+ [ACTIONS.MUSIC.PLAY_NOTIFICATION]: 'fe fe-speaker',
+ [ACTIONS.AI.ASK]: 'fe fe-cpu'
};
const ACTION_CARD_TYPE = 'ACTION_CARD_TYPE';
@@ -401,6 +403,17 @@ const ActionCard = ({ children, ...props }) => {
triggersVariables={props.triggersVariables}
/>
)}
+ {props.action.type === ACTIONS.AI.ASK && (
+ <AskAI
+ action={props.action}
+ columnIndex={props.columnIndex}
+ index={props.index}
+ updateActionProperty={props.updateActionProperty}
+ />
+ )}
diff --git a/front/src/routes/scene/edit-scene/actions/AskAI.jsx b/front/src/routes/scene/edit-scene/actions/AskAI.jsx
new file mode 100644
index 0000000000..4e853f85b6
--- /dev/null
+++ b/front/src/routes/scene/edit-scene/actions/AskAI.jsx
@@ -0,0 +1,149 @@
+import Select from 'react-select';
+import { Component } from 'preact';
+import { connect } from 'unistore/preact';
+import { Localizer, Text } from 'preact-i18n';
+
+import TextWithVariablesInjected from '../../../../components/scene/TextWithVariablesInjected';
+
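+// Scene action editor for the 'Ask AI' action: choose the user Gladys will answer, an optional camera image and the question text.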
+class AskAI extends Component {
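+ // Fetch the users and cameras from the API to populate the two selects.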
+ getOptions = async () => {
+ try {
+ const users = await this.props.httpClient.get('/api/v1/user');
+ const userOptions = [];
+ users.forEach(user => {
+ userOptions.push({
+ label: user.firstname,
+ value: user.selector
+ });
+ });
+
+ const cameras = await this.props.httpClient.get('/api/v1/camera');
+ const cameraOptions = cameras.map(camera => ({
+ label: camera.name,
+ value: camera.selector
+ }));
+
+ await this.setState({ userOptions, cameraOptions });
+ this.refreshSelectedOptions(this.props);
+ return userOptions;
+ } catch (e) {
+ console.error(e);
+ }
+ };
+ updateText = text => {
+ this.props.updateActionProperty(this.props.columnIndex, this.props.index, 'text', text);
+ };
+ handleUserChange = selectedOption => {
+ if (selectedOption && selectedOption.value) {
+ this.props.updateActionProperty(this.props.columnIndex, this.props.index, 'user', selectedOption.value);
+ } else {
+ this.props.updateActionProperty(this.props.columnIndex, this.props.index, 'user', null);
+ }
+ };
+ handleCameraChange = selectedOption => {
+ if (selectedOption && selectedOption.value) {
+ this.props.updateActionProperty(this.props.columnIndex, this.props.index, 'camera', selectedOption.value);
+ } else {
+ this.props.updateActionProperty(this.props.columnIndex, this.props.index, 'camera', undefined);
+ }
+ };
+
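+ // Recompute which user/camera options are selected from the selectors stored on the action.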
+ refreshSelectedOptions = nextProps => {
+ let selectedUserOption = '';
+ if (nextProps.action.user && this.state.userOptions) {
+ const userOption = this.state.userOptions.find(option => option.value === nextProps.action.user);
+
+ if (userOption) {
+ selectedUserOption = userOption;
+ }
+ }
+ let selectedCameraOption = '';
+ if (nextProps.action.camera && this.state.cameraOptions) {
+ const cameraOption = this.state.cameraOptions.find(option => option.value === nextProps.action.camera);
+
+ if (cameraOption) {
+ selectedCameraOption = cameraOption;
+ }
+ }
+ this.setState({ selectedUserOption, selectedCameraOption });
+ };
+ constructor(props) {
+ super(props);
+ this.props = props;
+ this.state = {
+ selectedOption: ''
+ };
+ }
+ componentDidMount() {
+ this.getOptions();
+ }
+ componentWillReceiveProps(nextProps) {
+ this.refreshSelectedOptions(nextProps);
+ }
+ render(props, { selectedUserOption, userOptions, selectedCameraOption, cameraOptions }) {
+ return (
+ <div>
+ {/* Markup sketch: exact layout and i18n key paths assumed */}
+ <div class="form-group">
+ <label class="form-label">
+ <Text id="editScene.actionsCard.askAi.userLabel" />
+ </label>
+ <Select value={selectedUserOption} onChange={this.handleUserChange} options={userOptions} />
+ </div>
+ <div class="form-group">
+ <label class="form-label">
+ <Text id="editScene.actionsCard.askAi.cameraLabel" />
+ </label>
+ <Select value={selectedCameraOption} onChange={this.handleCameraChange} options={cameraOptions} isClearable />
+ </div>
+ <div class="form-group">
+ <label class="form-label">
+ <Text id="editScene.actionsCard.askAi.textLabel" />
+ </label>
+ <Localizer>
+ <TextWithVariablesInjected text={props.action.text} updateText={this.updateText} placeholder={<Text id="editScene.actionsCard.askAi.textPlaceholder" />} />
+ </Localizer>
+ </div>
+ </div>
+ );
+ }
+}
+
+export default connect('httpClient', {})(AskAI);
diff --git a/front/src/routes/scene/edit-scene/actions/ChooseActionTypeCard.jsx b/front/src/routes/scene/edit-scene/actions/ChooseActionTypeCard.jsx
index 71ac342748..786248fb80 100644
--- a/front/src/routes/scene/edit-scene/actions/ChooseActionTypeCard.jsx
+++ b/front/src/routes/scene/edit-scene/actions/ChooseActionTypeCard.jsx
@@ -33,7 +33,8 @@ const ACTION_LIST = [
ACTIONS.ALARM.CHECK_ALARM_MODE,
ACTIONS.ALARM.SET_ALARM_MODE,
ACTIONS.MQTT.SEND,
- ACTIONS.MUSIC.PLAY_NOTIFICATION
+ ACTIONS.MUSIC.PLAY_NOTIFICATION,
+ ACTIONS.AI.ASK
];
const TRANSLATIONS = ACTION_LIST.reduce((acc, action) => {
diff --git a/server/lib/gateway/gateway.forwardMessageToOpenAI.js b/server/lib/gateway/gateway.forwardMessageToOpenAI.js
index c97419d2ab..bcdae2a0f9 100644
--- a/server/lib/gateway/gateway.forwardMessageToOpenAI.js
+++ b/server/lib/gateway/gateway.forwardMessageToOpenAI.js
@@ -1,3 +1,4 @@
+const logger = require('../../utils/logger');
const { Error429 } = require('../../utils/httpErrors');
const intentTranslation = {
@@ -17,16 +18,18 @@ const disableOpenAiFirstReply = new Set(['GET_TEMPERATURE', 'GET_HUMIDITY']);
* @description Handle a new message sent by a user to Gladys.
* @param {object} request - A request sent.
* @param {object} request.message - A message sent by a user.
+ * @param {string} request.image - An optional camera image (data URL) to send with the question.
* @param {Array} request.previousQuestions - List of previous messages.
* @param {object} request.context - Context of messages (user, etc...).
* @returns {Promise} Return classification.
* @example
* forwardMessageToOpenAI(request);
*/
-async function forwardMessageToOpenAI({ message, previousQuestions, context }) {
+async function forwardMessageToOpenAI({ message, image, previousQuestions, context }) {
try {
const response = await this.openAIAsk({
question: message.text,
+ image,
previous_questions: previousQuestions,
});
@@ -63,6 +66,7 @@ async function forwardMessageToOpenAI({ message, previousQuestions, context }) {
return classification;
} catch (e) {
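+ // Log the underlying error so failed OpenAI requests are visible in the logs.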
+ logger.warn(e);
if (e instanceof Error429) {
await this.message.replyByIntent(message, 'openai.request.tooManyRequests', context);
} else {
diff --git a/server/lib/message/message.reply.js b/server/lib/message/message.reply.js
index 60ee04760d..1a5b36b66f 100644
--- a/server/lib/message/message.reply.js
+++ b/server/lib/message/message.reply.js
@@ -27,11 +27,38 @@ async function reply(originalMessage, text, context, file = null) {
userId: originalMessage.user.id,
payload: messageCreated,
});
- // then, we get the service sending the original message
- const service = this.service.getService(originalMessage.source);
- // if the service exist, we send the message
- if (service) {
- await service.message.send(originalMessage.source_user_id, messageCreated);
+
+ // If the source is Gladys AI, then we should answer by all means available
+ if (originalMessage.source === 'AI') {
+ const user = this.state.get('user', originalMessage.user.selector);
+ const telegramService = this.service.getService('telegram');
+ // if the service exist and the user had telegram configured
+ // if the service exists and the user has Telegram configured
+ // we forward the message to Telegram
+ await telegramService.message.send(user.telegram_user_id, messageCreated);
+ }
+ // We send the message to the nextcloud talk service
+ const nextcloudTalkService = this.service.getService('nextcloud-talk');
+ // if the service exists
+ if (nextcloudTalkService) {
+ const nextcloudTalkToken = await this.variable.getValue(
+ 'NEXTCLOUD_TALK_TOKEN',
+ nextcloudTalkService.message.serviceId,
+ user.id,
+ );
+ // if the user has Nextcloud Talk configured
+ if (nextcloudTalkToken) {
+ // we forward the message to Nextcloud Talk
+ await nextcloudTalkService.message.send(nextcloudTalkToken, messageCreated);
+ }
+ }
+ } else {
+ // then, we get the service sending the original message
+ const service = this.service.getService(originalMessage.source);
+ // if the service exist, we send the message
+ if (service) {
+ await service.message.send(originalMessage.source_user_id, messageCreated);
+ }
}
} catch (e) {
logger.warn(`Unable to reply to user`);
diff --git a/server/lib/scene/scene.actions.js b/server/lib/scene/scene.actions.js
index 64c988a7f9..dafbcc422e 100644
--- a/server/lib/scene/scene.actions.js
+++ b/server/lib/scene/scene.actions.js
@@ -245,6 +245,30 @@ const actionsFunc = {
const image = await self.device.camera.getLiveImage(action.camera);
await self.message.sendToUser(action.user, textWithVariables, image);
},
+ [ACTIONS.AI.ASK]: async (self, action, scope) => {
+ const textWithVariables = Handlebars.compile(action.text)(scope);
+ let image;
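+ // If a camera was selected, grab a live snapshot and prefix it with 'data:' to build a data URL for the gateway.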
+ if (action.camera) {
+ image = await self.device.camera.getLiveImage(action.camera);
+ image = `data:${image}`;
+ }
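+ // Resolve the target user and build a message attributed to source 'AI', so the reply is broadcast to every channel the user has configured (see message.reply).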
+ const user = self.stateManager.get('user', action.user);
+ const message = {
+ source: 'AI',
+ user: {
+ id: user.id,
+ language: user.language,
+ selector: user.selector,
+ },
+ language: user.language,
+ text: textWithVariables,
+ };
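+ // Forward the question (and the optional image) to OpenAI through the Gladys Plus gateway; the answer comes back as a reply to this message.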
+ await self.gateway.forwardMessageToOpenAI({
+ message,
+ image,
+ context: {},
+ });
+ },
[ACTIONS.DEVICE.GET_VALUE]: async (self, action, scope, columnIndex, rowIndex) => {
const deviceFeature = self.stateManager.get('deviceFeature', action.device_feature);
set(
diff --git a/server/lib/state/index.js b/server/lib/state/index.js
index 739717f70b..c0192773e2 100644
--- a/server/lib/state/index.js
+++ b/server/lib/state/index.js
@@ -32,6 +32,17 @@ function get(entity, entitySelector) {
return this.state[entity][entitySelector].get();
}
+/**
+ * @description Return all keys stored for an entity type.
+ * @param {string} entity - The type of entity we should get the keys from.
+ * @returns {Array} Return the list of keys for this entity type.
+ * @example
+ * stateManager.getAllKeys('user');
+ */
+function getAllKeys(entity) {
+ return Object.keys(this.state[entity]);
+}
+
/**
* @description Return the value of a key in the store.
* @param {string} entity - The type of entity we should get the value from.
@@ -85,5 +96,6 @@ StateManager.prototype.setState = setState;
StateManager.prototype.deleteState = deleteState;
StateManager.prototype.get = get;
StateManager.prototype.getKey = getKey;
+StateManager.prototype.getAllKeys = getAllKeys;
module.exports = StateManager;
diff --git a/server/test/lib/gateway/gateway.forwardMessageToOpenAI.test.js b/server/test/lib/gateway/gateway.forwardMessageToOpenAI.test.js
index 7ce2e75429..e6913af973 100644
--- a/server/test/lib/gateway/gateway.forwardMessageToOpenAI.test.js
+++ b/server/test/lib/gateway/gateway.forwardMessageToOpenAI.test.js
@@ -3,6 +3,7 @@ const { expect } = require('chai');
const proxyquire = require('proxyquire').noCallThru();
const EventEmitter = require('events');
+const StateManager = require('../../../lib/state');
const GladysGatewayClientMock = require('./GladysGatewayClientMock.test');
const event = new EventEmitter();
@@ -48,7 +49,88 @@ describe('gateway.forwardMessageToOpenAI', () => {
getEntityIdByName: fake.returns('14a8ad23-78fa-45e4-8583-f5452792818d'),
};
const serviceManager = {};
- const stateManager = {};
+ const stateManager = new StateManager(event);
+ stateManager.setState('device', 'co2-sensor', {
+ id: 'bd2eaaef-5baa-4aa6-8c94-ccce01026a53',
+ service_id: '59d7e9e2-a5a5-43b0-8796-287e00670355',
+ room_id: '3dc8243a-983f-45bc-94e8-f0c651b78c5d',
+ name: 'Capteur salon',
+ selector: 'mqtt-test',
+ model: null,
+ external_id: 'mqtt:test',
+ should_poll: false,
+ poll_frequency: null,
+ created_at: '2024-08-10T07:34:51.798Z',
+ updated_at: '2024-10-18T02:00:49.733Z',
+ features: [
+ {
+ id: '31fc42aa-f338-40c1-bcc0-c14d37470e71',
+ device_id: 'bd2eaaef-5baa-4aa6-8c94-ccce01026a53',
+ name: 'Batterie',
+ selector: 'mqtt-battery',
+ external_id: 'mqtt:battery',
+ category: 'battery',
+ type: 'integer',
+ read_only: true,
+ keep_history: true,
+ has_feedback: false,
+ unit: 'percent',
+ min: 0,
+ max: 100,
+ last_value: null,
+ last_value_string: null,
+ last_value_changed: null,
+ last_hourly_aggregate: null,
+ last_daily_aggregate: null,
+ last_monthly_aggregate: null,
+ created_at: '2024-10-04T01:10:45.283Z',
+ updated_at: '2024-10-04T01:11:22.787Z',
+ },
+ {
+ id: '444b306d-5a2c-49f6-a8a5-ffe2a1d7cb11',
+ device_id: 'bd2eaaef-5baa-4aa6-8c94-ccce01026a53',
+ name: 'Niveau de Co2',
+ selector: 'mqtt-co2',
+ external_id: 'mqtt:co2',
+ category: 'co2-sensor',
+ type: 'integer',
+ read_only: true,
+ keep_history: true,
+ has_feedback: false,
+ unit: 'ppm',
+ min: 0,
+ max: 100000,
+ last_value: 1200,
+ last_value_string: null,
+ last_value_changed: '2024-10-18T01:40:12.042Z',
+ last_hourly_aggregate: null,
+ last_daily_aggregate: null,
+ last_monthly_aggregate: null,
+ created_at: '2024-10-04T01:11:22.783Z',
+ updated_at: '2024-10-18T02:01:16.000Z',
+ },
+ ],
+ params: [],
+ room: {
+ id: '3dc8243a-983f-45bc-94e8-f0c651b78c5d',
+ house_id: 'ec0e36a8-f370-4157-9249-3892a6e3a52c',
+ name: 'salon',
+ selector: 'salon',
+ created_at: '2024-10-11T06:43:37.620Z',
+ updated_at: '2024-10-11T06:43:37.620Z',
+ },
+ service: {
+ id: '59d7e9e2-a5a5-43b0-8796-287e00670355',
+ pod_id: null,
+ name: 'mqtt',
+ selector: 'mqtt',
+ version: '0.1.0',
+ has_message_feature: false,
+ status: 'RUNNING',
+ created_at: '2024-08-08T12:59:46.450Z',
+ updated_at: '2024-10-18T02:08:20.564Z',
+ },
+ });
gateway = new Gateway(
variable,
event,
@@ -78,6 +160,11 @@ describe('gateway.forwardMessageToOpenAI', () => {
room: 'living room',
});
const classification = await gateway.forwardMessageToOpenAI({ message, previousQuestions, context });
+ assert.calledWith(gateway.gladysGatewayClient.openAIAsk, {
+ question: 'Turn on the light in the living room',
+ image: undefined,
+ previous_questions: [],
+ });
expect(classification).to.deep.equal({
entities: [
{
diff --git a/server/test/lib/message/message.reply.test.js b/server/test/lib/message/message.reply.test.js
index 85a7563f84..884b9ac9b5 100644
--- a/server/test/lib/message/message.reply.test.js
+++ b/server/test/lib/message/message.reply.test.js
@@ -2,27 +2,68 @@ const { assert, fake } = require('sinon');
const EventEmitter = require('events');
const MessageHandler = require('../../../lib/message');
-let send;
+describe('message.reply', () => {
+ const eventEmitter = new EventEmitter();
+ let messageHandler;
+ let telegramService;
+ let nextCloudTalkService;
+ let apiClientSource;
+ let variable;
+ beforeEach(() => {
+ telegramService = {
+ message: {
+ send: fake.resolves(null),
+ },
+ };
-// mocks
-const classification = { intent: 'light.turnon', entities: [{ hey: 1 }] };
-const brain = {
- classify: () => Promise.resolve({ classification }),
-};
-const service = {
- getService: () => {
- send = fake.resolves(true);
- return {
+ nextCloudTalkService = {
message: {
- send,
+ send: fake.resolves(null),
},
};
- },
-};
-describe('message.reply', () => {
- const eventEmitter = new EventEmitter();
- const messageHandler = new MessageHandler(eventEmitter, brain, service);
+ apiClientSource = {
+ message: {
+ send: fake.resolves(null),
+ },
+ };
+
+ // mocks
+ const classification = { intent: 'light.turnon', entities: [{ hey: 1 }] };
+ const brain = {
+ classify: () => Promise.resolve({ classification }),
+ };
+ const service = {
+ getService: (serviceName) => {
+ if (serviceName === 'telegram') {
+ return telegramService;
+ }
+ if (serviceName === 'nextcloud-talk') {
+ return nextCloudTalkService;
+ }
+ if (serviceName === 'api-client') {
+ return apiClientSource;
+ }
+
+ return null;
+ },
+ };
+
+ const state = {
+ get: () => {
+ return {
+ telegram_user_id: 'telegram-user-id',
+ };
+ },
+ };
+
+ variable = {
+ getValue: () => {
+ return 'next-cloud-talk-token';
+ },
+ };
+ messageHandler = new MessageHandler(eventEmitter, brain, service, state, variable);
+ });
it('should send reply', async () => {
await messageHandler.reply(
{
@@ -37,6 +78,40 @@ describe('message.reply', () => {
'hey!',
{},
);
- assert.calledWith(send, 'XXXX');
+ assert.calledWith(apiClientSource.message.send, 'XXXX');
+ });
+ it('should send reply to all sources', async () => {
+ await messageHandler.reply(
+ {
+ language: 'en',
+ source: 'AI',
+ source_user_id: 'XXXX',
+ user: {
+ id: '0cd30aef-9c4e-4a23-88e3-3547971296e5',
+ language: 'en',
+ },
+ },
+ 'hey!',
+ {},
+ );
+ assert.calledWith(telegramService.message.send, 'telegram-user-id');
+ assert.calledWith(nextCloudTalkService.message.send, 'next-cloud-talk-token');
+ });
+ it('should not send to Nextcloud Talk when getting the token fails', async () => {
+ variable.getValue = fake.rejects(new Error('cannot get'));
+ await messageHandler.reply(
+ {
+ language: 'en',
+ source: 'AI',
+ source_user_id: 'XXXX',
+ user: {
+ id: '0cd30aef-9c4e-4a23-88e3-3547971296e5',
+ language: 'en',
+ },
+ },
+ 'hey!',
+ {},
+ );
+ assert.notCalled(nextCloudTalkService.message.send);
});
});
diff --git a/server/test/lib/scene/actions/scene.action.askAi.test.js b/server/test/lib/scene/actions/scene.action.askAi.test.js
new file mode 100644
index 0000000000..08f7f9630f
--- /dev/null
+++ b/server/test/lib/scene/actions/scene.action.askAi.test.js
@@ -0,0 +1,70 @@
+const { fake, assert } = require('sinon');
+const EventEmitter = require('events');
+const { ACTIONS } = require('../../../../utils/constants');
+const { executeActions } = require('../../../../lib/scene/scene.executeActions');
+const StateManager = require('../../../../lib/state');
+
+const event = new EventEmitter();
+
+describe('scene.ask-ai', () => {
+ it('should ask AI about a camera image then send answer to user', async () => {
+ const stateManager = new StateManager(event);
+ stateManager.setState('deviceFeature', 'my-device-feature', {
+ category: 'light',
+ type: 'binary',
+ last_value: 15,
+ });
+ stateManager.setState('user', 'pepper', {
+ id: '7c8db9e1-4f34-4693-a04c-7b1bfc1dbdc4',
+ language: 'fr',
+ selector: 'pepper',
+ });
+ const message = {
+ sendToUser: fake.resolves(null),
+ };
+ const device = {
+ camera: {
+ getLiveImage: fake.resolves('image-content'),
+ },
+ };
+ const gateway = {
+ forwardMessageToOpenAI: fake.resolves(null),
+ };
+ const scope = {};
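+ // The first column stores the device value in the scope ({{0.0.last_value}}), the second column asks the AI with that value injected in the text.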
+ await executeActions(
+ { stateManager, event, message, device, gateway },
+ [
+ [
+ {
+ type: ACTIONS.DEVICE.GET_VALUE,
+ device_feature: 'my-device-feature',
+ },
+ ],
+ [
+ {
+ type: ACTIONS.AI.ASK,
+ user: 'pepper',
+ camera: 'my-camera',
+ text:
+ 'Can you check if the camera in the living room is fine? Temperature is high ({{0.0.last_value}} °C).',
+ },
+ ],
+ ],
+ scope,
+ );
+ assert.calledWith(gateway.forwardMessageToOpenAI, {
+ message: {
+ source: 'AI',
+ user: {
+ id: '7c8db9e1-4f34-4693-a04c-7b1bfc1dbdc4',
+ language: 'fr',
+ selector: 'pepper',
+ },
+ language: 'fr',
+ text: 'Can you check if the camera in the living room is fine? Temperature is high (15 °C).',
+ },
+ image: 'data:image-content',
+ context: {},
+ });
+ });
+});
diff --git a/server/test/lib/state/state.test.js b/server/test/lib/state/state.test.js
index caaae58706..b0d685266d 100644
--- a/server/test/lib/state/state.test.js
+++ b/server/test/lib/state/state.test.js
@@ -34,6 +34,14 @@ describe('state', () => {
const userSleepState = stateManager.getKey('user', 'tony', 'sleep');
expect(userSleepState).to.equal('asleep');
});
+ it('should get all users keys', async () => {
+ const stateManager = new StateManager(event);
+ stateManager.setState('user', 'tony', {
+ sleep: 'asleep',
+ });
+ const keys = stateManager.getAllKeys('user');
+ expect(keys).to.deep.equal(['tony']);
+ });
it('should return null', async () => {
const stateManager = new StateManager(event);
const userSleepState = stateManager.getKey('user', 'tony', 'sleep');
diff --git a/server/utils/constants.js b/server/utils/constants.js
index d17be5ae08..d669799326 100644
--- a/server/utils/constants.js
+++ b/server/utils/constants.js
@@ -344,6 +344,9 @@ const CONDITIONS = {
};
const ACTIONS = {
+ AI: {
+ ASK: 'ai.ask',
+ },
ALARM: {
CHECK_ALARM_MODE: 'alarm.check-alarm-mode',
SET_ALARM_MODE: 'alarm.set-alarm-mode',