---
# Ensemble: contextual-conversation
# Three-step pipeline: load recent messages from D1, generate a reply with
# the model, persist the assistant reply, then return it as `response`.
ensemble: contextual-conversation

state:
  schema:
    conversation_history: array
    user_preferences: object

agents:
  # Step 1: fetch the 10 most recent rows for this user.
  # NOTE(review): ORDER BY timestamp DESC means rows arrive newest-first;
  # the prompt below interpolates them in that order — confirm the model
  # is expected to read the history newest-first.
  - name: load-history
    operation: storage
    config:
      type: d1
      # Folded scalar joins to the identical single-line SQL string.
      query: >-
        SELECT * FROM conversations
        WHERE user_id = ?
        ORDER BY timestamp DESC
        LIMIT 10
      params:
        # Quote templated values so an empty or numeric expansion cannot
        # be retyped by the YAML/template layer (null, int, bool traps).
        - "${input.user_id}"
    state:
      set:
        conversation_history: "${load-history.output.rows}"

  # Step 2: produce the assistant reply from history + preferences.
  - name: generate-response
    operation: think
    config:
      provider: openai
      model: gpt-4o
      prompt: |
        Conversation history:
        ${state.conversation_history.map(m => `${m.role}: ${m.content}`).join('\n')}
        User preferences: ${JSON.stringify(state.user_preferences)}
        New message: ${input.message}
        Respond naturally.
    state:
      use: [conversation_history, user_preferences]

  # Step 3: persist only the assistant's reply.
  # NOTE(review): the incoming user message is never inserted here —
  # confirm the caller (or another ensemble) writes the 'user' row,
  # otherwise loaded history will contain only assistant turns.
  - name: save-message
    operation: storage
    config:
      type: d1
      query: |
        INSERT INTO conversations (user_id, role, content, timestamp)
        VALUES (?, 'assistant', ?, ?)
      params:
        - "${input.user_id}"
        - "${generate-response.output}"
        - "${Date.now()}"

output:
  response: "${generate-response.output}"