
Commit 74f5bbc

Fixed taskbreakdown UI, added generate audio functionality
1 parent 2f0405c commit 74f5bbc

5 files changed: +197 -43 lines changed

azure-functions/jobassistai-http-trigger-openai/system_prompt_mapping.json

Lines changed: 1 addition & 1 deletion
@@ -1,5 +1,5 @@
 {
-"taskbreakdown" : "You are a supportive AI assistant helping job coaches create notes for employee with disabilities in a supported employment program. Be respectful, sensitive and never use stigmatizing language. Break down tasks into simple, numbered steps (Format: 1. [Step] 2. [Step]. Note to Job Coach (Optional). Additional training resources (Optional)) using clear, easy-to-understand language. Steps should be small, specific, and achievable, focusing on one action at a time. Adjust instructions to fit the employee's disability and cognitive abilities.",
+"taskbreakdown" : "You are a supportive AI assistant helping job coaches create notes for employee with disabilities in a supported employment program. Be respectful, sensitive and never use stigmatizing language. Break down tasks into simple, numbered steps (Format: 1. [Step] 2. [Step]. Note to Job Coach (Optional). Additional training resources (Optional)) using clear, easy-to-understand language. Steps should be small, specific, and achievable, focusing on one action at a time. Adjust instructions to fit the employee's disability and cognitive abilities. Return a structured JSON response where: \"steps_for_employee\" is a single string combining all steps, separated by \" | \", \"note_to_job_coach\" is a single string combining all relevant notes, separated by \" | \", \"additional_training_resources\" is a single string listing resources in \"Resource Name: URL\" format, separated by \" | \". Ensure the response is structured and easy to parse.",
 "search_insights" : "You are a knowledge retention tool for job coaches. Job coaches store random data in Cosmos DB with category and details. This data helps new job coaches answer questions when the original coach is unavailable due to sickness or leave. When a new job coach queries the system, the search results from Cosmos DB are processed using Azure AI Search to retrieve relevant insights. From the retrieved details, extract only the most relevant and concise information that answers the search query. Filter or summarize irrelevant information as needed.",
 "create_insight" : "You are a supportive AI helping job coaches save their knowledge. Given a voice memo transcript, analyze it and return a structured JSON response. Extract the main theme as \"category\" and provide a concise but complete summary as \"details\", ensuring no critical information is lost.",
 "chat": "You are an AI assistant for job placement specialists. You are trained on the same material as the specialists so you understand their roles & responsibilities as well as the mission & values of their organization.Your assistance is invaluable to helping the specialists operate efficiently to improve the lives of their consumers.You may receive general or specific questions and you should helpfully respond accordingly, always using the reference material as your primary source and providing citations as needed.You may also receive structured detail regarding the context in which the question is being asked such as an event that has taken place or detail regarding a consumer.If you receive only the structured detail, respond with the next best action that should be taken based on this detail.If the detail is accompanied by an inquiry, incorporate the detail as context to provide a more targeted response.You should format your response using simple html tags (e.g. ul, li, a, b) that can be embedded in the user's chat window."

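The updated "taskbreakdown" prompt asks the model for a JSON object whose fields are single strings delimited by " | ". As a minimal sketch of how such a response can be consumed (the sample values and the parse_breakdown helper below are illustrative, not part of this commit):

```python
import json

# Hypothetical model output that follows the updated "taskbreakdown" prompt.
sample_response = json.dumps({
    "steps_for_employee": "1. Put on gloves | 2. Wipe the counter | 3. Throw the gloves away",
    "note_to_job_coach": "Demonstrate the first step once | Check in after ten minutes",
    "additional_training_resources": "Cleaning basics: https://example.com/cleaning",
})

def parse_breakdown(raw: str) -> dict:
    """Split each ' | '-delimited field described in the system prompt into a list."""
    data = json.loads(raw)
    return {
        "steps": data.get("steps_for_employee", "").split(" | "),
        "notes": data.get("note_to_job_coach", "").split(" | "),
        "resources": data.get("additional_training_resources", "").split(" | "),
    }

print(parse_breakdown(sample_response))
```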
demo-app/app.py

Lines changed: 39 additions & 19 deletions
@@ -6,6 +6,7 @@
 import sys
 import io
 import time
+import base64
 from azure.storage.blob import BlobServiceClient
 import mimetypes
 import requests
@@ -23,6 +24,7 @@
 FUNCTION_HTTP_OPENAI_WITH_INDEX_URL = os.getenv("FUNCTION_HTTP_OPENAI_WITH_INDEX_URL")
 FUNCTION_SAVE_INSIGHTS_URL = os.getenv("FUNCTION_SAVE_INSIGHTS_URL")
 FUNCTION_SEARCH_INSIGHTS_URL = os.getenv("FUNCTION_SEARCH_INSIGHTS_URL")
+FUNCTION_HTTP_TEXT_TO_SPEECH_URL = os.getenv("FUNCTION_HTTP_TEXT_TO_SPEECH_URL")

 SYSTEM_ROLE_TASKBREAKDOWN = os.getenv("SYSTEM_ROLE_TASKBREAKDOWN")
 SYSTEM_ROLE_CHAT = os.getenv("SYSTEM_ROLE_CHAT","chat")
@@ -219,7 +221,7 @@ def download_processed(filename):
         flash(f"Error downloading file: {str(e)}")
         return redirect(url_for('index'))

-@app.route('/ai/task-breakdown', methods=['POST'])
+@app.route('/api/task-breakdown', methods=['POST'])
 @login_required
 def ai_task_breakdown():
     try:
@@ -257,27 +259,20 @@ def ai_task_breakdown():
         response = requests.post(FUNCTION_HTTP_OPENAI_URL, json=payload, timeout=30)

         if response.status_code == 200:
-            result = response.json()
-            message = result.get('message', '')
+            response_data = response.json()
+            raw_message = response_data.get("message", {})
+            cleaned_response = raw_message.replace("```json", "").replace("```", "").strip()
+            response_message = json.loads(cleaned_response)

-            # Parse the response (assuming it’s a string with steps and accommodations)
-            steps = []
-            accommodations_list = []
-            current_list = steps
-            for line in message.split('\n'):
-                line = line.strip()
-                if line.startswith('Steps:') or line.startswith('Step-by-Step Instructions:'):
-                    current_list = steps
-                elif line.startswith('Accommodations:') or line.startswith('Suggested Accommodations:'):
-                    current_list = accommodations_list
-                elif line and line[0].isdigit():
-                    current_list.append(line)
-                elif line.startswith('- ') or line.startswith('* '):
-                    current_list.append(line[2:])
+            # Extract from response_message
+            steps_for_employee = response_message.get("steps_for_employee", "")
+            note_to_job_coach = response_message.get("note_to_job_coach", "")
+            additional_training_resources = response_message.get("additional_training_resources", "")

             return jsonify({
-                'steps': steps,
-                'accommodations': accommodations_list
+                'steps_for_employee': steps_for_employee,
+                'note_to_job_coach': note_to_job_coach,
+                'additional_training_resources': additional_training_resources
             })
         else:
             return jsonify({'error': f"Failed to generate breakdown: {response.text}"}), 500
@@ -286,6 +281,31 @@ def ai_task_breakdown():
         print(f"Error in task breakdown: {str(e)}")
         return jsonify({'error': str(e)}), 500

+@app.route('/api/generateAudio', methods=['POST'])
+@login_required
+def generate_audio():
+    print("generate_audio : Generating audio for task breakdown start")
+    try:
+        data = request.json
+        text = data.get('text')
+        if text:
+            payload = {"text" : text}
+            response = requests.post(FUNCTION_HTTP_TEXT_TO_SPEECH_URL, json=payload)
+            if response.status_code == 200:
+                # Encode the audio binary data to base64
+                audio_base64 = base64.b64encode(response.content).decode('utf-8')
+                # Return the audio data in JSON format
+                return jsonify({
+                    'audio': audio_base64
+                })
+            else:
+                return jsonify({'error': 'Failed to generate audio.'}), 400
+        else:
+            return jsonify({'error': 'No text provided for audio generation'}), 400
+    except Exception as e:
+        print(f"Error while generating audio for task breakdown: {str(e)}")
+        return jsonify({'error': str(e)}), 500
+
 # Add this new route for the AI chat functionality
 @app.route('/api/chat', methods=['POST'])
 @login_required

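Taken together, the two routes above form a generate-then-narrate flow: /api/task-breakdown returns the structured breakdown, and /api/generateAudio turns text into base64-encoded audio via the text-to-speech Azure Function. A small, hypothetical client script (not part of the commit; the base URL, request field names, and pre-authenticated session are assumptions) could exercise them like this:

```python
import base64
import requests

BASE_URL = "http://localhost:5000"  # assumed local address of the demo app
session = requests.Session()        # assumes this session has already logged in (@login_required)

# 1. Request a structured task breakdown (payload field names are assumptions).
breakdown = session.post(
    f"{BASE_URL}/api/task-breakdown",
    json={"taskName": "Restock shelves", "taskDetails": "Front aisle only"},
    timeout=60,
).json()
steps_text = breakdown.get("steps_for_employee", "").replace(" | ", " ")

# 2. Convert the combined step text to speech via the new endpoint.
audio_response = session.post(f"{BASE_URL}/api/generateAudio", json={"text": steps_text}, timeout=60)
audio_response.raise_for_status()

# 3. The route returns base64-encoded audio; decode it back to bytes and save it.
audio_bytes = base64.b64decode(audio_response.json()["audio"])
with open("task_steps.mp3", "wb") as f:  # the front end treats the payload as audio/mpeg
    f.write(audio_bytes)
```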
demo-app/static/css/style.css

Lines changed: 1 addition & 1 deletion
@@ -294,7 +294,7 @@ main {
     padding: 1rem;
 }

-#taskSteps li, #taskAccommodations li {
+#taskSteps li, #taskNoteToJobCoach li, #taskAdditionalResources li {
     margin-bottom: 0.75rem;
     line-height: 1.5;
 }

demo-app/static/js/script.js

Lines changed: 132 additions & 19 deletions
@@ -132,12 +132,28 @@ document.addEventListener('DOMContentLoaded', function() {
     // Task Breakdown Generator
     const generateTaskBtn = document.getElementById('generateTaskBreakdown');
     const taskResultDiv = document.getElementById('taskBreakdownResult');
+    const loadingSpinner = document.getElementById("loadingSpinner");
     const taskStepsList = document.getElementById('taskSteps');
-    const taskAccommodationsList = document.getElementById('taskAccommodations');
+    const taskNoteToJobCoachList = document.getElementById('taskNoteToJobCoach');
+    const taskAdditionalResourcesList = document.getElementById('taskAdditionalResources');
+    const generateAudioButton = document.getElementById('generateAudioButton');
+    const audioLoadingSpinner = document.getElementById('audioLoadingSpinner');
     const saveTaskBtn = document.getElementById('saveTaskBreakdown');

     if (generateTaskBtn) {
         generateTaskBtn.addEventListener('click', async function() {
+            // Show loading spinner and hide the result container
+            loadingSpinner.classList.remove('d-none'); // Show the spinner
+            taskResultDiv.classList.add('d-none');
+
+            // Clear the previous audio and hide the audio player
+            const audioPlayer = document.getElementById('audioPlayer');
+            audioPlayer.src = '';
+            audioPlayer.classList.add('d-none');
+
+            // Hide the audio loading spinner
+            audioLoadingSpinner.classList.add('d-none');
+
             const taskName = document.getElementById('taskName').value;
             const taskDetails = document.getElementById('taskDetails').value;
             const needsVisual = document.getElementById('needsVisual').checked;
@@ -148,14 +164,8 @@ document.addEventListener('DOMContentLoaded', function() {
                 return;
             }

-            // Show loading indicator
-            taskStepsList.innerHTML = '<div class="text-center py-3"><div class="spinner-border text-primary" role="status"></div><p class="mt-2">Generating task breakdown...</p></div>';
-            taskAccommodationsList.innerHTML = '';
-            taskResultDiv.classList.remove('d-none');
-            generateTaskBtn.disabled = true;
-
             try {
-                const response = await fetch('/ai/task-breakdown', {
+                const response = await fetch('/api/task-breakdown', {
                     method: 'POST',
                     headers: { 'Content-Type': 'application/json' },
                     body: JSON.stringify({
@@ -174,45 +184,121 @@ document.addEventListener('DOMContentLoaded', function() {
                 if (response.ok) {
                     // Populate the steps list
                     taskStepsList.innerHTML = '';
-                    result.steps.forEach(step => {
+                    const stepsArray = result.steps_for_employee.split(' | ');
+                    stepsArray.forEach(step => {
                         const li = document.createElement('li');
                         li.className = 'mb-2';
-                        li.textContent = step;
+                        li.textContent = step.replace(/^\d+\.\s*/, ""); ;
                         taskStepsList.appendChild(li);
                     });

-                    // Populate the accommodations list
-                    taskAccommodationsList.innerHTML = '';
-                    result.accommodations.forEach(accommodation => {
+                    // Populate the note to job coach list
+                    taskNoteToJobCoachList.innerHTML = '';
+                    const notesArray = result.note_to_job_coach.split(' | ');
+                    notesArray.forEach(note => {
+                        const li = document.createElement('li');
+                        li.className = 'mb-2';
+                        li.textContent = note;
+                        taskNoteToJobCoachList.appendChild(li);
+                    });
+
+                    // Populate the additional resources list
+                    taskAdditionalResourcesList.innerHTML = '';
+                    const resourcesArray = result.additional_training_resources.split(' | ');
+                    resourcesArray.forEach(resource => {
+                        const [resourceName, resourceLink] = resource.split(': ');
+
                         const li = document.createElement('li');
                         li.className = 'mb-2';
-                        li.textContent = accommodation;
-                        taskAccommodationsList.appendChild(li);
+
+                        const a = document.createElement('a');
+                        a.href = resourceLink.trim();
+                        a.textContent = resourceName.trim();
+                        a.target = "_blank";
+
+                        li.appendChild(a);
+                        taskAdditionalResourcesList.appendChild(li);
                     });

-                    // Show the save button
+                    // Show the save button, hide the spinner, and display the result
                     saveTaskBtn.classList.remove('d-none');
+                    loadingSpinner.classList.add('d-none');
+                    taskResultDiv.classList.remove('d-none');
                 } else {
                     taskStepsList.innerHTML = `<p class="text-danger">Error: ${result.error}</p>`;
-                    taskAccommodationsList.innerHTML = '';
+                    loadingSpinner.classList.add('d-none');
+                    // taskNoteToJobCoachList.innerHTML = '';
+                    // taskAdditionalResourcesList.innerHTML = '';
                 }
             } catch (error) {
                 console.error('Task breakdown error:', error);
                 taskStepsList.innerHTML = '<p class="text-danger">An error occurred while generating the breakdown.</p>';
-                taskAccommodationsList.innerHTML = '';
+                loadingSpinner.classList.add('d-none');
+                // taskNoteToJobCoachList.innerHTML = '';
+                // taskAdditionalResourcesList.innerHTML = '';
             } finally {
                 generateTaskBtn.disabled = false;
             }
         });
+        generateAudioButton.addEventListener('click', async function() {
+            console.log('Generate audio button clicked');
+            const audioLoadingSpinner = document.getElementById('audioLoadingSpinner');
+            audioLoadingSpinner.classList.remove('d-none');
+            const taskSteps = []; // Replace with the array containing notes to be converted to audio
+            taskStepsList.querySelectorAll('li').forEach(li => {
+                taskSteps.push(li.textContent);
+            });
+
+            const textToConvert = taskSteps.join(' '); // Combine notes into a single text string
+
+            try {
+                // Call the backend to generate audio
+                const response = await fetch('/api/generateAudio', {
+                    method: 'POST',
+                    headers: { 'Content-Type': 'application/json' },
+                    body: JSON.stringify({ text: textToConvert })
+                });
+
+                if (response.ok) {
+                    // Parse the JSON response
+                    const data = await response.json();
+
+                    // Get the base64-encoded audio data
+                    const audioBase64 = data.audio;
+
+                    // Convert base64 string to binary data (audio)
+                    const audioBlob = new Blob([new Uint8Array(atob(audioBase64).split("").map(c => c.charCodeAt(0)))], { type: "audio/mpeg" });
+
+                    // Create an audio URL from the blob
+                    const audioUrl = URL.createObjectURL(audioBlob);
+
+                    // Get the audio player element
+                    const audioPlayer = document.getElementById('audioPlayer');
+
+                    // Set the audio player source to the blob URL
+                    audioPlayer.src = audioUrl;

+                    // Make the audio player visible and remove spinner
+                    audioPlayer.classList.remove('d-none');
+                    audioLoadingSpinner.classList.add('d-none');
+                    // audioPlayer.play();
+                } else {
+                    console.error('Error generating audio:', response.statusText);
+                    audioLoadingSpinner.classList.add('d-none');
+                }
+            } catch (error) {
+                console.error('Error:', error);
+                audioLoadingSpinner.classList.add('d-none');
+            }
+        });
         saveTaskBtn.addEventListener('click', async function() {
             try {
                 const response = await fetch('/upload_note', {
                     method: 'POST',
                     headers: { 'Content-Type': 'application/json' },
                     body: JSON.stringify({
                         category: 'Task Breakdown',
-                        content: `Task: ${document.getElementById('taskName').value}\nSteps:\n${Array.from(taskStepsList.children).map(li => li.textContent).join('\n')}\nAccommodations:\n${Array.from(taskAccommodationsList.children).map(li => li.textContent).join('\n')}`,
+                        content: `Task: ${document.getElementById('taskName').value}\nSteps:\n${Array.from(taskStepsList.children).map(li => li.textContent).join('\n')}\nNote to Job COach:\n${Array.from(taskNoteToJobCoachList.children).map(li => li.textContent).join('\n')}\nAdditional Resources:\n${Array.from(taskAdditionalResourcesList.children).map(li => li.textContent).join('\n')}`,
                         consumer_id: window.consumerId || 'c001'
                     })
                 });
@@ -231,6 +317,33 @@ document.addEventListener('DOMContentLoaded', function() {
             });
         }

+        // Listen for when the modal is hidden (closed)
+        $('#taskBreakdownModal').on('hidden.bs.modal', function () {
+            // Reset form fields
+            document.getElementById('taskBreakdownForm').reset();
+
+            // Reset steps, notes, and resources
+            document.getElementById('taskSteps').innerHTML = '';
+            document.getElementById('taskNoteToJobCoach').innerHTML = '';
+            document.getElementById('taskAdditionalResources').innerHTML = '';
+
+            // Hide the audio player
+            const audioPlayer = document.getElementById('audioPlayer');
+            audioPlayer.classList.add('d-none');
+            audioPlayer.pause();
+            audioPlayer.src = ''; // Reset the audio source
+
+            // Hide the loading spinner
+            document.getElementById('loadingSpinner').classList.add('d-none');
+
+            // Hide the result section
+            document.getElementById('taskBreakdownResult').classList.add('d-none');
+
+            // Reset any other UI elements
+            document.getElementById('saveTaskBreakdown').classList.add('d-none');
+        });
+
+
     // Add functionality to the AI chat form
     // const aiChatForm = document.getElementById('aiChatForm');
     // if (aiChatForm) {

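In the updated front end, the leading step numbers are stripped when each list item is created, and the audio request joins the visible step text with spaces. A rough Python mirror of that text preparation (purely illustrative; the sample steps are hypothetical):

```python
import re

# Hypothetical steps as they arrive in steps_for_employee, before rendering.
steps = ["1. Put on gloves", "2. Wipe the counter", "3. Throw the gloves away"]

# Mirrors the JS step.replace(/^\d+\.\s*/, "") followed by taskSteps.join(' ').
text_to_convert = " ".join(re.sub(r"^\d+\.\s*", "", step) for step in steps)
print(text_to_convert)  # Put on gloves Wipe the counter Throw the gloves away
```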
0 commit comments