From 1132f7438d6e886245482a710266d65c96d20eb3 Mon Sep 17 00:00:00 2001
From: Lucas Santana
Date: Sun, 22 Dec 2024 15:58:32 -0300
Subject: [PATCH] feat: reorganize reading metrics and feedback structure
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

- Export the MetricsData interface from StoryMetrics for reuse
- Import the interface in StoryPage
- Keep types consistent between recordings and metrics
- Organize the reading feedback into columns
- Implement a responsive layout for different screen sizes
---
 n8n.js                                        |  79 ------
 src/components/story/RecordingHistoryCard.tsx | 123 ++++++++++
 src/components/story/StoryMetrics.tsx         | 142 +++++++++++
 src/components/ui/accordion.tsx               |  54 +++++
 src/pages/student-dashboard/StoryPage.tsx     | 229 +++++++++++++++++-
 5 files changed, 547 insertions(+), 80 deletions(-)
 delete mode 100644 n8n.js
 create mode 100644 src/components/story/RecordingHistoryCard.tsx
 create mode 100644 src/components/story/StoryMetrics.tsx
 create mode 100644 src/components/ui/accordion.tsx

diff --git a/n8n.js b/n8n.js
deleted file mode 100644
index 8341986..0000000
--- a/n8n.js
+++ /dev/null
@@ -1,79 +0,0 @@
-// Estrutura do fluxo
-[
-  {
-    // 1. Webhook Trigger
-    "name": "Webhook",
-    "type": "n8n-nodes-base.webhook",
-    "parameters": {
-      "path": "audio-processing",
-      "responseMode": "lastNode"
-    }
-  },
-  {
-    // 2. Download do Áudio do Supabase
-    "name": "Supabase",
-    "type": "n8n-nodes-base.supabase",
-    "parameters": {
-      "operation": "download",
-      "bucket": "audios",
-      "filePath": "={{$json.file_path}}"
-    }
-  },
-  {
-    // 3. Pré-processamento do Áudio (usando FFmpeg)
-    "name": "FFmpeg",
-    "type": "n8n-nodes-base.executeCommand",
-    "parameters": {
-      "command": "ffmpeg -i input.wav -af 'anlmdn,highpass=f=200,lowpass=f=3000,silenceremove=1:0:-50dB' output.wav"
-    }
-  },
-  {
-    // 4. Transcrição (usando OpenAI Whisper)
-    "name": "Whisper",
-    "type": "n8n-nodes-base.httpRequest",
-    "parameters": {
-      "url": "https://api.openai.com/v1/audio/transcriptions",
-      "method": "POST",
-      "headers": {
-        "Authorization": "Bearer {{$env.OPENAI_API_KEY}}"
-      }
-    }
-  },
-  {
-    // 5. Análise do Texto (usando GPT-4)
-    "name": "GPT4Analysis",
-    "type": "n8n-nodes-base.openAi",
-    "parameters": {
-      "model": "gpt-4",
-      "prompt": `Analise a seguinte transcrição considerando:
-        1. Fluência (velocidade, pausas, prosódia)
-        2. Pronúncia (precisão fonética, clareza)
-        3. Erros (substituições, omissões)
-        4. Compreensão (coerência, autocorreção)
-
-        Transcrição: {{$node.Whisper.data.text}}
-
-        Forneça uma análise detalhada seguindo as métricas especificadas.`
-    }
-  },
-  {
-    // 6. Salvar Resultados no Supabase
-    "name": "SaveResults",
-    "type": "n8n-nodes-base.supabase",
-    "parameters": {
-      "operation": "insert",
-      "table": "audio_analysis",
-      "data": {
-        "audio_path": "={{$json.file_path}}",
-        "transcription": "={{$node.Whisper.data.text}}",
-        "analysis": "={{$node.GPT4Analysis.data.choices[0].text}}",
-        "metrics": {
-          "fluency": "={{$node.GPT4Analysis.data.metrics.fluency}}",
-          "pronunciation": "={{$node.GPT4Analysis.data.metrics.pronunciation}}",
-          "errors": "={{$node.GPT4Analysis.data.metrics.errors}}",
-          "comprehension": "={{$node.GPT4Analysis.data.metrics.comprehension}}"
-        }
-      }
-    }
-  }
-]
\ No newline at end of file
diff --git a/src/components/story/RecordingHistoryCard.tsx b/src/components/story/RecordingHistoryCard.tsx
new file mode 100644
index 0000000..8e13e14
--- /dev/null
+++ b/src/components/story/RecordingHistoryCard.tsx
@@ -0,0 +1,123 @@
+import { Accordion, AccordionContent, AccordionItem, AccordionTrigger } from '../ui/accordion';
+
+// Shape of a processed recording row from the story_recordings table
+// (mirrors the StoryRecording interface declared in StoryPage.tsx).
+interface StoryRecording {
+  id: string;
+  fluency_score: number;
+  pronunciation_score: number;
+  accuracy_score: number;
+  comprehension_score: number;
+  words_per_minute: number;
+  pause_count: number;
+  error_count: number;
+  self_corrections: number;
+  strengths: string[];
+  improvements: string[];
+  suggestions: string;
+  created_at: string;
+  processed_at: string | null;
+}
+
+export function RecordingHistoryCard({ recording }: { recording: StoryRecording }) {
+  const metrics = [
+    { label: 'Fluência', value: recording.fluency_score, color: 'text-blue-600' },
+    { label: 'Pronúncia', value: recording.pronunciation_score, color: 'text-green-600' },
+    { label: 'Precisão', value: recording.accuracy_score, color: 'text-purple-600' },
+    { label: 'Compreensão', value: recording.comprehension_score, color: 'text-orange-600' }
+  ];
+
+  const details = [
+    { label: 'Palavras por minuto', value: recording.words_per_minute },
+    { label: 'Pausas', value: recording.pause_count },
+    { label: 'Erros', value: recording.error_count },
+    { label: 'Autocorreções', value: recording.self_corrections }
+  ];
+
+  return (
+    <Accordion type="single" collapsible className="bg-white rounded-lg shadow-sm">
+      <AccordionItem value={recording.id} className="border-none">
+        <AccordionTrigger className="px-4 py-3 hover:no-underline">
+          <div className="flex flex-col md:flex-row md:items-center gap-3 w-full text-left">
+            <span className="text-sm text-gray-500">
+              {new Date(recording.created_at).toLocaleDateString('pt-BR', {
+                day: '2-digit',
+                month: '2-digit',
+                year: 'numeric',
+                hour: '2-digit',
+                minute: '2-digit'
+              })}
+            </span>
+            <div className="grid grid-cols-2 md:grid-cols-4 gap-4 flex-1">
+              {metrics.map((metric) => (
+                <div key={metric.label} className="flex flex-col">
+                  <span className="text-xs text-gray-500">{metric.label}</span>
+                  <span className={`text-sm font-semibold ${metric.color}`}>{metric.value}%</span>
+                </div>
+              ))}
+            </div>
+          </div>
+        </AccordionTrigger>
+
+        <AccordionContent>
+          <div className="grid grid-cols-1 md:grid-cols-3 gap-6 px-4 pb-4">
+            {/* Column 1: technical details */}
+            <div>
+              <h4 className="text-sm font-medium text-gray-700 mb-2">Detalhes Técnicos</h4>
+              <div className="space-y-1">
+                {details.map((detail) => (
+                  <div key={detail.label} className="flex justify-between text-sm">
+                    <span className="text-gray-500">{detail.label}</span>
+                    <span className="font-medium text-gray-800">{detail.value}</span>
+                  </div>
+                ))}
+              </div>
+            </div>
+
+            {/* Column 2: strengths and points to improve */}
+            <div className="space-y-4">
+              <div>
+                <div className="flex items-center gap-2 text-sm font-medium text-green-700 mb-1">
+                  <span>Pontos Fortes</span>
+                </div>
+                <ul className="text-sm text-gray-600 space-y-1">
+                  {recording.strengths.map((strength, i) => (
+                    <li key={i}>• {strength}</li>
+                  ))}
+                </ul>
+              </div>
+
+              <div>
+                <div className="flex items-center gap-2 text-sm font-medium text-orange-700 mb-1">
+                  <span>Pontos para Melhorar</span>
+                </div>
+                <ul className="text-sm text-gray-600 space-y-1">
+                  {recording.improvements.map((improvement, i) => (
+                    <li key={i}>• {improvement}</li>
+                  ))}
+                </ul>
+              </div>
+            </div>
+
+            {/* Column 3: suggestions and next steps */}
+            <div className="space-y-4">
+              <div>
+                <div className="flex items-center gap-2 text-sm font-medium text-blue-700 mb-1">
+                  <span>Sugestões</span>
+                </div>
+                <p className="text-sm text-gray-600">
+                  {recording.suggestions}
+                </p>
+              </div>
+
+              <div>
+                <div className="flex items-center gap-2 text-sm font-medium text-purple-700 mb-1">
+                  <span>Próxima Meta</span>
+                </div>
+                <p className="text-sm text-gray-600">
+                  Tente alcançar {Math.min(100, recording.fluency_score + 5)}% de fluência na próxima leitura.
+                </p>
+              </div>
+            </div>
+          </div>
+        </AccordionContent>
+      </AccordionItem>
+    </Accordion>
+  );
+}
\ No newline at end of file
diff --git a/src/components/story/StoryMetrics.tsx b/src/components/story/StoryMetrics.tsx
new file mode 100644
index 0000000..1e25902
--- /dev/null
+++ b/src/components/story/StoryMetrics.tsx
@@ -0,0 +1,142 @@
+import React from 'react';
+import { Activity, Book, Mic, Brain } from 'lucide-react';
+
+export interface MetricsData {
+  metrics: {
+    fluency: number;
+    pronunciation: number;
+    accuracy: number;
+    comprehension: number;
+  };
+  feedback: {
+    strengths: string[];
+    improvements: string[];
+    suggestions: string;
+  };
+  details: {
+    wordsPerMinute: number;
+    pauseCount: number;
+    errorCount: number;
+    selfCorrections: number;
+  };
+}
+
+interface StoryMetricsProps {
+  data?: MetricsData;
+  isLoading?: boolean;
+}
+
+export function StoryMetrics({ data, isLoading }: StoryMetricsProps): JSX.Element {
+  if (isLoading) {
+    return (
+      <div className="bg-white rounded-lg shadow-sm p-6">
+        <div className="grid grid-cols-2 md:grid-cols-4 gap-4">
+          {[...Array(4)].map((_, i) => (
+            <div key={i} className="h-24 bg-gray-100 rounded-lg animate-pulse" />
+          ))}
+        </div>
+      </div>
+    );
+  }
+
+  if (!data) {
+    return (
+      <div className="bg-white rounded-lg shadow-sm p-6">
+        <p className="text-sm text-gray-500 text-center">
+          Aguardando gravação para gerar métricas de leitura...
+        </p>
+      </div>
+    );
+  }
+
+  const metrics = [
+    {
+      label: 'Fluência',
+      value: data.metrics.fluency,
+      icon: Activity,
+      color: 'text-blue-600',
+      detail: `${data.details.wordsPerMinute} palavras/min`
+    },
+    {
+      label: 'Pronúncia',
+      value: data.metrics.pronunciation,
+      icon: Mic,
+      color: 'text-green-600',
+      detail: `${data.details.errorCount} erros`
+    },
+    {
+      label: 'Precisão',
+      value: data.metrics.accuracy,
+      icon: Book,
+      color: 'text-purple-600',
+      detail: `${data.details.selfCorrections} autocorreções`
+    },
+    {
+      label: 'Compreensão',
+      value: data.metrics.comprehension,
+      icon: Brain,
+      color: 'text-orange-600',
+      detail: `${data.details.pauseCount} pausas`
+    }
+  ];
+
+  return (
+    <div className="bg-white rounded-lg shadow-sm p-6 space-y-6">
+      <div className="grid grid-cols-2 md:grid-cols-4 gap-4">
+        {metrics.map((metric) => (
+          <div key={metric.label} className="border rounded-lg p-4">
+            <div className="flex items-center justify-between">
+              <metric.icon className={`h-5 w-5 ${metric.color}`} />
+              <span className={`text-xl font-bold ${metric.color}`}>{metric.value}%</span>
+            </div>
+            <p className="mt-2 text-sm font-medium text-gray-700">{metric.label}</p>
+            <p className="text-xs text-gray-500">{metric.detail}</p>
+          </div>
+        ))}
+      </div>
+
+      <div>
+        <h3 className="text-lg font-semibold text-gray-800 mb-4">Feedback da Leitura</h3>
+
+        <div className="grid grid-cols-1 md:grid-cols-3 gap-6">
+          <div>
+            <div className="flex items-center gap-2 text-sm font-medium text-green-700 mb-2">
+              <span>Pontos Fortes</span>
+            </div>
+            <ul className="text-sm text-gray-600 space-y-1">
+              {data.feedback.strengths.map((strength, i) => (
+                <li key={i}>• {strength}</li>
+              ))}
+            </ul>
+          </div>
+
+          <div>
+            <div className="flex items-center gap-2 text-sm font-medium text-orange-700 mb-2">
+              <span>Pontos para Melhorar</span>
+            </div>
+            <ul className="text-sm text-gray-600 space-y-1">
+              {data.feedback.improvements.map((improvement, i) => (
+                <li key={i}>• {improvement}</li>
+              ))}
+            </ul>
+          </div>
+
+          <div>
+            <div className="flex items-center gap-2 text-sm font-medium text-blue-700 mb-2">
+              <span>Sugestões</span>
+            </div>
+            <p className="text-sm text-gray-600">
+              {data.feedback.suggestions}
+            </p>
+          </div>
+        </div>
+      </div>
+    </div>
+  );
+}
\ No newline at end of file
diff --git a/src/components/ui/accordion.tsx b/src/components/ui/accordion.tsx
new file mode 100644
index 0000000..46a415d
--- /dev/null
+++ b/src/components/ui/accordion.tsx
@@ -0,0 +1,54 @@
+import * as React from 'react';
+import * as AccordionPrimitive from '@radix-ui/react-accordion';
+import { ChevronDown } from 'lucide-react';
+import { cn } from '../../lib/utils';
+
+const Accordion = AccordionPrimitive.Root;
+
+const AccordionItem = React.forwardRef<
+  React.ElementRef<typeof AccordionPrimitive.Item>,
+  React.ComponentPropsWithoutRef<typeof AccordionPrimitive.Item>
+>(({ className, ...props }, ref) => (
+  <AccordionPrimitive.Item ref={ref} className={cn('border-b', className)} {...props} />
+));
+AccordionItem.displayName = 'AccordionItem';
+
+const AccordionTrigger = React.forwardRef<
+  React.ElementRef<typeof AccordionPrimitive.Trigger>,
+  React.ComponentPropsWithoutRef<typeof AccordionPrimitive.Trigger>
+>(({ className, children, ...props }, ref) => (
+  <AccordionPrimitive.Header className="flex">
+    <AccordionPrimitive.Trigger
+      ref={ref}
+      className={cn(
+        'flex flex-1 items-center justify-between py-4 font-medium transition-all hover:underline [&[data-state=open]>svg]:rotate-180',
+        className
+      )}
+      {...props}
+    >
+      {children}
+      <ChevronDown className="h-4 w-4 shrink-0 transition-transform duration-200" />
+    </AccordionPrimitive.Trigger>
+  </AccordionPrimitive.Header>
+));
+AccordionTrigger.displayName = 'AccordionTrigger';
+
+const AccordionContent = React.forwardRef<
+  React.ElementRef<typeof AccordionPrimitive.Content>,
+  React.ComponentPropsWithoutRef<typeof AccordionPrimitive.Content>
+>(({ className, children, ...props }, ref) => (
+  <AccordionPrimitive.Content
+    ref={ref}
+    className="overflow-hidden text-sm transition-all data-[state=open]:animate-accordion-down data-[state=closed]:animate-accordion-up"
+    {...props}
+  >
+    <div className={cn('pb-4 pt-0', className)}>
+      {children}
+    </div>
+  </AccordionPrimitive.Content>
+));
+AccordionContent.displayName = 'AccordionContent';
+
+export { Accordion, AccordionItem, AccordionTrigger, AccordionContent };
\ No newline at end of file
diff --git a/src/pages/student-dashboard/StoryPage.tsx b/src/pages/student-dashboard/StoryPage.tsx
index 2962cfc..ac6ea6b 100644
--- a/src/pages/student-dashboard/StoryPage.tsx
+++ b/src/pages/student-dashboard/StoryPage.tsx
@@ -1,9 +1,129 @@
 import React from 'react';
-import { ArrowLeft, ArrowRight, Mic, Volume2, Share2, Save } from 'lucide-react';
+import { ArrowLeft, ArrowRight, Mic, Volume2, Share2, Save, ChevronDown, ChevronUp } from 'lucide-react';
 import { useParams, useNavigate } from 'react-router-dom';
 import { supabase } from '../../lib/supabase';
 import { AudioRecorder } from '../../components/story/AudioRecorder';
 import type { Story } from '../../types/database';
+import { StoryMetrics } from '../../components/story/StoryMetrics';
+import type { MetricsData } from '../../components/story/StoryMetrics';
+
+interface StoryRecording {
+  id: string;
+  fluency_score: number;
+  pronunciation_score: number;
+  accuracy_score: number;
+  comprehension_score: number;
+  words_per_minute: number;
+  pause_count: number;
+  error_count: number;
+  self_corrections: number;
+  strengths: string[];
+  improvements: string[];
+  suggestions: string;
+  created_at: string;
+  processed_at: string | null;
+}
+
+function RecordingHistoryCard({ recording }: { recording: StoryRecording }) {
+  const [isExpanded, setIsExpanded] = React.useState(false);
+
+  const metrics = [
+    { label: 'Fluência', value: recording.fluency_score, color: 'text-blue-600' },
+    { label: 'Pronúncia', value: recording.pronunciation_score, color: 'text-green-600' },
+    { label: 'Precisão', value: recording.accuracy_score, color: 'text-purple-600' },
+    { label: 'Compreensão', value: recording.comprehension_score, color: 'text-orange-600' }
+  ];
+
+  return (
+    <div className="bg-white rounded-lg shadow-sm">
+      {/* Header, always visible */}
+      <button
+        onClick={() => setIsExpanded(!isExpanded)}
+        className="w-full flex items-center justify-between px-4 py-3 text-left"
+      >
+        <div className="flex flex-col md:flex-row md:items-center gap-3 flex-1">
+          <span className="text-sm text-gray-500">
+            {new Date(recording.created_at).toLocaleDateString('pt-BR', {
+              day: '2-digit',
+              month: '2-digit',
+              year: 'numeric',
+              hour: '2-digit',
+              minute: '2-digit'
+            })}
+          </span>
+          <div className="grid grid-cols-2 md:grid-cols-4 gap-4">
+            {metrics.map((metric) => (
+              <div key={metric.label} className="flex flex-col">
+                <span className="text-xs text-gray-500">{metric.label}</span>
+                <span className={`text-sm font-semibold ${metric.color}`}>{metric.value}%</span>
+              </div>
+            ))}
+          </div>
+        </div>
+        {isExpanded ? <ChevronUp className="h-4 w-4" /> : <ChevronDown className="h-4 w-4" />}
+      </button>
+
+      {/* Expanded content */}
+      {isExpanded && (
+        <div className="px-4 pb-4 space-y-4">
+          <div className="grid grid-cols-2 md:grid-cols-4 gap-4 text-sm">
+            <div>
+              <span className="block text-gray-500">Palavras por minuto:</span>
+              <span className="font-medium">{recording.words_per_minute}</span>
+            </div>
+            <div>
+              <span className="block text-gray-500">Pausas:</span>
+              <span className="font-medium">{recording.pause_count}</span>
+            </div>
+            <div>
+              <span className="block text-gray-500">Erros:</span>
+              <span className="font-medium">{recording.error_count}</span>
+            </div>
+            <div>
+              <span className="block text-gray-500">Autocorreções:</span>
+              <span className="font-medium">{recording.self_corrections}</span>
+            </div>
+          </div>
+
+          <div className="grid grid-cols-1 md:grid-cols-3 gap-6">
+            <div>
+              <h4 className="text-sm font-medium text-green-700 mb-1">Pontos Fortes</h4>
+              <ul className="text-sm text-gray-600 space-y-1">
+                {recording.strengths.map((strength, i) => (
+                  <li key={i}>• {strength}</li>
+                ))}
+              </ul>
+            </div>
+
+            <div>
+              <h4 className="text-sm font-medium text-orange-700 mb-1">Pontos para Melhorar</h4>
+              <ul className="text-sm text-gray-600 space-y-1">
+                {recording.improvements.map((improvement, i) => (
+                  <li key={i}>• {improvement}</li>
+                ))}
+              </ul>
+            </div>
+
+            <div>
+              <h4 className="text-sm font-medium text-blue-700 mb-1">Sugestões</h4>
+              <p className="text-sm text-gray-600">{recording.suggestions}</p>
+            </div>
+          </div>
+        </div>
+      )}
+    </div>
+  );
+}
 
 export function StoryPage() {
   const { id } = useParams();
@@ -13,6 +133,10 @@ export function StoryPage() {
   const [loading, setLoading] = React.useState(true);
   const [error, setError] = React.useState<string | null>(null);
   const [isPlaying, setIsPlaying] = React.useState(false);
+  const [metrics, setMetrics] = React.useState<MetricsData>();
+  const [loadingMetrics, setLoadingMetrics] = React.useState(true);
+  const [recordings, setRecordings] = React.useState<StoryRecording[]>([]);
+  const [loadingRecordings, setLoadingRecordings] = React.useState(true);
 
   React.useEffect(() => {
     const fetchStory = async () => {
@@ -39,6 +163,53 @@ export function StoryPage() {
     fetchStory();
   }, [id]);
 
+  React.useEffect(() => {
+    const fetchMetrics = async () => {
+      if (!story?.id) return;
+
+      try {
+        const { data, error } = await supabase
+          .from('reading_metrics')
+          .select('*')
+          .eq('story_id', story.id)
+          .single();
+
+        if (error) throw error;
+        setMetrics(data);
+      } catch (err) {
+        console.error('Erro ao carregar métricas:', err);
+      } finally {
+        setLoadingMetrics(false);
+      }
+    };
+
+    fetchMetrics();
+  }, [story?.id]);
+
+  React.useEffect(() => {
+    const fetchRecordings = async () => {
+      if (!story?.id) return;
+
+      try {
+        const { data, error } = await supabase
+          .from('story_recordings')
+          .select('*')
+          .eq('story_id', story.id)
+          .eq('status', 'completed')
+          .order('created_at', { ascending: false });
+
+        if (error) throw error;
+        setRecordings(data || []);
+      } catch (err) {
+        console.error('Erro ao carregar gravações:', err);
+      } finally {
+        setLoadingRecordings(false);
+      }
+    };
+
+    fetchRecordings();
+  }, [story?.id]);
+
   const handleShare = async () => {
     if (navigator.share) {
       try {
@@ -53,6 +224,28 @@ export function StoryPage() {
     }
   };
 
+  const getLatestRecording = () => recordings[0];
+
+  const formatMetricsData = (recording: StoryRecording): MetricsData => ({
+    metrics: {
+      fluency: recording.fluency_score,
+      pronunciation: recording.pronunciation_score,
+      accuracy: recording.accuracy_score,
+      comprehension: recording.comprehension_score
+    },
+    feedback: {
+      strengths: recording.strengths,
+      improvements: recording.improvements,
+      suggestions: recording.suggestions
+    },
+    details: {
+      wordsPerMinute: recording.words_per_minute,
+      pauseCount: recording.pause_count,
+      errorCount: recording.error_count,
+      selfCorrections: recording.self_corrections
+    }
+  });
+
   if (loading) {
     return (
       <div className="flex items-center justify-center min-h-[60vh]">
@@ -105,6 +298,40 @@ export function StoryPage() {
           </div>
 
+          {/* Reading metrics dashboard */}
+          {loadingRecordings ? (
+            <div className="bg-white rounded-lg shadow-sm p-6">
+              <div className="h-24 bg-gray-100 rounded-lg animate-pulse" />
+            </div>
+          ) : recordings.length > 0 ? (
+            <StoryMetrics data={formatMetricsData(getLatestRecording())} />
+          ) : (
+            <div className="bg-white rounded-lg shadow-sm p-6">
+              <p className="text-sm text-gray-500 text-center">
+                Você ainda não tem gravações para esta história.
+                Faça sua primeira gravação para ver suas métricas!
+              </p>
+            </div>
+          )}
+
+          {/* Recording history */}
+          {recordings.length > 1 && (
+            <div className="space-y-3">
+              <h3 className="text-lg font-semibold text-gray-800">Histórico de Gravações</h3>
+              <div className="space-y-2">
+                {recordings.slice(1).map((recording) => (
+                  <RecordingHistoryCard key={recording.id} recording={recording} />
+                ))}
+              </div>
+            </div>
+          )}
+
           {/* Imagem da página atual */}
           {story.content.pages[currentPage].image && (