diff --git a/VideoPreview.md b/VideoPreview.md
new file mode 100644
index 0000000..c5a003d
--- /dev/null
+++ b/VideoPreview.md
@@ -0,0 +1,120 @@
+# Video Preview Component
+
+The VideoPreview component provides a complete interface for previewing rendered video with audio mixing capabilities.
+
+## Basic Usage
+
+```tsx
+import { useState } from 'react';
+
+import { VideoPreview } from '@saebyn/glowing-telegram-video-editor';
+import type { PreviewSettings } from '@saebyn/glowing-telegram-video-editor';
+
+// Example preview settings
+const settings: PreviewSettings = {
+  cutlist: [
+    { id: '1', start: 10000, end: 30000 },
+    { id: '2', start: 45000, end: 75000 },
+  ],
+  audioChannels: [
+    { id: '1', name: 'Main Audio', level: 0.8, muted: false },
+    { id: '2', name: 'Background Music', level: 0.3, muted: false },
+  ],
+  waveformData: [
+    {
+      channelId: '1',
+      amplitudes: [/* amplitude data */],
+      duration: 120000,
+      sampleRate: 44100,
+    },
+  ],
+};
+
+function MyVideoPreview() {
+  const [previewSettings, setPreviewSettings] = useState(settings);
+
+  const handleRegenerate = (newSettings: PreviewSettings) => {
+    // Send settings to backend to regenerate preview
+    console.log('Regenerating preview with:', newSettings);
+  };
+
+  const handleSave = (newSettings: PreviewSettings) => {
+    // Save audio settings to backend
+    console.log('Saving settings:', newSettings);
+  };
+
+  return (
+    <VideoPreview
+      settings={previewSettings}
+      previewVideoUrl="https://example.com/preview/playlist.m3u8"
+      duration={120000}
+      onSettingsChange={setPreviewSettings}
+      onRegenerate={handleRegenerate}
+      onSave={handleSave}
+    />
+  );
+}
+```
+
+## Features
+
+- **HLS Video Playback**: Uses the existing VideoPlayer component with HLS.js support
+- **Audio Mixer**: Configurable audio levels and mute controls for each channel
+- **Waveform Timeline**: Visual representation of audio with clickable seeking
+- **Cutlist Visualization**: Shows selected clips on the timeline
+- **Real-time Updates**: Changes to audio settings are reflected immediately
+- **Keyboard Accessibility**: Waveform supports keyboard navigation
+
+## Component Architecture
+
+The VideoPreview component follows the atomic design pattern:
+
+### Atoms
+- `AudioLevelSlider`: Individual volume control slider
+- `AudioChannelControl`: Complete control for a single audio channel
+- `WaveformDisplay`: Canvas-based waveform visualization
+
+### Molecules
+- `AudioMixerPanel`: Panel containing all audio channel controls
+- `PreviewTimeline`: Timeline with waveforms and cutlist visualization
+
+### Organisms
+- `VideoPreview`: Complete preview interface composing all subcomponents
+
+## Types
+
+```tsx
+interface AudioChannel {
+  id: string;
+  name: string;
+  level: number; // 0.0 to 1.0
+  muted: boolean;
+}
+
+interface WaveformData {
+  channelId: string;
+  amplitudes: number[];
+  duration: number;
+  sampleRate: number;
+}
+
+interface PreviewSettings {
+  cutlist: VideoClip[];
+  audioChannels: AudioChannel[];
+  waveformData: WaveformData[];
+}
+```
+
+## Backend Integration
+
+The component is designed to work with a backend that can:
+
+1. **Generate Preview Videos**: When `onRegenerate` is called, send the cutlist and audio settings to generate an HLS preview
+2. **Provide Waveform Data**: Extract audio waveform data for visualization
+3. **Detect Audio Channels**: Analyze source video to determine available audio channels
+4. 
**Save Settings**: Persist audio mixing settings for final render + +## Accessibility + +- Keyboard navigation support for waveform seeking (arrow keys) +- Proper ARIA labels and roles +- Screen reader compatible controls +- Focus management for interactive elements \ No newline at end of file diff --git a/src/components/VideoPreview.test.tsx b/src/components/VideoPreview.test.tsx new file mode 100644 index 0000000..e4390d6 --- /dev/null +++ b/src/components/VideoPreview.test.tsx @@ -0,0 +1,100 @@ +import { render } from "@testing-library/react"; +import { describe, expect, it, vi } from "vitest"; + +import AudioChannelControl from "@/components/atoms/AudioChannelControl"; +import AudioLevelSlider from "@/components/atoms/AudioLevelSlider"; +import WaveformDisplay from "@/components/atoms/WaveformDisplay"; +import AudioMixerPanel from "@/components/molecules/AudioMixerPanel"; +import PreviewTimeline from "@/components/molecules/PreviewTimeline"; +import VideoPreview from "@/components/organisms/VideoPreview"; + +import type { + AudioChannel, + PreviewSettings, + VideoClip, + WaveformData, +} from "@/types"; + +describe("Video Preview Components", () => { + const sampleAudioChannel: AudioChannel = { + id: "1", + name: "Test Channel", + level: 0.5, + muted: false, + }; + + const sampleWaveformData: WaveformData = { + channelId: "1", + amplitudes: [0.1, 0.2, 0.3, 0.2, 0.1], + duration: 5000, + sampleRate: 44100, + }; + + const sampleClips: VideoClip[] = [ + { + id: "1", + start: 1000, + end: 2000, + }, + ]; + + const sampleSettings: PreviewSettings = { + cutlist: sampleClips, + audioChannels: [sampleAudioChannel], + waveformData: [sampleWaveformData], + }; + + it("renders AudioLevelSlider", () => { + const onChange = vi.fn(); + const { container } = render( + , + ); + expect(container.querySelector("input[type='range']")).toBeTruthy(); + }); + + it("renders AudioChannelControl", () => { + const onChange = vi.fn(); + const { getByText } = render( + , + ); + expect(getByText("Test Channel")).toBeTruthy(); + }); + + it("renders WaveformDisplay", () => { + const { container } = render( + , + ); + expect(container.querySelector("canvas")).toBeTruthy(); + }); + + it("renders AudioMixerPanel", () => { + const onChange = vi.fn(); + const { getByText } = render( + , + ); + expect(getByText("Audio Mixer")).toBeTruthy(); + }); + + it("renders PreviewTimeline", () => { + const { getByText } = render( + , + ); + expect(getByText("Preview Timeline")).toBeTruthy(); + }); + + it("renders VideoPreview", () => { + const { getByText } = render( + , + ); + expect(getByText("Video Preview")).toBeTruthy(); + }); +}); diff --git a/src/components/atoms/AudioChannelControl.stories.tsx b/src/components/atoms/AudioChannelControl.stories.tsx new file mode 100644 index 0000000..5fb6b1c --- /dev/null +++ b/src/components/atoms/AudioChannelControl.stories.tsx @@ -0,0 +1,116 @@ +import type { AudioChannel } from "@/types"; +import { action } from "@storybook/addon-actions"; +import { useState } from "react"; +import AudioChannelControl from "./AudioChannelControl"; + +export default { + title: "Atoms/AudioChannelControl", + component: AudioChannelControl, + tags: ["atoms"], +}; + +const mockChannel: AudioChannel = { + id: "channel-1", + name: "Audio Track 1", + level: 0.75, + muted: false, +}; + +const mockMutedChannel: AudioChannel = { + id: "channel-2", + name: "Audio Track 2", + level: 0.5, + muted: true, +}; + +// Interactive wrapper for testing name editing +function InteractiveWrapper({ + initialChannel, + ...props 
+}: { + initialChannel: AudioChannel; +} & Partial>) { + const [channel, setChannel] = useState(initialChannel); + + const handleChange = (updatedChannel: AudioChannel) => { + setChannel(updatedChannel); + action("onChange")(updatedChannel); + }; + + return ( + + ); +} + +export const Default = { + render: () => , +}; + +export const Muted = { + render: () => , +}; + +export const Disabled = { + render: () => ( + + ), +}; + +export const LowLevel = { + render: () => ( + + ), +}; + +export const HighLevel = { + render: () => ( + + ), +}; + +export const WithNameEdit = { + render: () => ( + + ), +}; + +export const WithNameEditMuted = { + render: () => ( + + ), +}; + +export const WithNameEditDisabled = { + render: () => ( + + ), +}; + +export const WithNameEditEmptyName = { + render: () => ( + + ), +}; diff --git a/src/components/atoms/AudioChannelControl.tsx b/src/components/atoms/AudioChannelControl.tsx new file mode 100644 index 0000000..2020771 --- /dev/null +++ b/src/components/atoms/AudioChannelControl.tsx @@ -0,0 +1,77 @@ +import type { AudioChannel } from "@/types"; +import AudioChannelNameEditor from "./AudioChannelNameEditor"; +import AudioLevelSlider from "./AudioLevelSlider"; +import IconButton from "./IconButton"; + +interface AudioChannelControlProps { + /** + * Audio channel configuration + */ + channel: AudioChannel; + /** + * Callback when channel settings change + */ + onChange: (channel: AudioChannel) => void; + /** + * Whether the control is disabled + */ + disabled?: boolean; + /** + * Whether the channel name can be edited + */ + allowNameEdit?: boolean; +} + +export default function AudioChannelControl({ + channel, + onChange, + disabled = false, + allowNameEdit = false, +}: AudioChannelControlProps) { + const handleLevelChange = (level: number) => { + onChange({ ...channel, level }); + }; + + const handleMuteToggle = () => { + onChange({ ...channel, muted: !channel.muted }); + }; + + const handleNameChange = (name: string) => { + onChange({ ...channel, name }); + }; + + return ( +
+
+
+ {allowNameEdit ? ( + + ) : ( +
+ {channel.name} +
+ )} +
+ +
+
+ +
+
+ ); +} diff --git a/src/components/atoms/AudioChannelNameEditor.stories.tsx b/src/components/atoms/AudioChannelNameEditor.stories.tsx new file mode 100644 index 0000000..d955bc8 --- /dev/null +++ b/src/components/atoms/AudioChannelNameEditor.stories.tsx @@ -0,0 +1,126 @@ +import { action } from "@storybook/addon-actions"; +import { useState } from "react"; +import AudioChannelNameEditor from "./AudioChannelNameEditor"; + +export default { + title: "Atoms/AudioChannelNameEditor", + component: AudioChannelNameEditor, + tags: ["atoms"], +}; + +// Interactive wrapper component for stories +function InteractiveWrapper({ + initialName, + ...props +}: { initialName: string } & Partial< + React.ComponentProps +>) { + const [name, setName] = useState(initialName); + + const handleNameChange = (newName: string) => { + setName(newName); + action("onNameChange")(newName); + }; + + return ( +
+ +
+ ); +} + +export const Default = { + render: () => , +}; + +export const EmptyName = { + render: () => , +}; + +export const LongName = { + render: () => ( + + ), +}; + +export const CustomPlaceholder = { + render: () => ( + + ), +}; + +export const Disabled = { + render: () => ( + + ), +}; + +export const DisabledEmpty = { + render: () => ( + + ), +}; + +export const ShortMaxLength = { + render: () => , +}; + +export const MultipleChannels = { + render: () => { + const [channels, setChannels] = useState([ + { id: "1", name: "Main Audio" }, + { id: "2", name: "Commentary" }, + { id: "3", name: "" }, + { id: "4", name: "Music Track" }, + ]); + + const handleChannelNameChange = (id: string, newName: string) => { + setChannels((prev) => + prev.map((channel) => + channel.id === id ? { ...channel, name: newName } : channel, + ), + ); + action("onNameChange")(`Channel ${id}: ${newName}`); + }; + + return ( +
+ {channels.map((channel) => ( +
+ #{channel.id} + + handleChannelNameChange(channel.id, newName) + } + placeholder={`Channel ${channel.id}`} + /> +
+ ))} +
+ ); + }, +}; + +export const InDarkMode = { + render: () => ( +
+
+ + + +
+
+ ), +}; diff --git a/src/components/atoms/AudioChannelNameEditor.tsx b/src/components/atoms/AudioChannelNameEditor.tsx new file mode 100644 index 0000000..f19465c --- /dev/null +++ b/src/components/atoms/AudioChannelNameEditor.tsx @@ -0,0 +1,117 @@ +import { useEffect, useRef, useState } from "react"; + +interface AudioChannelNameEditorProps { + /** + * Current name of the audio channel + */ + name: string; + /** + * Callback when name changes + */ + onNameChange: (name: string) => void; + /** + * Whether the editor is disabled + */ + disabled?: boolean; + /** + * Placeholder text when name is empty + */ + placeholder?: string; + /** + * Maximum length for channel name + */ + maxLength?: number; +} + +export default function AudioChannelNameEditor({ + name, + onNameChange, + disabled = false, + placeholder = "Channel Name", + maxLength = 50, +}: AudioChannelNameEditorProps) { + const [isEditing, setIsEditing] = useState(false); + const [editingName, setEditingName] = useState(name); + const inputRef = useRef(null); + + useEffect(() => { + setEditingName(name); + }, [name]); + + useEffect(() => { + if (isEditing && inputRef.current) { + inputRef.current.focus(); + inputRef.current.select(); + } + }, [isEditing]); + + const handleStartEdit = () => { + if (!disabled) { + setIsEditing(true); + setEditingName(name); + } + }; + + const handleSave = () => { + const trimmedName = editingName.trim(); + if (trimmedName && trimmedName !== name) { + onNameChange(trimmedName); + } + setIsEditing(false); + }; + + const handleCancel = () => { + setEditingName(name); + setIsEditing(false); + }; + + const handleKeyDown = (e: React.KeyboardEvent) => { + if (e.key === "Enter") { + e.preventDefault(); + handleSave(); + } else if (e.key === "Escape") { + e.preventDefault(); + handleCancel(); + } + }; + + const handleBlur = () => { + handleSave(); + }; + + if (isEditing) { + return ( + setEditingName(e.target.value)} + onKeyDown={handleKeyDown} + onBlur={handleBlur} + maxLength={maxLength} + placeholder={placeholder} + className="text-sm font-medium bg-white dark:bg-gray-800 border border-blue-500 dark:border-blue-400 rounded px-2 py-1 text-gray-900 dark:text-white min-w-0 w-full focus:outline-none focus:ring-2 focus:ring-blue-500 dark:focus:ring-blue-400" + aria-label="Edit channel name" + /> + ); + } + + return ( + + ); +} diff --git a/src/components/atoms/AudioLevelSlider.stories.tsx b/src/components/atoms/AudioLevelSlider.stories.tsx new file mode 100644 index 0000000..178dda7 --- /dev/null +++ b/src/components/atoms/AudioLevelSlider.stories.tsx @@ -0,0 +1,58 @@ +import { action } from "@storybook/addon-actions"; +import AudioLevelSlider from "./AudioLevelSlider"; + +export default { + title: "Atoms/AudioLevelSlider", + component: AudioLevelSlider, + tags: ["atoms"], +}; + +export const Default = { + args: { + level: 0.75, + onChange: action("onChange"), + disabled: false, + }, +}; + +export const WithLabel = { + args: { + level: 0.5, + onChange: action("onChange"), + label: "Master Volume", + disabled: false, + }, +}; + +export const Muted = { + args: { + level: 0, + onChange: action("onChange"), + disabled: false, + }, +}; + +export const FullVolume = { + args: { + level: 1.0, + onChange: action("onChange"), + disabled: false, + }, +}; + +export const Disabled = { + args: { + level: 0.6, + onChange: action("onChange"), + disabled: true, + }, +}; + +export const DisabledWithLabel = { + args: { + level: 0.3, + onChange: action("onChange"), + label: "Disabled Track", + disabled: true, + }, +}; diff --git 
a/src/components/atoms/AudioLevelSlider.tsx b/src/components/atoms/AudioLevelSlider.tsx new file mode 100644 index 0000000..3e420f4 --- /dev/null +++ b/src/components/atoms/AudioLevelSlider.tsx @@ -0,0 +1,66 @@ +import type React from "react"; +import { useId } from "react"; + +interface AudioLevelSliderProps { + /** + * Current audio level from 0.0 to 1.0 + */ + level: number; + /** + * Callback when level changes + */ + onChange: (level: number) => void; + /** + * Label for the slider + */ + label?: string; + /** + * Whether the slider is disabled + */ + disabled?: boolean; +} + +export default function AudioLevelSlider({ + level, + onChange, + label, + disabled = false, +}: AudioLevelSliderProps) { + const sliderId = useId(); + + const handleChange = (event: React.ChangeEvent) => { + const newLevel = Number.parseFloat(event.target.value); + onChange(newLevel); + }; + + return ( +
+ {label && ( + + )} +
+ 0 + + 1 +
+
+ {Math.round(level * 100)}% +
+
+ ); +} diff --git a/src/components/atoms/WaveformDisplay.stories.tsx b/src/components/atoms/WaveformDisplay.stories.tsx new file mode 100644 index 0000000..cb95630 --- /dev/null +++ b/src/components/atoms/WaveformDisplay.stories.tsx @@ -0,0 +1,91 @@ +import type { WaveformData } from "@/types"; +import { action } from "@storybook/addon-actions"; +import WaveformDisplay from "./WaveformDisplay"; + +export default { + title: "Atoms/WaveformDisplay", + component: WaveformDisplay, + tags: ["atoms"], +}; + +// Generate sample waveform data +const generateWaveformData = (samples = 200): WaveformData => { + const amplitudes: number[] = []; + for (let i = 0; i < samples; i++) { + // Generate a mix of sine waves for realistic waveform + const freq1 = Math.sin((i / samples) * Math.PI * 4) * 0.5; + const freq2 = Math.sin((i / samples) * Math.PI * 8) * 0.3; + const noise = (Math.random() - 0.5) * 0.2; + amplitudes.push(Math.abs(freq1 + freq2 + noise)); + } + return { + channelId: "channel-1", + amplitudes, + duration: 60000, // 1 minute + sampleRate: 44100, + }; +}; + +const mockWaveformData = generateWaveformData(); + +export const Default = { + args: { + waveformData: mockWaveformData, + width: 400, + height: 80, + playheadPosition: 15000, // 15 seconds + onSeek: action("onSeek"), + }, +}; + +export const Large = { + args: { + waveformData: mockWaveformData, + width: 800, + height: 120, + playheadPosition: 30000, // 30 seconds + onSeek: action("onSeek"), + }, +}; + +export const CustomColors = { + args: { + waveformData: mockWaveformData, + width: 600, + height: 100, + playheadPosition: 45000, // 45 seconds + color: "#10b981", // emerald + playheadColor: "#f59e0b", // amber + onSeek: action("onSeek"), + }, +}; + +export const Interactive = { + args: { + waveformData: mockWaveformData, + width: 500, + height: 80, + playheadPosition: 0, + onSeek: action("onSeek"), + }, +}; + +export const NoPlayhead = { + args: { + waveformData: mockWaveformData, + width: 400, + height: 80, + // No playheadPosition provided - should default to 0 + onSeek: action("onSeek"), + }, +}; + +export const ReadOnly = { + args: { + waveformData: mockWaveformData, + width: 400, + height: 80, + playheadPosition: 20000, + // No onSeek callback - should be read-only + }, +}; diff --git a/src/components/atoms/WaveformDisplay.tsx b/src/components/atoms/WaveformDisplay.tsx new file mode 100644 index 0000000..67398eb --- /dev/null +++ b/src/components/atoms/WaveformDisplay.tsx @@ -0,0 +1,141 @@ +import type { WaveformData } from "@/types"; +import { useEffect, useRef } from "react"; + +interface WaveformDisplayProps { + /** + * Waveform data to display + */ + waveformData: WaveformData; + /** + * Width of the waveform display + */ + width?: number; + /** + * Height of the waveform display + */ + height?: number; + /** + * Current playhead position in milliseconds + */ + playheadPosition?: number; + /** + * Color of the waveform + */ + color?: string; + /** + * Color of the playhead + */ + playheadColor?: string; + /** + * Callback when user clicks on the waveform + */ + onSeek?: (milliseconds: number) => void; +} + +export default function WaveformDisplay({ + waveformData, + width = 400, + height = 80, + playheadPosition = 0, + color = "#3b82f6", + playheadColor = "#ef4444", + onSeek, +}: WaveformDisplayProps) { + const canvasRef = useRef(null); + + useEffect(() => { + const canvas = canvasRef.current; + if (!canvas) return; + + const ctx = canvas.getContext("2d"); + if (!ctx) return; + + // Clear canvas + ctx.clearRect(0, 0, width, 
height); + + // Draw waveform + const { amplitudes } = waveformData; + if (amplitudes.length === 0) return; + + const barWidth = width / amplitudes.length; + const halfHeight = height / 2; + + ctx.fillStyle = color; + + for (let i = 0; i < amplitudes.length; i++) { + const amplitude = Math.abs(amplitudes[i]); + const barHeight = amplitude * halfHeight; + const x = i * barWidth; + + // Draw positive amplitude + ctx.fillRect(x, halfHeight - barHeight, barWidth - 1, barHeight); + // Draw negative amplitude (mirrored) + ctx.fillRect(x, halfHeight, barWidth - 1, barHeight); + } + + // Draw playhead + if (playheadPosition >= 0 && waveformData.duration > 0) { + const playheadX = (playheadPosition / waveformData.duration) * width; + ctx.strokeStyle = playheadColor; + ctx.lineWidth = 2; + ctx.beginPath(); + ctx.moveTo(playheadX, 0); + ctx.lineTo(playheadX, height); + ctx.stroke(); + } + }, [waveformData, width, height, playheadPosition, color, playheadColor]); + + const handleClick = (event: React.MouseEvent) => { + if (!onSeek || waveformData.duration === 0) return; + + const canvas = canvasRef.current; + if (!canvas) return; + + const rect = canvas.getBoundingClientRect(); + const x = event.clientX - rect.left; + const relativeX = x / width; + const seekTime = relativeX * waveformData.duration; + + onSeek(Math.max(0, Math.min(seekTime, waveformData.duration))); + }; + + const handleKeyDown = (event: React.KeyboardEvent) => { + if (!onSeek || waveformData.duration === 0) return; + + // Allow seeking with arrow keys + if (event.key === "ArrowLeft" || event.key === "ArrowRight") { + event.preventDefault(); + const step = waveformData.duration * 0.05; // 5% step + const currentTime = playheadPosition || 0; + const newTime = + event.key === "ArrowLeft" + ? Math.max(0, currentTime - step) + : Math.min(waveformData.duration, currentTime + step); + onSeek(newTime); + } + }; + + return ( +
+ + {waveformData.amplitudes.length === 0 && ( +
+ No waveform data +
+ )} +
+ ); +} diff --git a/src/components/molecules/AudioMixerPanel.stories.tsx b/src/components/molecules/AudioMixerPanel.stories.tsx new file mode 100644 index 0000000..26d5619 --- /dev/null +++ b/src/components/molecules/AudioMixerPanel.stories.tsx @@ -0,0 +1,157 @@ +import type { AudioChannel } from "@/types"; +import { action } from "@storybook/addon-actions"; +import { useState } from "react"; +import AudioMixerPanel from "./AudioMixerPanel"; + +export default { + title: "Molecules/AudioMixerPanel", + component: AudioMixerPanel, + tags: ["molecules"], + decorators: [ + (story: () => React.ReactNode) => ( +
+ {story()} +
+ ), + ], +}; + +const mockChannels: AudioChannel[] = [ + { + id: "channel-1", + name: "Game Audio", + level: 0.8, + muted: false, + }, + { + id: "channel-2", + name: "Microphone", + level: 0.6, + muted: false, + }, + { + id: "channel-3", + name: "Desktop Audio", + level: 0.4, + muted: true, + }, +]; + +// Interactive wrapper for testing name editing +function InteractiveWrapper({ + initialChannels, + ...props +}: { + initialChannels: AudioChannel[]; +} & Partial>) { + const [channels, setChannels] = useState(initialChannels); + + const handleChange = (updatedChannels: AudioChannel[]) => { + setChannels(updatedChannels); + action("onChange")(updatedChannels); + }; + + return ( + + ); +} + +export const Default = { + render: () => , +}; + +export const SingleChannel = { + render: () => , +}; + +export const ManyChannels = { + render: () => ( + + ), +}; + +export const Disabled = { + render: () => ( + + ), +}; + +export const Saving = { + render: () => ( + + ), +}; + +export const AllMuted = { + render: () => ( + ({ + ...channel, + muted: true, + }))} + /> + ), +}; + +export const NoSaveCallback = { + render: () => ( + + ), +}; + +export const WithNameEdit = { + render: () => ( + + ), +}; + +export const WithNameEditAndEmptyNames = { + render: () => ( + + ), +}; + +export const WithNameEditDisabled = { + render: () => ( + + ), +}; diff --git a/src/components/molecules/AudioMixerPanel.tsx b/src/components/molecules/AudioMixerPanel.tsx new file mode 100644 index 0000000..4a3c394 --- /dev/null +++ b/src/components/molecules/AudioMixerPanel.tsx @@ -0,0 +1,130 @@ +import AudioChannelControl from "@/components/atoms/AudioChannelControl"; +import Button from "@/components/atoms/Button"; +import type { AudioChannel } from "@/types"; +import { hasAudioChanges, isAllMuted } from "@/utils/audioChannels"; + +interface AudioMixerPanelProps { + /** + * Array of audio channels to control + */ + channels: AudioChannel[]; + /** + * Callback when any channel changes + */ + onChange: (channels: AudioChannel[]) => void; + /** + * Callback when save is clicked + */ + onSave?: () => void; + /** + * Whether the mixer is disabled + */ + disabled?: boolean; + /** + * Whether save is in progress + */ + saving?: boolean; + /** + * Whether channel names can be edited + */ + allowNameEdit?: boolean; +} + +export default function AudioMixerPanel({ + channels, + onChange, + onSave, + disabled = false, + saving = false, + allowNameEdit = false, +}: AudioMixerPanelProps) { + const handleChannelChange = (updatedChannel: AudioChannel) => { + const updatedChannels = channels.map((channel) => + channel.id === updatedChannel.id ? updatedChannel : channel, + ); + onChange(updatedChannels); + }; + + const handleMasterMute = () => { + const allMuted = channels.every((channel) => channel.muted); + const updatedChannels = channels.map((channel) => ({ + ...channel, + muted: !allMuted, + })); + onChange(updatedChannels); + }; + + const handleResetLevels = () => { + const resetChannels = channels.map((channel) => ({ + ...channel, + level: 1.0, + muted: false, + })); + onChange(resetChannels); + }; + + const allMuted = isAllMuted(channels); + const hasChanges = hasAudioChanges(channels); + + return ( +
+
+

+ Audio Mixer +

+
+ + + {onSave && ( + + )} +
+
+ +
+ {channels.length === 0 ? ( +
+ No audio channels available +
+ ) : ( + channels.map((channel) => ( + + )) + )} +
+ + {channels.length > 0 && ( +
+
+ {channels.filter((c) => !c.muted).length} of {channels.length}{" "} + channels active +
+
+ )} +
+ ); +} diff --git a/src/components/molecules/PreviewTimeline.stories.tsx b/src/components/molecules/PreviewTimeline.stories.tsx new file mode 100644 index 0000000..2b83869 --- /dev/null +++ b/src/components/molecules/PreviewTimeline.stories.tsx @@ -0,0 +1,162 @@ +import type { VideoClip, WaveformData } from "@/types"; +import { action } from "@storybook/addon-actions"; +import PreviewTimeline from "./PreviewTimeline"; + +export default { + title: "Molecules/PreviewTimeline", + component: PreviewTimeline, + tags: ["molecules"], + decorators: [ + (story: () => React.ReactNode) => ( +
+ {story()} +
+ ), + ], +}; + +// Generate sample waveform data +const generateWaveformData = ( + channelId: string, + samples = 400, +): WaveformData => { + const amplitudes: number[] = []; + for (let i = 0; i < samples; i++) { + // Generate different patterns for different channels + let amplitude = 0; + if (channelId === "channel-1") { + // Game audio - more complex waveform + amplitude = + Math.sin((i / samples) * Math.PI * 6) * 0.6 + + Math.sin((i / samples) * Math.PI * 12) * 0.3; + } else if (channelId === "channel-2") { + // Microphone - speech-like pattern + amplitude = + Math.sin((i / samples) * Math.PI * 3) * 0.4 + + (Math.random() - 0.5) * 0.2; + } else { + // Desktop audio - more uniform + amplitude = Math.sin((i / samples) * Math.PI * 4) * 0.5; + } + amplitudes.push(Math.abs(amplitude)); + } + return { + channelId, + amplitudes, + duration: 120000, // 2 minutes + sampleRate: 44100, + }; +}; + +const mockWaveformData: WaveformData[] = [ + generateWaveformData("channel-1"), + generateWaveformData("channel-2"), + generateWaveformData("channel-3"), +]; + +const mockCutlist: VideoClip[] = [ + { + id: "clip-1", + start: 5000, // 5 seconds + end: 25000, // 25 seconds + }, + { + id: "clip-2", + start: 40000, // 40 seconds + end: 70000, // 70 seconds + }, + { + id: "clip-3", + start: 90000, // 90 seconds + end: 110000, // 110 seconds + }, +]; + +export const Default = { + args: { + waveformData: mockWaveformData, + playheadPosition: 30000, // 30 seconds + cutlist: mockCutlist, + duration: 120000, // 2 minutes + width: 800, + waveformHeight: 60, + onSeek: action("onSeek"), + onCutlistChange: action("onCutlistChange"), + }, +}; + +export const SingleChannel = { + args: { + waveformData: [mockWaveformData[0]], + playheadPosition: 15000, + cutlist: mockCutlist, + duration: 120000, + width: 800, + waveformHeight: 80, + onSeek: action("onSeek"), + onCutlistChange: action("onCutlistChange"), + }, +}; + +export const LongTimeline = { + args: { + waveformData: mockWaveformData, + playheadPosition: 180000, // 3 minutes + cutlist: [ + ...mockCutlist, + { + id: "clip-4", + start: 150000, // 2.5 minutes + end: 210000, // 3.5 minutes + }, + { + id: "clip-5", + start: 240000, // 4 minutes + end: 280000, // 4:40 + }, + ], + duration: 300000, // 5 minutes + width: 1200, + waveformHeight: 50, + onSeek: action("onSeek"), + onCutlistChange: action("onCutlistChange"), + }, +}; + +export const EmptyCutlist = { + args: { + waveformData: mockWaveformData, + playheadPosition: 45000, + cutlist: [], + duration: 120000, + width: 800, + waveformHeight: 60, + onSeek: action("onSeek"), + onCutlistChange: action("onCutlistChange"), + }, +}; + +export const ReadOnly = { + args: { + waveformData: mockWaveformData, + playheadPosition: 60000, + cutlist: mockCutlist, + duration: 120000, + width: 800, + waveformHeight: 60, + // No callbacks provided - should be read-only + }, +}; + +export const CompactView = { + args: { + waveformData: mockWaveformData, + playheadPosition: 30000, + cutlist: mockCutlist, + duration: 120000, + width: 600, + waveformHeight: 40, + onSeek: action("onSeek"), + onCutlistChange: action("onCutlistChange"), + }, +}; diff --git a/src/components/molecules/PreviewTimeline.tsx b/src/components/molecules/PreviewTimeline.tsx new file mode 100644 index 0000000..01660b0 --- /dev/null +++ b/src/components/molecules/PreviewTimeline.tsx @@ -0,0 +1,143 @@ +import WaveformDisplay from "@/components/atoms/WaveformDisplay"; +import type { VideoClip, WaveformData } from "@/types"; +import { formatMs } from "@/utils/duration"; + 
+interface PreviewTimelineProps { + /** + * Waveform data for all channels + */ + waveformData: WaveformData[]; + /** + * Current playhead position in milliseconds + */ + playheadPosition: number; + /** + * Selected cutlist clips + */ + cutlist: VideoClip[]; + /** + * Total duration in milliseconds + */ + duration: number; + /** + * Width of the timeline + */ + width?: number; + /** + * Height per waveform channel + */ + waveformHeight?: number; + /** + * Callback when user seeks to a time + */ + onSeek?: (milliseconds: number) => void; + /** + * Callback when cutlist is updated + */ + onCutlistChange?: (cutlist: VideoClip[]) => void; +} + +export default function PreviewTimeline({ + waveformData, + playheadPosition, + cutlist, + duration, + width = 800, + waveformHeight = 60, + onSeek, + onCutlistChange, +}: PreviewTimelineProps) { + const colors = [ + "#3b82f6", // blue + "#10b981", // emerald + "#f59e0b", // amber + "#ef4444", // red + "#8b5cf6", // violet + "#06b6d4", // cyan + ]; + + const handleSeek = (milliseconds: number) => { + onSeek?.(Math.max(0, Math.min(milliseconds, duration))); + }; + + return ( +
+
+

+ Preview Timeline +

+
+ {formatMs(playheadPosition)} / {formatMs(duration)} +
+
+ + {/* Cutlist visualization */} +
+
+ Selected Clips ({cutlist.length}) +
+
+ {cutlist.map((clip, index) => { + const left = (clip.start / duration) * 100; + const width = ((clip.end - clip.start) / duration) * 100; + return ( +
+ + {index + 1} + +
+ ); + })} + + {/* Playhead indicator */} +
+
+
+ + {/* Waveform displays */} +
+ {waveformData.length === 0 ? ( +
+ No waveform data available +
+ ) : ( + waveformData.map((waveform, index) => ( +
+
+ Channel {waveform.channelId} +
+ +
+ )) + )} +
+ + {/* Timeline ruler */} +
+
+ {formatMs(0)} + {formatMs(duration / 4)} + {formatMs(duration / 2)} + {formatMs((duration * 3) / 4)} + {formatMs(duration)} +
+
+
+
+ ); +} diff --git a/src/components/organisms/VideoPreview.stories.tsx b/src/components/organisms/VideoPreview.stories.tsx new file mode 100644 index 0000000..fb63e02 --- /dev/null +++ b/src/components/organisms/VideoPreview.stories.tsx @@ -0,0 +1,205 @@ +import type { + AudioChannel, + PreviewSettings, + VideoClip, + WaveformData, +} from "@/types"; +import { action } from "@storybook/addon-actions"; +import VideoPreview from "./VideoPreview"; + +export default { + title: "Organisms/VideoPreview", + component: VideoPreview, + tags: ["organisms"], + decorators: [ + (story: () => React.ReactNode) => ( +
{story()}
+ ), + ], + parameters: { + layout: "fullscreen", + }, +}; + +// Generate sample waveform data +const generateWaveformData = ( + channelId: string, + samples = 400, +): WaveformData => { + const amplitudes: number[] = []; + for (let i = 0; i < samples; i++) { + let amplitude = 0; + if (channelId === "channel-1") { + amplitude = + Math.sin((i / samples) * Math.PI * 6) * 0.6 + + Math.sin((i / samples) * Math.PI * 12) * 0.3; + } else if (channelId === "channel-2") { + amplitude = + Math.sin((i / samples) * Math.PI * 3) * 0.4 + + (Math.random() - 0.5) * 0.2; + } else { + amplitude = Math.sin((i / samples) * Math.PI * 4) * 0.5; + } + amplitudes.push(Math.abs(amplitude)); + } + return { + channelId, + amplitudes, + duration: 180000, // 3 minutes + sampleRate: 44100, + }; +}; + +const mockAudioChannels: AudioChannel[] = [ + { + id: "channel-1", + name: "Game Audio", + level: 0.8, + muted: false, + }, + { + id: "channel-2", + name: "Microphone", + level: 0.6, + muted: false, + }, + { + id: "channel-3", + name: "Desktop Audio", + level: 0.4, + muted: true, + }, +]; + +const mockWaveformData: WaveformData[] = [ + generateWaveformData("channel-1"), + generateWaveformData("channel-2"), + generateWaveformData("channel-3"), +]; + +const mockCutlist: VideoClip[] = [ + { + id: "clip-1", + start: 10000, // 10 seconds + end: 45000, // 45 seconds + }, + { + id: "clip-2", + start: 60000, // 1 minute + end: 120000, // 2 minutes + }, + { + id: "clip-3", + start: 140000, // 2:20 + end: 170000, // 2:50 + }, +]; + +const mockPreviewSettings: PreviewSettings = { + cutlist: mockCutlist, + audioChannels: mockAudioChannels, + waveformData: mockWaveformData, +}; + +export const Default = { + args: { + settings: mockPreviewSettings, + previewVideoUrl: + "https://demo.unified-streaming.com/k8s/features/stable/video/tears-of-steel/tears-of-steel.ism/.m3u8", + playheadPosition: 30000, // 30 seconds + duration: 180000, // 3 minutes + onSettingsChange: action("onSettingsChange"), + onRegenerate: action("onRegenerate"), + onSave: action("onSave"), + regenerating: false, + saving: false, + }, +}; + +export const Regenerating = { + args: { + settings: mockPreviewSettings, + previewVideoUrl: + "https://demo.unified-streaming.com/k8s/features/stable/video/tears-of-steel/tears-of-steel.ism/.m3u8", + playheadPosition: 60000, + duration: 180000, + onSettingsChange: action("onSettingsChange"), + onRegenerate: action("onRegenerate"), + onSave: action("onSave"), + regenerating: true, + saving: false, + }, +}; + +export const Saving = { + args: { + settings: mockPreviewSettings, + previewVideoUrl: + "https://demo.unified-streaming.com/k8s/features/stable/video/tears-of-steel/tears-of-steel.ism/.m3u8", + playheadPosition: 90000, + duration: 180000, + onSettingsChange: action("onSettingsChange"), + onRegenerate: action("onRegenerate"), + onSave: action("onSave"), + regenerating: false, + saving: true, + }, +}; + +export const SingleChannel = { + args: { + settings: { + ...mockPreviewSettings, + audioChannels: [mockAudioChannels[0]], + waveformData: [mockWaveformData[0]], + }, + previewVideoUrl: + "https://demo.unified-streaming.com/k8s/features/stable/video/tears-of-steel/tears-of-steel.ism/.m3u8", + playheadPosition: 45000, + duration: 180000, + onSettingsChange: action("onSettingsChange"), + onRegenerate: action("onRegenerate"), + onSave: action("onSave"), + regenerating: false, + saving: false, + }, +}; + +export const EmptyCutlist = { + args: { + settings: { + ...mockPreviewSettings, + cutlist: [], + }, + previewVideoUrl: + 
"https://demo.unified-streaming.com/k8s/features/stable/video/tears-of-steel/tears-of-steel.ism/.m3u8", + playheadPosition: 0, + duration: 180000, + onSettingsChange: action("onSettingsChange"), + onRegenerate: action("onRegenerate"), + onSave: action("onSave"), + regenerating: false, + saving: false, + }, +}; + +export const AllMuted = { + args: { + settings: { + ...mockPreviewSettings, + audioChannels: mockAudioChannels.map((channel) => ({ + ...channel, + muted: true, + })), + }, + previewVideoUrl: + "https://demo.unified-streaming.com/k8s/features/stable/video/tears-of-steel/tears-of-steel.ism/.m3u8", + playheadPosition: 75000, + duration: 180000, + onSettingsChange: action("onSettingsChange"), + onRegenerate: action("onRegenerate"), + onSave: action("onSave"), + regenerating: false, + saving: false, + }, +}; diff --git a/src/components/organisms/VideoPreview.tsx b/src/components/organisms/VideoPreview.tsx new file mode 100644 index 0000000..3820911 --- /dev/null +++ b/src/components/organisms/VideoPreview.tsx @@ -0,0 +1,182 @@ +import Button from "@/components/atoms/Button"; +import AudioMixerPanel from "@/components/molecules/AudioMixerPanel"; +import PreviewTimeline from "@/components/molecules/PreviewTimeline"; +import VideoPlayer, { + type VideoPlayerRef, +} from "@/components/molecules/VideoPlayer"; +import type { AudioChannel, PreviewSettings, VideoClip } from "@/types"; +import { hasAudioChanges } from "@/utils/audioChannels"; +import { useRef, useState } from "react"; + +interface VideoPreviewProps { + /** + * Preview settings including cutlist and audio configuration + */ + settings: PreviewSettings; + /** + * URL to the preview video (HLS stream) + */ + previewVideoUrl: string; + /** + * Current playhead position in milliseconds + */ + playheadPosition?: number; + /** + * Total duration of the preview in milliseconds + */ + duration: number; + /** + * Callback when preview settings change + */ + onSettingsChange?: (settings: PreviewSettings) => void; + /** + * Callback when user requests to re-render preview + */ + onRegenerate?: (settings: PreviewSettings) => void; + /** + * Callback when user saves settings + */ + onSave?: (settings: PreviewSettings) => void; + /** + * Whether the preview is currently being generated + */ + regenerating?: boolean; + /** + * Whether save is in progress + */ + saving?: boolean; +} + +export default function VideoPreview({ + settings, + previewVideoUrl, + playheadPosition = 0, + duration, + onSettingsChange, + onRegenerate, + onSave, + regenerating = false, + saving = false, +}: VideoPreviewProps) { + const videoPlayerRef = useRef(null); + const [currentTime, setCurrentTime] = useState(playheadPosition); + + const handleAudioChannelsChange = (channels: AudioChannel[]) => { + const updatedSettings = { + ...settings, + audioChannels: channels, + }; + onSettingsChange?.(updatedSettings); + }; + + const handleCutlistChange = (cutlist: VideoClip[]) => { + const updatedSettings = { + ...settings, + cutlist, + }; + onSettingsChange?.(updatedSettings); + }; + + const handleSeekToTime = (milliseconds: number) => { + videoPlayerRef.current?.seekTo(milliseconds); + setCurrentTime(milliseconds); + }; + + const handleTimeUpdate = (time: number) => { + setCurrentTime(time); + }; + + const handleRegenerate = () => { + onRegenerate?.(settings); + }; + + const handleSave = () => { + onSave?.(settings); + }; + + const hasChanges = hasAudioChanges(settings.audioChannels); + + return ( +
+ {/* Header */} +
+
+

+ Video Preview +

+
+ + +
+
+
+ +
+ {/* Main content area */} +
+ {/* Video player */} +
+ +
+ + {/* Timeline */} +
+ +
+
+ + {/* Audio mixer sidebar */} +
+
+ +
+
+
+ + {/* Status bar */} +
+
+
+ {settings.cutlist.length} clips selected •{" "} + {settings.audioChannels.filter((c) => !c.muted).length} of{" "} + {settings.audioChannels.length} audio channels active +
+
+ {regenerating && "Generating preview..."} + {saving && "Saving changes..."} + {!regenerating && !saving && hasChanges && "Changes pending"} +
+
+
+
+  );
+}
diff --git a/src/index.ts b/src/index.ts
index 5d4b2c6..4684262 100644
--- a/src/index.ts
+++ b/src/index.ts
@@ -1,8 +1,10 @@
 import "material-symbols/outlined.css";
 import "./index.css";
 
+import AudioChannelNameEditor from "@/components/atoms/AudioChannelNameEditor";
+import VideoPreview from "@/components/organisms/VideoPreview";
 import VideoSelectionPage from "@/components/pages/VideoSelectionPage";
 
-export { VideoSelectionPage };
+export { VideoSelectionPage, VideoPreview, AudioChannelNameEditor };
 
 export type * from "./types";
diff --git a/src/types.ts b/src/types.ts
index ee4e176..c8fc776 100644
--- a/src/types.ts
+++ b/src/types.ts
@@ -121,3 +121,59 @@ export type VideoClip = {
   */
  keyframeSrc?: string;
 };
+
+/**
+ * Audio channel information for preview
+ */
+export interface AudioChannel {
+  id: string;
+  name: string;
+  /**
+   * Audio level from 0.0 to 1.0
+   */
+  level: number;
+  /**
+   * Whether this channel is muted
+   */
+  muted: boolean;
+}
+
+/**
+ * Waveform data for visualization
+ */
+export interface WaveformData {
+  /**
+   * Audio channel ID this waveform belongs to
+   */
+  channelId: string;
+  /**
+   * Array of amplitude values for visualization
+   */
+  amplitudes: number[];
+  /**
+   * Duration this waveform data represents in milliseconds
+   */
+  duration: number;
+  /**
+   * Sample rate of the waveform data
+   */
+  sampleRate: number;
+}
+
+/**
+ * Preview settings for video rendering
+ */
+export interface PreviewSettings {
+  /**
+   * Selected cutlist for preview
+   */
+  cutlist: VideoClip[];
+  /**
+   * Audio channel configurations
+   */
+  audioChannels: AudioChannel[];
+  /**
+   * Waveform data for each channel
+   */
+  waveformData: WaveformData[];
+}
diff --git a/src/utils/audioChannels.ts b/src/utils/audioChannels.ts
new file mode 100644
index 0000000..f781f22
--- /dev/null
+++ b/src/utils/audioChannels.ts
@@ -0,0 +1,9 @@
+import type { AudioChannel } from "@/types";
+
+export function hasAudioChanges(channels: AudioChannel[]): boolean {
+  return channels.some((channel) => channel.level !== 1.0 || channel.muted);
+}
+
+export function isAllMuted(channels: AudioChannel[]): boolean {
+  return channels.every((channel) => channel.muted);
+}
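For reviewers trying this out, here is a minimal host-side sketch of how the new exports might be wired up. It assumes a React host application and two hypothetical backend endpoints (`/api/preview` and `/api/preview/settings`); the `PreviewPage` wrapper, the endpoint paths, and the props passed into it are illustrative and not part of this PR — only the `VideoPreview` props and the `PreviewSettings` type come from the code above.

```tsx
import { useState } from 'react';

import { VideoPreview } from '@saebyn/glowing-telegram-video-editor';
import type { PreviewSettings } from '@saebyn/glowing-telegram-video-editor';

interface PreviewPageProps {
  initialSettings: PreviewSettings;
  previewVideoUrl: string;
  durationMs: number;
}

// Host-application wrapper: keeps PreviewSettings in local state and forwards
// regenerate/save requests to hypothetical backend endpoints.
export function PreviewPage({
  initialSettings,
  previewVideoUrl,
  durationMs,
}: PreviewPageProps) {
  const [settings, setSettings] = useState(initialSettings);
  const [regenerating, setRegenerating] = useState(false);
  const [saving, setSaving] = useState(false);

  const handleRegenerate = async (next: PreviewSettings) => {
    setRegenerating(true);
    try {
      // Hypothetical endpoint: backend renders a new HLS preview from the
      // cutlist and audio channel levels.
      await fetch('/api/preview', {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify(next),
      });
    } finally {
      setRegenerating(false);
    }
  };

  const handleSave = async (next: PreviewSettings) => {
    setSaving(true);
    try {
      // Hypothetical endpoint: persists audio mixing settings for the final render.
      await fetch('/api/preview/settings', {
        method: 'PUT',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify(next),
      });
    } finally {
      setSaving(false);
    }
  };

  return (
    <VideoPreview
      settings={settings}
      previewVideoUrl={previewVideoUrl}
      duration={durationMs}
      onSettingsChange={setSettings}
      onRegenerate={handleRegenerate}
      onSave={handleSave}
      regenerating={regenerating}
      saving={saving}
    />
  );
}
```

In this arrangement `onSettingsChange` keeps mixer changes local and immediate, while `onRegenerate` and `onSave` are the only points that need a network round trip, matching the split described in the Backend Integration section of VideoPreview.md.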