feat: so much stuff

Maze Winther
2025-06-22 19:28:03 +02:00
parent e22aa6620c
commit 6ee16f9df8
9 changed files with 1229 additions and 83 deletions

View File

@ -4,25 +4,76 @@ import { Button } from "../ui/button";
import { AspectRatio } from "../ui/aspect-ratio";
import { DragOverlay } from "../ui/drag-overlay";
import { useMediaStore } from "@/stores/media-store";
import { processMediaFiles } from "@/lib/media-processing";
import { Plus, Image, Video, Music, Trash2, Upload } from "lucide-react";
import { useDragDrop } from "@/hooks/use-drag-drop";
import { useRef, useState } from "react";
import { toast } from "sonner";
export function MediaPanel() {
const { mediaItems, addMediaItem, removeMediaItem } = useMediaStore();
const fileInputRef = useRef<HTMLInputElement>(null);
const [isProcessing, setIsProcessing] = useState(false);
const processFiles = async (files: FileList | File[]) => {
setIsProcessing(true);
try {
const processedItems = await processMediaFiles(files);
for (const processedItem of processedItems) {
addMediaItem(processedItem);
toast.success(`Added ${processedItem.name} to project`);
}
} catch (error) {
console.error("Error processing files:", error);
toast.error("Failed to process files");
} finally {
setIsProcessing(false);
}
};
const { isDragOver, dragProps } = useDragDrop({
onDrop: (files) => {
processFiles(files);
},
});
const handleFileSelect = () => {
fileInputRef.current?.click();
};
const handleFileInputChange = (e: React.ChangeEvent<HTMLInputElement>) => {
if (e.target.files && e.target.files.length > 0) {
processFiles(e.target.files);
// Reset the input so the same file can be selected again
e.target.value = "";
}
};
const handleRemoveItem = (e: React.MouseEvent, itemId: string) => {
e.stopPropagation();
removeMediaItem(itemId);
toast.success("Media removed from project");
};
const handleDragStart = (e: React.DragEvent, item: any) => {
// Mark this as an internal app drag
e.dataTransfer.setData(
"application/x-media-item",
JSON.stringify({
id: item.id,
type: item.type,
name: item.name,
})
);
e.dataTransfer.effectAllowed = "copy";
};
const formatDuration = (duration: number) => {
const minutes = Math.floor(duration / 60);
const seconds = Math.floor(duration % 60);
return `${minutes}:${seconds.toString().padStart(2, "0")}`;
};
const getMediaIcon = (type: string) => {
@ -36,63 +87,188 @@ export function MediaPanel() {
}
};
const renderMediaPreview = (item: any) => {
switch (item.type) {
case "image":
return (
<img
src={item.url}
alt={item.name}
className="w-full h-full object-cover rounded cursor-grab active:cursor-grabbing"
loading="lazy"
draggable={true}
onDragStart={(e) => handleDragStart(e, item)}
/>
);
case "video":
return item.thumbnailUrl ? (
<div className="relative w-full h-full">
<img
src={item.thumbnailUrl}
alt={item.name}
className="w-full h-full object-cover rounded cursor-grab active:cursor-grabbing"
loading="lazy"
draggable={true}
onDragStart={(e) => handleDragStart(e, item)}
/>
<div className="absolute inset-0 flex items-center justify-center bg-black/20 rounded">
<Video className="h-6 w-6 text-white drop-shadow-md" />
</div>
{item.duration && (
<div className="absolute bottom-1 right-1 bg-black/70 text-white text-xs px-1 rounded">
{formatDuration(item.duration)}
</div>
)}
</div>
) : (
<div className="grid grid-cols-2 gap-2">
{mediaItems.map((item) => (
<Button
key={item.id}
variant="outline"
className="flex flex-col gap-2 p-2 h-auto overflow-hidden"
>
<AspectRatio ratio={16 / 9} className="w-full">
<div className="w-full h-full bg-muted/30 flex flex-col items-center justify-center text-muted-foreground">
{getMediaIcon(item.type)}
<span className="text-xs mt-1 truncate max-w-full px-1">
<div
className="w-full h-full bg-muted/30 flex flex-col items-center justify-center text-muted-foreground rounded cursor-grab active:cursor-grabbing"
draggable={true}
onDragStart={(e) => handleDragStart(e, item)}
>
<Video className="h-6 w-6 mb-1" />
<span className="text-xs">Video</span>
{item.duration && (
<span className="text-xs opacity-70">
{formatDuration(item.duration)}
</span>
)}
</div>
);
case "audio":
return (
<div
className="w-full h-full bg-gradient-to-br from-green-500/20 to-emerald-500/20 flex flex-col items-center justify-center text-muted-foreground rounded border border-green-500/20 cursor-grab active:cursor-grabbing"
draggable={true}
onDragStart={(e) => handleDragStart(e, item)}
>
<Music className="h-6 w-6 mb-1" />
<span className="text-xs">Audio</span>
{item.duration && (
<span className="text-xs opacity-70">
{formatDuration(item.duration)}
</span>
)}
</div>
);
default:
return (
<div
className="w-full h-full bg-muted/30 flex flex-col items-center justify-center text-muted-foreground rounded cursor-grab active:cursor-grabbing"
draggable={true}
onDragStart={(e) => handleDragStart(e, item)}
>
{getMediaIcon(item.type)}
<span className="text-xs mt-1">Unknown</span>
</div>
);
}
};
return (
<>
<input
ref={fileInputRef}
type="file"
accept="image/*,video/*,audio/*"
multiple
className="hidden"
onChange={handleFileInputChange}
/>
<div
className={`h-full overflow-y-auto transition-colors duration-200 relative ${
isDragOver ? "bg-accent/30 border-accent" : ""
}`}
{...dragProps}
>
<DragOverlay isVisible={isDragOver} />
<div className="space-y-4 p-2 h-full">
{/* Media Grid */}
{mediaItems.length === 0 ? (
<EmptyMedia
onFileSelect={handleFileSelect}
isProcessing={isProcessing}
/>
) : (
<div className="grid grid-cols-2 gap-2">
{mediaItems.map((item) => (
<div key={item.id} className="relative group">
<Button
variant="outline"
className="flex flex-col gap-2 p-2 h-auto overflow-hidden w-full relative"
>
<AspectRatio ratio={item.aspectRatio} className="w-full">
{renderMediaPreview(item)}
</AspectRatio>
<span className="text-xs truncate max-w-full px-1">
{item.name}
</span>
</Button>
{/* Remove button - positioned outside the button container */}
<div
className="absolute -top-2 -right-2 opacity-0 group-hover:opacity-100 transition-opacity z-20"
onDragStart={(e) => e.preventDefault()}
onDrag={(e) => e.preventDefault()}
>
<Button
variant="destructive"
size="icon"
className="h-6 w-6 pointer-events-auto"
onClick={(e) => handleRemoveItem(e, item.id)}
>
<Trash2 className="h-3 w-3" />
</Button>
</div>
</div>
))}
</div>
)}
</div>
</div>
</>
);
}
function EmptyMedia({
onFileSelect,
isProcessing,
}: {
onFileSelect: () => void;
isProcessing: boolean;
}) {
return (
<div className="flex flex-col items-center justify-center py-8 text-center h-full">
<div className="w-16 h-16 rounded-full bg-muted/30 flex items-center justify-center mb-4">
<Image className="h-8 w-8 text-muted-foreground" />
{isProcessing ? (
<div className="animate-spin">
<Upload className="h-8 w-8 text-muted-foreground" />
</div>
) : (
<Image className="h-8 w-8 text-muted-foreground" />
)}
</div>
<p className="text-sm text-muted-foreground">No media in project</p>
<p className="text-xs text-muted-foreground/70 mt-1">
Drag files or click to add media
<p className="text-sm text-muted-foreground">
{isProcessing ? "Processing files..." : "No media in project"}
</p>
<p className="text-xs text-muted-foreground/70 mt-1">
{isProcessing
? "Please wait while files are being processed"
: "Drag files or click to add media"}
</p>
{!isProcessing && (
<Button
variant="outline"
size="sm"
className="mt-4"
onClick={onFileSelect}
>
<Plus className="h-4 w-4 mr-2" />
Add Media
</Button>
)}
</div>
);
}

View File

@ -1,11 +1,111 @@
"use client";
import { useTimelineStore } from "@/stores/timeline-store";
import { useMediaStore } from "@/stores/media-store";
import { ImageTimelineTreatment } from "@/components/ui/image-timeline-treatment";
import { Button } from "@/components/ui/button";
import { Play, Pause } from "lucide-react";
import { useState } from "react";
export function PreviewPanel() {
const { tracks } = useTimelineStore();
const { mediaItems } = useMediaStore();
const [isPlaying, setIsPlaying] = useState(false);
// Get the first clip from the first track for preview (simplified for now)
const firstClip = tracks[0]?.clips[0];
const firstMediaItem = firstClip
? mediaItems.find((item) => item.id === firstClip.mediaId)
: null;
const renderPreviewContent = () => {
if (!firstMediaItem) {
return (
<div className="absolute inset-0 flex items-center justify-center text-muted-foreground/50 group-hover:text-muted-foreground/80 transition-colors">
Drop media here or click to import
</div>
);
}
if (firstMediaItem.type === "image") {
return (
<ImageTimelineTreatment
src={firstMediaItem.url}
alt={firstMediaItem.name}
targetAspectRatio={16 / 9}
className="w-full h-full rounded-lg"
backgroundType="blur"
/>
);
}
if (firstMediaItem.type === "video") {
return firstMediaItem.thumbnailUrl ? (
<img
src={firstMediaItem.thumbnailUrl}
alt={firstMediaItem.name}
className="w-full h-full object-cover rounded-lg"
/>
) : (
<div className="absolute inset-0 flex items-center justify-center text-muted-foreground/50">
Video Preview
</div>
);
}
if (firstMediaItem.type === "audio") {
return (
<div className="absolute inset-0 flex items-center justify-center bg-gradient-to-br from-green-500/20 to-emerald-500/20">
<div className="text-center">
<div className="text-6xl mb-4">🎵</div>
<p className="text-muted-foreground">{firstMediaItem.name}</p>
</div>
</div>
);
}
return null;
};
return (
<div className="h-full flex flex-col items-center justify-center p-4">
<div className="aspect-video bg-black/90 w-full max-w-4xl rounded-lg shadow-lg relative group overflow-hidden">
{renderPreviewContent()}
{/* Playback Controls Overlay */}
{firstMediaItem && (
<div className="absolute bottom-4 left-1/2 transform -translate-x-1/2 opacity-0 group-hover:opacity-100 transition-opacity">
<div className="flex items-center gap-2 bg-black/80 rounded-lg px-4 py-2">
<Button
variant="ghost"
size="icon"
className="text-white hover:bg-white/20"
onClick={() => setIsPlaying(!isPlaying)}
>
{isPlaying ? (
<Pause className="h-5 w-5" />
) : (
<Play className="h-5 w-5" />
)}
</Button>
<span className="text-white text-sm">
{firstClip?.name || "No clip selected"}
</span>
</div>
</div>
)}
</div>
{/* Preview Info */}
{firstMediaItem && (
<div className="mt-4 text-center">
<p className="text-sm text-muted-foreground">
Preview: {firstMediaItem.name}
{firstMediaItem.type === "image" &&
" (with CapCut-style treatment)"}
</p>
</div>
)}
</div>
);
}

View File

@ -5,11 +5,108 @@ import { Label } from "../ui/label";
import { Slider } from "../ui/slider";
import { ScrollArea } from "../ui/scroll-area";
import { Separator } from "../ui/separator";
import {
Select,
SelectContent,
SelectItem,
SelectTrigger,
SelectValue,
} from "../ui/select";
import { useTimelineStore } from "@/stores/timeline-store";
import { useMediaStore } from "@/stores/media-store";
import { ImageTimelineTreatment } from "@/components/ui/image-timeline-treatment";
import { useState } from "react";
export function PropertiesPanel() {
const { tracks } = useTimelineStore();
const { mediaItems } = useMediaStore();
const [backgroundType, setBackgroundType] = useState<
"blur" | "mirror" | "color"
>("blur");
const [backgroundColor, setBackgroundColor] = useState("#000000");
// Get the first image clip for preview (simplified)
const firstImageClip = tracks
.flatMap((track) => track.clips)
.find((clip) => {
const mediaItem = mediaItems.find((item) => item.id === clip.mediaId);
return mediaItem?.type === "image";
});
const firstImageItem = firstImageClip
? mediaItems.find((item) => item.id === firstImageClip.mediaId)
: null;
return (
<ScrollArea className="h-full">
<div className="space-y-6 p-5">
{/* Image Treatment - only show if an image is selected */}
{firstImageItem && (
<>
<div className="space-y-4">
<h3 className="text-sm font-medium">Image Treatment</h3>
<div className="space-y-4">
{/* Preview */}
<div className="space-y-2">
<Label>Preview</Label>
<div className="w-full aspect-video max-w-48">
<ImageTimelineTreatment
src={firstImageItem.url}
alt={firstImageItem.name}
targetAspectRatio={16 / 9}
className="rounded-sm border"
backgroundType={backgroundType}
backgroundColor={backgroundColor}
/>
</div>
</div>
{/* Background Type */}
<div className="space-y-2">
<Label htmlFor="bg-type">Background Type</Label>
<Select
value={backgroundType}
onValueChange={(value: any) => setBackgroundType(value)}
>
<SelectTrigger>
<SelectValue placeholder="Select background type" />
</SelectTrigger>
<SelectContent>
<SelectItem value="blur">Blur</SelectItem>
<SelectItem value="mirror">Mirror</SelectItem>
<SelectItem value="color">Solid Color</SelectItem>
</SelectContent>
</Select>
</div>
{/* Background Color - only show for color type */}
{backgroundType === "color" && (
<div className="space-y-2">
<Label htmlFor="bg-color">Background Color</Label>
<div className="flex gap-2">
<Input
id="bg-color"
type="color"
value={backgroundColor}
onChange={(e) => setBackgroundColor(e.target.value)}
className="w-16 h-10 p-1"
/>
<Input
value={backgroundColor}
onChange={(e) => setBackgroundColor(e.target.value)}
placeholder="#000000"
className="flex-1"
/>
</div>
</div>
)}
</div>
</div>
<Separator />
</>
)}
{/* Transform */}
<div className="space-y-4">
<h3 className="text-sm font-medium">Transform</h3>

View File

@ -19,17 +19,164 @@ import {
} from "../ui/tooltip";
import { DragOverlay } from "../ui/drag-overlay";
import { useTimelineStore, type TimelineTrack } from "@/stores/timeline-store";
import { useDragDrop } from "@/hooks/use-drag-drop";
import { useMediaStore } from "@/stores/media-store";
import { processMediaFiles } from "@/lib/media-processing";
import { ImageTimelineTreatment } from "@/components/ui/image-timeline-treatment";
import { toast } from "sonner";
import { useState, useRef } from "react";
export function Timeline() {
const { tracks, addTrack, addClipToTrack } = useTimelineStore();
const { mediaItems, addMediaItem } = useMediaStore();
const [isDragOver, setIsDragOver] = useState(false);
const [isProcessing, setIsProcessing] = useState(false);
const dragCounterRef = useRef(0);
const handleDragEnter = (e: React.DragEvent) => {
e.preventDefault();
// Don't show the overlay for timeline clip drags; media item drags and external files should still trigger it
if (e.dataTransfer.types.includes("application/x-timeline-clip")) {
return;
}
dragCounterRef.current += 1;
if (!isDragOver) {
setIsDragOver(true);
}
};
const handleDragOver = (e: React.DragEvent) => {
e.preventDefault();
};
const handleDragLeave = (e: React.DragEvent) => {
e.preventDefault();
// Don't update state for timeline clips
if (e.dataTransfer.types.includes("application/x-timeline-clip")) {
return;
}
dragCounterRef.current -= 1;
if (dragCounterRef.current === 0) {
setIsDragOver(false);
}
};
const handleDrop = async (e: React.DragEvent) => {
e.preventDefault();
setIsDragOver(false);
dragCounterRef.current = 0;
// Check if this is a timeline clip drop
const timelineClipData = e.dataTransfer.getData(
"application/x-timeline-clip"
);
if (timelineClipData) {
// Timeline clips dropped on the main timeline area (not on a specific track)
// For now, we'll just ignore these - clips should be dropped on specific tracks
return;
}
// Check if this is an internal media item drop
const mediaItemData = e.dataTransfer.getData("application/x-media-item");
if (mediaItemData) {
try {
const { id, type, name } = JSON.parse(mediaItemData);
// Find the full media item from the store
const mediaItem = mediaItems.find((item) => item.id === id);
if (!mediaItem) {
toast.error("Media item not found");
return;
}
// Determine track type based on media type
let trackType: "video" | "audio" | "effects";
if (type === "video") {
trackType = "video";
} else if (type === "audio") {
trackType = "audio";
} else {
// For images, we'll put them on video tracks
trackType = "video";
}
// Create a new track and get its ID
const newTrackId = addTrack(trackType);
// Add the clip to the new track
addClipToTrack(newTrackId, {
mediaId: mediaItem.id,
name: mediaItem.name,
duration: mediaItem.duration || 5, // Default 5 seconds for images
});
toast.success(`Added ${name} to ${trackType} track`);
} catch (error) {
console.error("Error parsing media item data:", error);
toast.error("Failed to add media to timeline");
}
} else if (e.dataTransfer.files && e.dataTransfer.files.length > 0) {
// Handle external file drops
setIsProcessing(true);
try {
const processedItems = await processMediaFiles(e.dataTransfer.files);
for (const processedItem of processedItems) {
// Add to media store first
addMediaItem(processedItem);
// The media item now has an ID, let's get it from the latest state
// Since addMediaItem is synchronous, we can get the latest item
const currentMediaItems = useMediaStore.getState().mediaItems;
const addedItem = currentMediaItems.find(
(item) =>
item.name === processedItem.name && item.url === processedItem.url
);
if (addedItem) {
// Determine track type based on media type
let trackType: "video" | "audio" | "effects";
if (processedItem.type === "video") {
trackType = "video";
} else if (processedItem.type === "audio") {
trackType = "audio";
} else {
// For images, we'll put them on video tracks
trackType = "video";
}
// Create a new track and get its ID
const newTrackId = addTrack(trackType);
// Add the clip to the new track
addClipToTrack(newTrackId, {
mediaId: addedItem.id,
name: addedItem.name,
duration: addedItem.duration || 5, // Default 5 seconds for images
});
toast.success(`Added ${processedItem.name} to timeline`);
}
}
} catch (error) {
console.error("Error processing external files:", error);
toast.error("Failed to process dropped files");
} finally {
setIsProcessing(false);
}
}
};
const dragProps = {
onDragEnter: handleDragEnter,
onDragOver: handleDragOver,
onDragLeave: handleDragLeave,
onDrop: handleDrop,
};
return (
<div
@ -40,8 +187,12 @@ export function Timeline() {
>
<DragOverlay
isVisible={isDragOver}
title="Drop files here"
description="Add media to timeline tracks"
title={isProcessing ? "Processing files..." : "Drop media here"}
description={
isProcessing
? "Please wait while files are being processed"
: "Add media to timeline tracks"
}
/>
{/* Toolbar */}
@ -154,6 +305,149 @@ export function Timeline() {
}
function TimelineTrackComponent({ track }: { track: TimelineTrack }) {
const { mediaItems } = useMediaStore();
const { moveClipToTrack, reorderClipInTrack } = useTimelineStore();
const [isDropping, setIsDropping] = useState(false);
const handleClipDragStart = (e: React.DragEvent, clip: any) => {
// Mark this as a timeline clip drag to differentiate it from media item drags
const dragData = {
clipId: clip.id,
trackId: track.id,
name: clip.name,
};
e.dataTransfer.setData(
"application/x-timeline-clip",
JSON.stringify(dragData)
);
e.dataTransfer.effectAllowed = "move";
// Use the entire clip container as the drag image instead of just the content
const target = e.currentTarget as HTMLElement;
e.dataTransfer.setDragImage(
target,
target.offsetWidth / 2,
target.offsetHeight / 2
);
};
const handleTrackDragOver = (e: React.DragEvent) => {
e.preventDefault();
// Only handle timeline clip drags
if (!e.dataTransfer.types.includes("application/x-timeline-clip")) {
return;
}
e.dataTransfer.dropEffect = "move";
};
const handleTrackDragEnter = (e: React.DragEvent) => {
e.preventDefault();
// Only handle timeline clip drags
if (!e.dataTransfer.types.includes("application/x-timeline-clip")) {
return;
}
setIsDropping(true);
};
const handleTrackDragLeave = (e: React.DragEvent) => {
e.preventDefault();
// Only handle timeline clip drags
if (!e.dataTransfer.types.includes("application/x-timeline-clip")) {
return;
}
// Check if we're actually leaving the track area
const rect = e.currentTarget.getBoundingClientRect();
const x = e.clientX;
const y = e.clientY;
const isActuallyLeaving =
x < rect.left || x > rect.right || y < rect.top || y > rect.bottom;
if (isActuallyLeaving) {
setIsDropping(false);
}
};
const handleTrackDrop = (e: React.DragEvent) => {
e.preventDefault();
setIsDropping(false);
// Only handle timeline clip drags
if (!e.dataTransfer.types.includes("application/x-timeline-clip")) {
return;
}
const timelineClipData = e.dataTransfer.getData(
"application/x-timeline-clip"
);
if (!timelineClipData) {
return;
}
try {
const parsedData = JSON.parse(timelineClipData);
const { clipId, trackId: fromTrackId } = parsedData;
// Calculate where to insert the clip based on mouse position
const trackContainer = e.currentTarget.querySelector(
".track-clips-container"
) as HTMLElement;
if (!trackContainer) {
return;
}
const rect = trackContainer.getBoundingClientRect();
const mouseX = e.clientX - rect.left;
// Calculate insertion index based on position
let insertIndex = 0;
const clipElements = trackContainer.querySelectorAll(".timeline-clip");
for (let i = 0; i < clipElements.length; i++) {
const clipRect = clipElements[i].getBoundingClientRect();
const clipCenterX = clipRect.left + clipRect.width / 2 - rect.left;
if (mouseX > clipCenterX) {
insertIndex = i + 1;
} else {
break;
}
}
if (fromTrackId === track.id) {
// Moving within the same track - reorder
const currentIndex = track.clips.findIndex(
(clip) => clip.id === clipId
);
if (currentIndex !== -1 && currentIndex !== insertIndex) {
// Adjust index if we're moving to a position after the current one
const adjustedIndex =
insertIndex > currentIndex ? insertIndex - 1 : insertIndex;
reorderClipInTrack(track.id, clipId, adjustedIndex);
toast.success("Clip reordered");
}
} else {
// Moving between different tracks
moveClipToTrack(fromTrackId, track.id, clipId, insertIndex);
toast.success("Clip moved to different track");
}
} catch (error) {
console.error("Error moving clip:", error);
toast.error("Failed to move clip");
}
};
const getTrackColor = (type: string) => {
switch (type) {
case "video":
@ -167,26 +461,93 @@ function TimelineTrackComponent({ track }: { track: TimelineTrack }) {
}
};
const renderClipContent = (clip: any) => {
const mediaItem = mediaItems.find((item) => item.id === clip.mediaId);
if (!mediaItem) {
return (
<span className="text-xs text-foreground/80 truncate">{clip.name}</span>
);
}
if (mediaItem.type === "image") {
return (
<div className="w-full h-full flex items-center gap-2">
<div className="w-16 h-12 flex-shrink-0">
<ImageTimelineTreatment
src={mediaItem.url}
alt={mediaItem.name}
targetAspectRatio={16 / 9}
className="rounded-sm"
backgroundType="mirror"
/>
</div>
<span className="text-xs text-foreground/80 truncate flex-1">
{clip.name}
</span>
</div>
);
}
if (mediaItem.type === "video" && mediaItem.thumbnailUrl) {
return (
<div className="w-full h-full flex items-center gap-2">
<div className="w-8 h-8 flex-shrink-0">
<img
src={mediaItem.thumbnailUrl}
alt={mediaItem.name}
className="w-full h-full object-cover rounded-sm"
/>
</div>
<span className="text-xs text-foreground/80 truncate flex-1">
{clip.name}
</span>
</div>
);
}
// Fallback for audio or videos without thumbnails
return (
<span className="text-xs text-foreground/80 truncate">{clip.name}</span>
);
};
return (
<div className="flex items-center px-2">
<div className="w-24 text-xs text-muted-foreground flex-shrink-0 mr-2">
{track.name}
</div>
<div className="flex-1 h-[60px]">
{track.clips.length === 0 ? (
<div className="h-full rounded-sm border-2 border-dashed border-muted/30 flex items-center justify-center text-xs text-muted-foreground">
Drop media here
</div>
) : (
<div
className={`h-full rounded-sm border cursor-pointer transition-colors ${getTrackColor(track.type)} flex items-center px-2`}
>
<span className="text-xs text-foreground/80">
{track.clips.length} clip{track.clips.length !== 1 ? "s" : ""}
</span>
</div>
)}
<div
className={`flex-1 h-[60px] transition-colors ${
isDropping ? "bg-accent/50 border-2 border-dashed border-accent" : ""
}`}
onDragOver={handleTrackDragOver}
onDragEnter={handleTrackDragEnter}
onDragLeave={handleTrackDragLeave}
onDrop={handleTrackDrop}
>
<div className="h-full flex gap-1 track-clips-container">
{track.clips.length === 0 ? (
<div className="h-full w-full rounded-sm border-2 border-dashed border-muted/30 flex items-center justify-center text-xs text-muted-foreground">
Drop media here
</div>
) : (
track.clips.map((clip, index) => (
<div
key={clip.id}
className={`timeline-clip h-full rounded-sm border cursor-grab active:cursor-grabbing transition-colors ${getTrackColor(track.type)} flex items-center px-2 min-w-[80px] overflow-hidden`}
style={{
width: `${Math.max(80, (clip.duration / 30) * 400)}px`,
}}
draggable={true}
onDragStart={(e) => handleClipDragStart(e, clip)}
>
{renderClipContent(clip)}
</div>
))
)}
</div>
</div>
</div>
);
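The insertion-index calculation in handleTrackDrop is easier to follow when separated from the DOM measurements. A standalone sketch of the same logic (illustrative names and values, not part of the commit):

// Given the horizontal centers of existing clips (relative to the track
// container) and the drop position, find the index to insert the clip at.
function getInsertIndex(clipCenters: number[], mouseX: number): number {
  let insertIndex = 0;
  for (const centerX of clipCenters) {
    if (mouseX > centerX) insertIndex += 1;
    else break;
  }
  return insertIndex;
}

// When reordering within the same track, removing the clip first shifts the
// later indices left, so the target index is adjusted before calling the store.
function adjustForSameTrack(insertIndex: number, currentIndex: number): number {
  return insertIndex > currentIndex ? insertIndex - 1 : insertIndex;
}

// Example: clips centered at 40, 140 and 260 px; dropping at x = 200 gives
// insertIndex 2. If the dragged clip currently sits at index 0, the adjusted
// index becomes 1.
console.log(getInsertIndex([40, 140, 260], 200)); // 2
console.log(adjustForSameTrack(2, 0)); // 1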

View File

@ -0,0 +1,101 @@
"use client";
import { useState } from "react";
import { cn } from "@/lib/utils";
interface ImageTimelineTreatmentProps {
src: string;
alt: string;
targetAspectRatio?: number; // Default to 16:9 for video
className?: string;
backgroundType?: "blur" | "mirror" | "color";
backgroundColor?: string;
}
export function ImageTimelineTreatment({
src,
alt,
targetAspectRatio = 16 / 9,
className,
backgroundType = "blur",
backgroundColor = "#000000",
}: ImageTimelineTreatmentProps) {
const [imageLoaded, setImageLoaded] = useState(false);
const [imageDimensions, setImageDimensions] = useState<{
width: number;
height: number;
} | null>(null);
const handleImageLoad = (e: React.SyntheticEvent<HTMLImageElement>) => {
const img = e.currentTarget;
setImageDimensions({
width: img.naturalWidth,
height: img.naturalHeight,
});
setImageLoaded(true);
};
const imageAspectRatio = imageDimensions
? imageDimensions.width / imageDimensions.height
: 1;
const needsAspectRatioTreatment = imageAspectRatio !== targetAspectRatio;
return (
<div
className={cn("relative overflow-hidden", className)}
style={{ aspectRatio: targetAspectRatio }}
>
{/* Background Layer */}
{needsAspectRatioTreatment && imageLoaded && (
<>
{backgroundType === "blur" && (
<div className="absolute inset-0">
<img
src={src}
alt=""
className="w-full h-full object-cover filter blur-xl scale-110 opacity-60"
aria-hidden="true"
/>
<div className="absolute inset-0 bg-black/20" />
</div>
)}
{backgroundType === "mirror" && (
<div className="absolute inset-0">
<img
src={src}
alt=""
className="w-full h-full object-cover opacity-30"
aria-hidden="true"
/>
</div>
)}
{backgroundType === "color" && (
<div className="absolute inset-0" style={{ backgroundColor }} />
)}
</>
)}
{/* Main Image Layer - contained so the background treatment stays visible */}
<div className="absolute inset-0">
<img
src={src}
alt={alt}
className="w-full h-full object-contain"
onLoad={handleImageLoad}
/>
</div>
{/* Loading state */}
{!imageLoaded && (
<div className="absolute inset-0 flex items-center justify-center bg-muted/30">
<div className="animate-pulse text-xs text-muted-foreground">
Loading...
</div>
</div>
)}
</div>
);
}
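A minimal usage sketch of the new component (the wrapper name is hypothetical; prop values mirror those used by the preview and properties panels above):

import { ImageTimelineTreatment } from "@/components/ui/image-timeline-treatment";

// Renders an image inside a fixed 16:9 frame with a blurred copy of the
// image as the background treatment.
export function CoverThumbnail({ src, name }: { src: string; name: string }) {
  return (
    <ImageTimelineTreatment
      src={src}
      alt={name}
      targetAspectRatio={16 / 9}
      backgroundType="blur"
      className="rounded-md border"
    />
  );
}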

View File

@ -4,12 +4,29 @@ interface UseDragDropOptions {
onDrop?: (files: FileList) => void;
}
// Helper function to check if drag contains files from external sources (not internal app drags)
const containsFiles = (dataTransfer: DataTransfer): boolean => {
// Check if this is an internal app drag (media item)
if (dataTransfer.types.includes("application/x-media-item")) {
return false;
}
// Only show overlay for external file drags
return dataTransfer.types.includes("Files");
};
export function useDragDrop(options: UseDragDropOptions = {}) {
const [isDragOver, setIsDragOver] = useState(false);
const dragCounterRef = useRef(0);
const handleDragEnter = (e: React.DragEvent) => {
e.preventDefault();
// Only handle external file drags, not internal app element drags
if (!containsFiles(e.dataTransfer)) {
return;
}
dragCounterRef.current += 1;
if (!isDragOver) {
setIsDragOver(true);
@ -18,10 +35,21 @@ export function useDragDrop(options: UseDragDropOptions = {}) {
const handleDragOver = (e: React.DragEvent) => {
e.preventDefault();
// Only handle file drags
if (!containsFiles(e.dataTransfer)) {
return;
}
};
const handleDragLeave = (e: React.DragEvent) => {
e.preventDefault();
// Only handle file drags
if (!containsFiles(e.dataTransfer)) {
return;
}
dragCounterRef.current -= 1;
if (dragCounterRef.current === 0) {
setIsDragOver(false);
@ -33,7 +61,12 @@ export function useDragDrop(options: UseDragDropOptions = {}) {
setIsDragOver(false);
dragCounterRef.current = 0;
// Only handle file drops
if (
options.onDrop &&
e.dataTransfer.files &&
containsFiles(e.dataTransfer)
) {
options.onDrop(e.dataTransfer.files);
}
};
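For reference, the drag payloads now distinguished across the app are external Files, application/x-media-item (media panel items), and application/x-timeline-clip (timeline clips). A condensed sketch of that contract (illustrative names, not part of the commit):

import type { DragEvent } from "react";

type DragKind = "external-files" | "media-item" | "timeline-clip";

// Classify a drag by the payload types it carries.
function classifyDrag(dt: DataTransfer): DragKind | null {
  if (dt.types.includes("application/x-timeline-clip")) return "timeline-clip";
  if (dt.types.includes("application/x-media-item")) return "media-item";
  if (dt.types.includes("Files")) return "external-files";
  return null;
}

// A drop target can then branch on the payload instead of guessing.
function handleDrop(e: DragEvent) {
  e.preventDefault();
  switch (classifyDrag(e.dataTransfer)) {
    case "media-item":
      console.log(JSON.parse(e.dataTransfer.getData("application/x-media-item")));
      break;
    case "external-files":
      console.log(e.dataTransfer.files);
      break;
    default:
      break; // timeline clip drops are handled by the track components
  }
}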

View File

@ -0,0 +1,67 @@
import { toast } from "sonner";
import {
getFileType,
generateVideoThumbnail,
getMediaDuration,
getImageAspectRatio,
type MediaItem,
} from "@/stores/media-store";
export interface ProcessedMediaItem extends Omit<MediaItem, "id"> {}
export async function processMediaFiles(
files: FileList | File[]
): Promise<ProcessedMediaItem[]> {
const fileArray = Array.from(files);
const processedItems: ProcessedMediaItem[] = [];
for (const file of fileArray) {
const fileType = getFileType(file);
if (!fileType) {
toast.error(`Unsupported file type: ${file.name}`);
continue;
}
const url = URL.createObjectURL(file);
let thumbnailUrl: string | undefined;
let duration: number | undefined;
let aspectRatio: number = 16 / 9; // Default fallback
try {
if (fileType === "image") {
// Get image aspect ratio
aspectRatio = await getImageAspectRatio(file);
} else if (fileType === "video") {
// Generate thumbnail and get aspect ratio for videos
const videoResult = await generateVideoThumbnail(file);
thumbnailUrl = videoResult.thumbnailUrl;
aspectRatio = videoResult.aspectRatio;
} else if (fileType === "audio") {
// For audio, use a square aspect ratio
aspectRatio = 1;
}
// Get duration for videos and audio
if (fileType === "video" || fileType === "audio") {
duration = await getMediaDuration(file);
}
processedItems.push({
name: file.name,
type: fileType,
file,
url,
thumbnailUrl,
duration,
aspectRatio,
});
} catch (error) {
console.error("Error processing file:", file.name, error);
toast.error(`Failed to process ${file.name}`);
URL.revokeObjectURL(url); // Clean up on error
}
}
return processedItems;
}

View File

@ -4,6 +4,11 @@ export interface MediaItem {
id: string;
name: string;
type: "image" | "video" | "audio";
file: File;
url: string; // Object URL for preview
thumbnailUrl?: string; // For video thumbnails
duration?: number; // For video/audio duration
aspectRatio: number; // width / height
}
interface MediaStore {
@ -12,8 +17,113 @@ interface MediaStore {
// Actions
addMediaItem: (item: Omit<MediaItem, "id">) => void;
removeMediaItem: (id: string) => void;
clearAllMedia: () => void;
}
// Helper function to determine file type
export const getFileType = (file: File): "image" | "video" | "audio" | null => {
const { type } = file;
if (type.startsWith("image/")) {
return "image";
}
if (type.startsWith("video/")) {
return "video";
}
if (type.startsWith("audio/")) {
return "audio";
}
return null;
};
// Helper function to get image aspect ratio
export const getImageAspectRatio = (file: File): Promise<number> => {
return new Promise((resolve, reject) => {
const img = new Image();
img.addEventListener("load", () => {
const aspectRatio = img.naturalWidth / img.naturalHeight;
resolve(aspectRatio);
img.remove();
});
img.addEventListener("error", () => {
reject(new Error("Could not load image"));
img.remove();
});
img.src = URL.createObjectURL(file);
});
};
// Helper function to generate video thumbnail and get aspect ratio
export const generateVideoThumbnail = (
file: File
): Promise<{ thumbnailUrl: string; aspectRatio: number }> => {
return new Promise((resolve, reject) => {
const video = document.createElement("video");
const canvas = document.createElement("canvas");
const ctx = canvas.getContext("2d");
if (!ctx) {
reject(new Error("Could not get canvas context"));
return;
}
video.addEventListener("loadedmetadata", () => {
canvas.width = video.videoWidth;
canvas.height = video.videoHeight;
// Seek to 1 second or 10% of duration, whichever is smaller
video.currentTime = Math.min(1, video.duration * 0.1);
});
video.addEventListener("seeked", () => {
ctx.drawImage(video, 0, 0, canvas.width, canvas.height);
const thumbnailUrl = canvas.toDataURL("image/jpeg", 0.8);
const aspectRatio = video.videoWidth / video.videoHeight;
resolve({ thumbnailUrl, aspectRatio });
// Cleanup
video.remove();
canvas.remove();
});
video.addEventListener("error", () => {
reject(new Error("Could not load video"));
video.remove();
canvas.remove();
});
video.src = URL.createObjectURL(file);
video.load();
});
};
// Helper function to get media duration
export const getMediaDuration = (file: File): Promise<number> => {
return new Promise((resolve, reject) => {
const element = document.createElement(
file.type.startsWith("video/") ? "video" : "audio"
) as HTMLVideoElement | HTMLAudioElement;
element.addEventListener("loadedmetadata", () => {
resolve(element.duration);
element.remove();
});
element.addEventListener("error", () => {
reject(new Error("Could not load media"));
element.remove();
});
element.src = URL.createObjectURL(file);
element.load();
});
};
export const useMediaStore = create<MediaStore>((set, get) => ({
mediaItems: [],
@ -28,8 +138,33 @@ export const useMediaStore = create<MediaStore>((set, get) => ({
},
removeMediaItem: (id) => {
const state = get();
const item = state.mediaItems.find((item) => item.id === id);
// Cleanup object URLs to prevent memory leaks
if (item) {
URL.revokeObjectURL(item.url);
if (item.thumbnailUrl) {
URL.revokeObjectURL(item.thumbnailUrl);
}
}
set((state) => ({
mediaItems: state.mediaItems.filter((item) => item.id !== id),
}));
},
clearAllMedia: () => {
const state = get();
// Cleanup all object URLs
state.mediaItems.forEach((item) => {
URL.revokeObjectURL(item.url);
if (item.thumbnailUrl) {
URL.revokeObjectURL(item.thumbnailUrl);
}
});
set({ mediaItems: [] });
},
}));
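clearAllMedia is added here but not called anywhere in this commit. A hedged sketch of how a future project-close handler might use it, relying on the same imperative getState access the timeline drop handler uses (function name is hypothetical):

import { useMediaStore } from "@/stores/media-store";

// Hypothetical cleanup when a project is closed: clearAllMedia revokes every
// object URL and empties the library in one call.
export function closeProjectMedia() {
  const { mediaItems, clearAllMedia } = useMediaStore.getState();
  console.log(`Releasing ${mediaItems.length} media item(s)`);
  clearAllMedia();
}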

View File

@ -18,9 +18,21 @@ interface TimelineStore {
tracks: TimelineTrack[];
// Actions
addTrack: (type: "video" | "audio" | "effects") => void;
addTrack: (type: "video" | "audio" | "effects") => string;
removeTrack: (trackId: string) => void;
addClipToTrack: (trackId: string, clip: Omit<TimelineClip, "id">) => void;
removeClipFromTrack: (trackId: string, clipId: string) => void;
moveClipToTrack: (
fromTrackId: string,
toTrackId: string,
clipId: string,
insertIndex?: number
) => void;
reorderClipInTrack: (
trackId: string,
clipId: string,
newIndex: number
) => void;
}
export const useTimelineStore = create<TimelineStore>((set) => ({
@ -36,6 +48,7 @@ export const useTimelineStore = create<TimelineStore>((set) => ({
set((state) => ({
tracks: [...state.tracks, newTrack],
}));
return newTrack.id;
},
removeTrack: (trackId) => {
@ -58,4 +71,67 @@ export const useTimelineStore = create<TimelineStore>((set) => ({
),
}));
},
removeClipFromTrack: (trackId, clipId) => {
set((state) => ({
tracks: state.tracks.map((track) =>
track.id === trackId
? {
...track,
clips: track.clips.filter((clip) => clip.id !== clipId),
}
: track
),
}));
},
moveClipToTrack: (fromTrackId, toTrackId, clipId, insertIndex) => {
set((state) => {
// Find the clip to move
const fromTrack = state.tracks.find((track) => track.id === fromTrackId);
const clipToMove = fromTrack?.clips.find((clip) => clip.id === clipId);
if (!clipToMove) return state;
return {
tracks: state.tracks.map((track) => {
if (track.id === fromTrackId) {
// Remove clip from source track
return {
...track,
clips: track.clips.filter((clip) => clip.id !== clipId),
};
} else if (track.id === toTrackId) {
// Add clip to destination track
const newClips = [...track.clips];
const index =
insertIndex !== undefined ? insertIndex : newClips.length;
newClips.splice(index, 0, clipToMove);
return {
...track,
clips: newClips,
};
}
return track;
}),
};
});
},
reorderClipInTrack: (trackId, clipId, newIndex) => {
set((state) => ({
tracks: state.tracks.map((track) => {
if (track.id !== trackId) return track;
const clipIndex = track.clips.findIndex((clip) => clip.id === clipId);
if (clipIndex === -1) return track;
const newClips = [...track.clips];
const [movedClip] = newClips.splice(clipIndex, 1);
newClips.splice(newIndex, 0, movedClip);
return { ...track, clips: newClips };
}),
}));
},
}));
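Because addTrack now returns the new track's id, callers can chain track creation and clip insertion without reading state back. A minimal store-level sketch of the flow the timeline drop handler follows (mediaId and names are illustrative):

import { useTimelineStore } from "@/stores/timeline-store";

export function demoTimelineFlow() {
  const { addTrack, addClipToTrack, moveClipToTrack } =
    useTimelineStore.getState();

  // Create a video track and drop a 5-second clip onto it.
  const firstTrackId = addTrack("video");
  addClipToTrack(firstTrackId, {
    mediaId: "media-123", // hypothetical media item id
    name: "clip.mp4",
    duration: 5,
  });

  // Move that clip to the front of a second track.
  const secondTrackId = addTrack("video");
  const clip = useTimelineStore
    .getState()
    .tracks.find((t) => t.id === firstTrackId)?.clips[0];
  if (clip) {
    moveClipToTrack(firstTrackId, secondTrackId, clip.id, 0);
  }
}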