diff --git a/content/docs/NOTES.md b/content/docs/NOTES.md
index 39180ee04..faab2ca24 100644
--- a/content/docs/NOTES.md
+++ b/content/docs/NOTES.md
@@ -163,4 +163,4 @@ Abort trap: 6
 
 How can I interpret my data more intuitively?
 We recommend using [TablePlus](https://tableplus.com/) to open the SQLite database located alongside the data.
-
\ No newline at end of file
+
diff --git a/screenpipe-app-tauri/app/timeline/page.tsx b/screenpipe-app-tauri/app/timeline/page.tsx
index d88d252be..4c66c800c 100644
--- a/screenpipe-app-tauri/app/timeline/page.tsx
+++ b/screenpipe-app-tauri/app/timeline/page.tsx
@@ -75,16 +75,18 @@ export default function Timeline() {
   }, []);
 
   const setupEventSource = () => {
+    console.log("timeline fps:", settings.timelineFps);
     if (eventSourceRef.current) {
      eventSourceRef.current.close();
     }
 
+    const target_fps = settings.timelineFps;
     const endTime = new Date();
     endTime.setMinutes(endTime.getMinutes() - 2);
     const startTime = new Date();
     startTime.setHours(0, 1, 0, 0);
 
-    const url = `http://localhost:3030/stream/frames?start_time=${startTime.toISOString()}&end_time=${endTime.toISOString()}&order=descending`;
+    const url = `http://localhost:3030/stream/frames?start_time=${startTime.toISOString()}&end_time=${endTime.toISOString()}&order=descending&target_fps=${target_fps}`;
 
     setLoadedTimeRange({
       start: startTime,
@@ -181,7 +183,7 @@ export default function Timeline() {
       setIsLoading(false);
       setError(null);
     };
-  }, []);
+  }, [settings.timelineFps]);
 
   const handleScroll = useMemo(
     () =>
diff --git a/screenpipe-app-tauri/components/recording-settings.tsx b/screenpipe-app-tauri/components/recording-settings.tsx
index b247633d0..7a92237b8 100644
--- a/screenpipe-app-tauri/components/recording-settings.tsx
+++ b/screenpipe-app-tauri/components/recording-settings.tsx
@@ -104,6 +104,7 @@ export function RecordingSettings({
   > | null>(null);
   const [windowsForIgnore, setWindowsForIgnore] = useState("");
   const [windowsForInclude, setWindowsForInclude] = useState("");
+  const [isTimelineEnabled, setIsTimelineEnabled] = useState(localSettings.enableFrameCache);
   const [availableMonitors, setAvailableMonitors] = useState(
     []
   );
@@ -229,6 +230,7 @@ export function RecordingSettings({
        includedWindows: localSettings.includedWindows,
        deepgramApiKey: localSettings.deepgramApiKey,
        fps: localSettings.fps,
+       timelineFps: localSettings.timelineFps,
        vadSensitivity: localSettings.vadSensitivity,
        audioChunkDuration: localSettings.audioChunkDuration,
        analyticsEnabled: localSettings.analyticsEnabled,
@@ -384,6 +386,10 @@ export function RecordingSettings({
     setLocalSettings({ ...localSettings, fps: value[0] });
   };
 
+  const handleTimelineFpsChange = (value: number[]) => {
+    setLocalSettings({ ...localSettings, timelineFps: value[0] });
+  };
+
   const handleVadSensitivityChange = (value: number[]) => {
     const sensitivityMap: { [key: number]: VadSensitivity } = {
       2: "high",
@@ -583,6 +589,7 @@ export function RecordingSettings({
   };
 
   const handleFrameCacheToggle = (checked: boolean) => {
+    setIsTimelineEnabled(checked);
     setLocalSettings({
       ...localSettings,
       enableFrameCache: checked,
@@ -1469,7 +1476,7 @@ export function RecordingSettings({
+              {isTimelineEnabled && (
+                    {localSettings.timelineFps.toFixed(1)}
+              )}
               {isMacOS && (
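
For context, here is a minimal sketch of how the new `timelineFps` setting could be surfaced as a slider inside the `isTimelineEnabled` block, wired to the `handleTimelineFpsChange` handler added above. The `Slider`/`Label` import paths, the 0.1–1.0 range, and the component name are assumptions for illustration, not the exact markup of this patch:

```tsx
// sketch only: props, range, and styling are assumed, not taken from the patch
import { Label } from "@/components/ui/label";   // assumed shadcn/ui path
import { Slider } from "@/components/ui/slider"; // assumed shadcn/ui path

interface TimelineFpsControlProps {
  value: number;                       // localSettings.timelineFps
  onChange: (value: number[]) => void; // e.g. handleTimelineFpsChange
}

export function TimelineFpsControl({ value, onChange }: TimelineFpsControlProps) {
  return (
    <div className="flex flex-col space-y-2">
      <Label htmlFor="timelineFps">timeline frames per second</Label>
      <div className="flex items-center space-x-4">
        <Slider
          id="timelineFps"
          min={0.1}  // assumed lower bound
          max={1}    // assumed upper bound
          step={0.1}
          value={[value]}          // shadcn sliders take an array of values
          onValueChange={onChange}
          className="flex-grow"
        />
        <span className="w-12 text-right">{value.toFixed(1)}</span>
      </div>
    </div>
  );
}
```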
anyhow::Result<()> {
     // Ensure 'setx' command can handle the new PATH length
     if current_path.len() + new_path.to_str().unwrap_or("").len() + 1 > 1024 {
-        return Err(anyhow::anyhow!(
-            "the PATH is too long to persist using 'setx'. please shorten the PATH."
-        ));
+        debug!("the PATH is too long to persist using 'setx'. please shorten the PATH.");
+        return Ok(());
     }
 
     // Construct the new PATH string
diff --git a/screenpipe-server/src/server.rs b/screenpipe-server/src/server.rs
index a007da719..5ab1aceae 100644
--- a/screenpipe-server/src/server.rs
+++ b/screenpipe-server/src/server.rs
@@ -1219,6 +1219,7 @@ enum Order {
 pub struct StreamFramesRequest {
     start_time: DateTime<Utc>,
     end_time: DateTime<Utc>,
+    target_fps: Option<f64>,
     // #[serde(rename = "order")]
     // #[serde(default = "descending")]
     // order: Order,
@@ -1370,9 +1371,10 @@ async fn stream_frames_handler(
     // Spawn frame extraction task using get_frames
     tokio::spawn({
         let frame_tx = frame_tx.clone();
+        let target_fps = request.target_fps.unwrap_or(0.1);
         async move {
             tokio::select! {
-                result = cache.get_frames(center_timestamp, duration_minutes, frame_tx.clone(), true) => {
+                result = cache.get_frames(center_timestamp, duration_minutes, frame_tx.clone(), true, target_fps) => {
                     if let Err(e) = result {
                         error!("frame extraction failed: {}", e);
                         // Send error to client
diff --git a/screenpipe-server/src/video_cache.rs b/screenpipe-server/src/video_cache.rs
index 6f7113178..19ab8ab7f 100644
--- a/screenpipe-server/src/video_cache.rs
+++ b/screenpipe-server/src/video_cache.rs
@@ -458,6 +458,7 @@ impl FrameCache {
         start_time: DateTime<Utc>,
         end_time: DateTime<Utc>,
         frame_tx: FrameChannel,
+        target_fps: f64,
     ) -> Result<()> {
         let mut extraction_queue = HashMap::new();
         let mut total_frames = 0;
@@ -545,6 +546,7 @@ impl FrameCache {
                 tasks,
                 frame_tx.clone(),
                 self.cache_tx.clone(),
+                target_fps,
             )
             .await?;
             total_frames += extracted;
@@ -561,6 +563,7 @@ impl FrameCache {
         duration_minutes: i64,
         frame_tx: Sender,
         descending: bool,
+        target_fps: f64,
     ) -> Result<()> {
         let start = timestamp - Duration::minutes(duration_minutes / 2);
         let end = timestamp + Duration::minutes(duration_minutes / 2);
@@ -575,7 +578,7 @@ impl FrameCache {
         let cache_clone = self.clone();
         tokio::spawn(async move {
             let result = cache_clone
-                .extract_frames_batch(start, end, extract_tx)
+                .extract_frames_batch(start, end, extract_tx, target_fps)
                 .await;
             debug!("extraction task completed: {:?}", result.is_ok());
             result
@@ -636,6 +639,7 @@ async fn extract_frame(
     tasks: Vec<(FrameData, OCREntry)>,
     frame_tx: FrameChannel,
     cache_tx: mpsc::Sender,
+    target_fps: f64,
 ) -> Result {
     if !is_video_file_complete(&ffmpeg, &video_file_path).await? {
         debug!("skipping incomplete video file: {}", video_file_path);
 
     let output_pattern = temp_dir.path().join("frame%d.jpg");
 
     // Calculate frame interval based on target FPS
-    let frame_interval = (source_fps / 0.1).round() as i64; // Using 0.1 as target FPS
+    let frame_interval = (source_fps / target_fps).round() as i64; // interval derived from the requested target FPS
 
     debug!(
         "extracting frames with interval {} (source: {}fps, target: {}fps)",
-        frame_interval, source_fps, 0.1
+        frame_interval, source_fps, target_fps
     );
 
     // Calculate which frames to extract
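
A note on the sampling math introduced in video_cache.rs: the extractor keeps one frame out of every `round(source_fps / target_fps)`, so a 30 fps recording streamed at the default `target_fps` of 0.1 keeps every 300th frame, while `target_fps = 0.5` keeps every 60th. A small TypeScript helper illustrating the same arithmetic (the helper itself is ours, not part of the patch):

```ts
// mirrors the Rust expression: (source_fps / target_fps).round() as i64
function frameInterval(sourceFps: number, targetFps: number): number {
  return Math.round(sourceFps / targetFps);
}

console.log(frameInterval(30, 0.1)); // 300 -> one frame kept out of every 300
console.log(frameInterval(30, 0.5)); // 60
console.log(frameInterval(24, 1.0)); // 24
```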
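
On the client side, `target_fps` is just an extra query parameter on the existing `/stream/frames` SSE endpoint, and the handler falls back to 0.1 when it is omitted (`request.target_fps.unwrap_or(0.1)` above). A minimal consumer mirroring the timeline page's EventSource setup might look like this; the 0.5 value and the error handling are illustrative:

```ts
// sketch: request the frame stream at 0.5 fps instead of the 0.1 default
const startTime = new Date();
startTime.setHours(0, 1, 0, 0);
const endTime = new Date();
endTime.setMinutes(endTime.getMinutes() - 2);

const targetFps = 0.5; // e.g. settings.timelineFps in the app
const url =
  `http://localhost:3030/stream/frames` +
  `?start_time=${startTime.toISOString()}` +
  `&end_time=${endTime.toISOString()}` +
  `&order=descending` +
  `&target_fps=${targetFps}`;

const source = new EventSource(url);
source.onmessage = (event) => {
  // each SSE message carries one frame payload; its exact shape is defined server-side
  console.log("frame event:", event.data);
};
source.onerror = () => source.close();
```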