@@ -229,6 +230,7 @@ export function RecordingSettings({
includedWindows: localSettings.includedWindows,
deepgramApiKey: localSettings.deepgramApiKey,
fps: localSettings.fps,
+ timelineFps: localSettings.timelineFps,
vadSensitivity: localSettings.vadSensitivity,
audioChunkDuration: localSettings.audioChunkDuration,
analyticsEnabled: localSettings.analyticsEnabled,
@@ -384,6 +386,10 @@ export function RecordingSettings({
setLocalSettings({ ...localSettings, fps: value[0] });
};
+ const handleTimelineFpsChange = (value: number[]) => {
+ setLocalSettings({ ...localSettings, timelineFps: value[0] });
+ };
+
const handleVadSensitivityChange = (value: number[]) => {
const sensitivityMap: { [key: number]: VadSensitivity } = {
2: "high",
@@ -583,6 +589,7 @@ export function RecordingSettings({
};
const handleFrameCacheToggle = (checked: boolean) => {
+ setIsTimelineEnabled(checked);
setLocalSettings({
...localSettings,
enableFrameCache: checked,
@@ -1469,7 +1476,7 @@ export function RecordingSettings({
+ {isTimelineEnabled && (
+   <div className="flex flex-col space-y-2">
+     <Label htmlFor="timelineFps">timeline fps</Label>
+     <div className="flex items-center space-x-4">
+       <Slider
+         id="timelineFps"
+         min={0.1}
+         max={1}
+         step={0.1}
+         value={[localSettings.timelineFps]}
+         onValueChange={handleTimelineFpsChange}
+       />
+       <span className="w-12 text-right">
+         {localSettings.timelineFps.toFixed(1)}
+       </span>
+     </div>
+   </div>
+ )}
{isMacOS && (
anyhow::Result<()> {
// Ensure 'setx' command can handle the new PATH length
if current_path.len() + new_path.to_str().unwrap_or("").len() + 1 > 1024 {
- return Err(anyhow::anyhow!(
- "the PATH is too long to persist using 'setx'. please shorten the PATH."
- ));
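+ // log instead of failing: the PATH simply won't be persisted via 'setx'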
+ debug!("the PATH is too long to persist using 'setx'; skipping PATH persistence.");
+ return Ok(());
}
// Construct the new PATH string
diff --git a/screenpipe-server/src/server.rs b/screenpipe-server/src/server.rs
index a007da719..5ab1aceae 100644
--- a/screenpipe-server/src/server.rs
+++ b/screenpipe-server/src/server.rs
@@ -1219,6 +1219,7 @@ enum Order {
pub struct StreamFramesRequest {
start_time: DateTime<Utc>,
end_time: DateTime<Utc>,
+ target_fps: Option<f64>,
// #[serde(rename = "order")]
// #[serde(default = "descending")]
// order: Order,
@@ -1370,9 +1371,10 @@ async fn stream_frames_handler(
// Spawn frame extraction task using get_frames
tokio::spawn({
let frame_tx = frame_tx.clone();
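+ // fall back to 0.1 fps (one frame every 10s) when the client omits target_fps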
+ let target_fps = request.target_fps.unwrap_or(0.1);
async move {
tokio::select! {
- result = cache.get_frames(center_timestamp, duration_minutes, frame_tx.clone(), true) => {
+ result = cache.get_frames(center_timestamp, duration_minutes, frame_tx.clone(), true, target_fps) => {
if let Err(e) = result {
error!("frame extraction failed: {}", e);
// Send error to client
diff --git a/screenpipe-server/src/video_cache.rs b/screenpipe-server/src/video_cache.rs
index 6f7113178..19ab8ab7f 100644
--- a/screenpipe-server/src/video_cache.rs
+++ b/screenpipe-server/src/video_cache.rs
@@ -458,6 +458,7 @@ impl FrameCache {
start_time: DateTime<Utc>,
end_time: DateTime<Utc>,
frame_tx: FrameChannel,
+ target_fps: f64,
) -> Result<()> {
let mut extraction_queue = HashMap::new();
let mut total_frames = 0;
@@ -545,6 +546,7 @@ impl FrameCache {
tasks,
frame_tx.clone(),
self.cache_tx.clone(),
+ target_fps,
)
.await?;
total_frames += extracted;
@@ -561,6 +563,7 @@ impl FrameCache {
duration_minutes: i64,
frame_tx: Sender<TimeSeriesFrame>,
descending: bool,
+ target_fps: f64,
) -> Result<()> {
let start = timestamp - Duration::minutes(duration_minutes / 2);
let end = timestamp + Duration::minutes(duration_minutes / 2);
@@ -575,7 +578,7 @@ impl FrameCache {
let cache_clone = self.clone();
tokio::spawn(async move {
let result = cache_clone
- .extract_frames_batch(start, end, extract_tx)
+ .extract_frames_batch(start, end, extract_tx, target_fps)
.await;
debug!("extraction task completed: {:?}", result.is_ok());
result
@@ -636,6 +639,7 @@ async fn extract_frame(
tasks: Vec<(FrameData, OCREntry)>,
frame_tx: FrameChannel,
cache_tx: mpsc::Sender<CachedFrame>,
+ target_fps: f64,
) -> Result<usize> {
if !is_video_file_complete(&ffmpeg, &video_file_path).await? {
debug!("skipping incomplete video file: {}", video_file_path);
@@ -655,11 +659,11 @@ async fn extract_frame(
let output_pattern = temp_dir.path().join("frame%d.jpg");
// Calculate frame interval based on target FPS
- let frame_interval = (source_fps / 0.1).round() as i64; // Using 0.1 as target FPS
+ let frame_interval = (source_fps / target_fps).round() as i64;
debug!(
"extracting frames with interval {} (source: {}fps, target: {}fps)",
- frame_interval, source_fps, 0.1
+ frame_interval, source_fps, target_fps
);
// Calculate which frames to extract