fffiloni committed on
Commit fd0d56e
1 Parent(s): 3cb296b

Update app.py

Files changed (1)
  1. app.py +18 -4
app.py CHANGED
@@ -29,6 +29,19 @@ from sam2.build_sam import build_sam2_video_predictor
 
 from moviepy.editor import ImageSequenceClip
 
+def get_video_fps(video_path):
+    # Open the video file
+    cap = cv2.VideoCapture(video_path)
+
+    if not cap.isOpened():
+        print("Error: Could not open video.")
+        return None
+
+    # Get the FPS of the video
+    fps = cap.get(cv2.CAP_PROP_FPS)
+
+    return fps
+
 def preprocess_image(image):
     return image, gr.State([]), gr.State([]), image, gr.State()
 
@@ -258,7 +271,7 @@ def sam_process(input_first_frame_image, checkpoint, tracking_points, trackings_
 
     return "output_first_frame.jpg", frame_names, inference_state
 
-def propagate_to_all(checkpoint, stored_inference_state, stored_frame_names, video_frames_dir, vis_frame_type):
+def propagate_to_all(video_in, checkpoint, stored_inference_state, stored_frame_names, video_frames_dir, vis_frame_type):
     #### PROPAGATION ####
     sam2_checkpoint, model_cfg = load_model(checkpoint)
     predictor = build_sam2_video_predictor(model_cfg, sam2_checkpoint)
@@ -311,7 +324,8 @@ def propagate_to_all(checkpoint, stored_inference_state, stored_frame_names, vid
         return gr.update(value=jpeg_images, visible=True), gr.update(visible=False, value=None)
     elif vis_frame_type == "render":
         # Create a video clip from the image sequence
-        fps = 24 # Frames per second
+        original_fps = get_video_fps(video_in)
+        fps = original_fps # Frames per second
         clip = ImageSequenceClip(jpeg_images, fps=fps)
         # Write the result to a file
         final_vid_output_path = "output_video.mp4"
@@ -353,7 +367,7 @@ with gr.Blocks() as demo:
     with gr.Column():
         output_result = gr.Image()
         with gr.Row():
-            vis_frame_type = gr.Radio(choices=["check", "render"], value="render", scale=2)
+            vis_frame_type = gr.Radio(choices=["check", "render"], value="check", scale=2)
             propagate_btn = gr.Button("Propagate", scale=1)
         output_propagated = gr.Gallery(visible=False)
         output_video = gr.Video(visible=False)
@@ -388,7 +402,7 @@ with gr.Blocks() as demo:
 
     propagate_btn.click(
         fn = propagate_to_all,
-        inputs = [checkpoint, stored_inference_state, stored_frame_names, video_frames_dir, vis_frame_type],
+        inputs = [video_in, checkpoint, stored_inference_state, stored_frame_names, video_frames_dir, vis_frame_type],
         outputs = [output_propagated, output_video]
     )
 
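
For reference, the change amounts to this pattern: probe the input video's frame rate with OpenCV and reuse it when rendering the propagated frames with moviepy, instead of the previously hard-coded 24 fps. Below is a minimal standalone sketch of that pattern; the file names and the fallback to 24 fps are illustrative and not part of the commit.

# Sketch of the fps-preservation pattern introduced in this commit:
# read the source video's FPS with OpenCV, then render the image
# sequence with moviepy at that same rate. Paths are illustrative.
import cv2
from moviepy.editor import ImageSequenceClip

def get_video_fps(video_path):
    cap = cv2.VideoCapture(video_path)
    if not cap.isOpened():
        print("Error: Could not open video.")
        return None
    fps = cap.get(cv2.CAP_PROP_FPS)
    cap.release()
    return fps

fps = get_video_fps("examples/input.mp4") or 24  # fall back to 24 fps if probing fails
clip = ImageSequenceClip(["frame_0000.jpg", "frame_0001.jpg"], fps=fps)
clip.write_videofile("output_video.mp4")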