import csv
import json
import logging
import multiprocessing as mp
import os
import subprocess as sp
import sys

import click
import cv2
import numpy as np

sys.path.append("/workspace/frigate")

from frigate.config import FrigateConfig  # noqa: E402
from frigate.motion import MotionDetector  # noqa: E402
from frigate.object_detection import LocalObjectDetector  # noqa: E402
from frigate.object_processing import CameraState  # noqa: E402
from frigate.track.centroid_tracker import CentroidTracker  # noqa: E402
from frigate.util import (  # noqa: E402
    EventsPerSecond,
    SharedMemoryFrameManager,
    draw_box_with_label,
)
from frigate.video import (  # noqa: E402
    capture_frames,
    process_frames,
    start_or_restart_ffmpeg,
)

logging.basicConfig(level=logging.DEBUG)

logger = logging.getLogger(__name__)


def get_frame_shape(source):
    ffprobe_cmd = [
        "ffprobe",
        "-v",
        "panic",
        "-show_error",
        "-show_streams",
        "-of",
        "json",
        source,
    ]
    p = sp.run(ffprobe_cmd, capture_output=True)
    info = json.loads(p.stdout)
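
    # ffprobe's JSON output should contain a "streams" list, roughly of the form
    #   {"streams": [{"codec_type": "video", "width": 1920, "height": 1080, ...}], ...}
    # (illustrative values); the first video stream's height/width are used below.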
    video_info = [s for s in info["streams"] if s["codec_type"] == "video"][0]

    if video_info["height"] != 0 and video_info["width"] != 0:
        return (video_info["height"], video_info["width"], 3)

    # fallback to using opencv if ffprobe didn't succeed
    video = cv2.VideoCapture(source)
    ret, frame = video.read()
    frame_shape = frame.shape
    video.release()
    return frame_shape


class ProcessClip:
    def __init__(self, clip_path, frame_shape, config: FrigateConfig):
        self.clip_path = clip_path
        self.camera_name = "camera"
        self.config = config
        self.camera_config = self.config.cameras["camera"]
        self.frame_shape = self.camera_config.frame_shape
        self.ffmpeg_cmd = [
            c["cmd"] for c in self.camera_config.ffmpeg_cmds if "detect" in c["roles"]
        ][0]
        self.frame_manager = SharedMemoryFrameManager()
        self.frame_queue = mp.Queue()
        self.detected_objects_queue = mp.Queue()
        self.camera_state = CameraState(self.camera_name, config, self.frame_manager)

    def load_frames(self):
        fps = EventsPerSecond()
        skipped_fps = EventsPerSecond()
        current_frame = mp.Value("d", 0.0)
        frame_size = (
            self.camera_config.frame_shape_yuv[0]
            * self.camera_config.frame_shape_yuv[1]
        )
        ffmpeg_process = start_or_restart_ffmpeg(
            self.ffmpeg_cmd, logger, sp.DEVNULL, frame_size
        )
        capture_frames(
            ffmpeg_process,
            self.camera_name,
            self.camera_config.frame_shape_yuv,
            self.frame_manager,
            self.frame_queue,
            fps,
            skipped_fps,
            current_frame,
        )
        ffmpeg_process.wait()
        ffmpeg_process.communicate()

    def process_frames(
        self, object_detector, objects_to_track=["person"], object_filters={}
    ):
        mask = np.zeros((self.frame_shape[0], self.frame_shape[1], 1), np.uint8)
        mask[:] = 255
        motion_detector = MotionDetector(self.frame_shape, self.camera_config.motion)
        motion_detector.save_images = False

        object_tracker = CentroidTracker(self.camera_config.detect)
        process_info = {
            "process_fps": mp.Value("d", 0.0),
            "detection_fps": mp.Value("d", 0.0),
            "detection_frame": mp.Value("d", 0.0),
        }

        detection_enabled = mp.Value("d", 1)
        motion_enabled = mp.Value("d", True)
        stop_event = mp.Event()

        process_frames(
            self.camera_name,
            self.frame_queue,
            self.frame_shape,
            self.config.model,
            self.camera_config.detect,
            self.frame_manager,
            motion_detector,
            object_detector,
            object_tracker,
            self.detected_objects_queue,
            process_info,
            objects_to_track,
            object_filters,
            detection_enabled,
            motion_enabled,
            stop_event,
            exit_on_empty=True,
        )

    def stats(self, debug_path=None):
        total_regions = 0
        total_motion_boxes = 0
        object_ids = set()
        total_frames = 0
        top_score = 0  # keep top_score defined even if the queue is empty

        while not self.detected_objects_queue.empty():
            (
                camera_name,
                frame_time,
                current_tracked_objects,
                motion_boxes,
                regions,
            ) = self.detected_objects_queue.get()

            if debug_path:
                self.save_debug_frame(
                    debug_path, frame_time, current_tracked_objects.values()
                )

            self.camera_state.update(
                frame_time, current_tracked_objects, motion_boxes, regions
            )
            total_regions += len(regions)
            total_motion_boxes += len(motion_boxes)
            top_score = 0
            for id, obj in self.camera_state.tracked_objects.items():
                if not obj.false_positive:
                    object_ids.add(id)
                    if obj.top_score > top_score:
                        top_score = obj.top_score

            total_frames += 1

            self.frame_manager.delete(self.camera_state.previous_frame_id)

        return {
            "total_regions": total_regions,
            "total_motion_boxes": total_motion_boxes,
            "true_positive_objects": len(object_ids),
            "total_frames": total_frames,
            "top_score": top_score,
        }

    def save_debug_frame(self, debug_path, frame_time, tracked_objects):
        current_frame = cv2.cvtColor(
            self.frame_manager.get(
                f"{self.camera_name}{frame_time}", self.camera_config.frame_shape_yuv
            ),
            cv2.COLOR_YUV2BGR_I420,
        )
        # draw the bounding boxes on the frame
        for obj in tracked_objects:
            thickness = 2
            color = (0, 0, 175)
            if obj["frame_time"] != frame_time:
                thickness = 1
                color = (255, 0, 0)
            else:
                color = (255, 255, 0)

            # draw the object's bounding box
            box = obj["box"]
            draw_box_with_label(
                current_frame,
                box[0],
                box[1],
                box[2],
                box[3],
                obj["id"],
                f"{int(obj['score']*100)}% {int(obj['area'])}",
                thickness=thickness,
                color=color,
            )
            # draw the region the object was detected in
            region = obj["region"]
            draw_box_with_label(
                current_frame,
                region[0],
                region[1],
                region[2],
                region[3],
                "region",
                "",
                thickness=1,
                color=(0, 255, 0),
            )

        cv2.imwrite(
            f"{os.path.join(debug_path, os.path.basename(self.clip_path))}.{int(frame_time*1000000)}.jpg",
            current_frame,
        )
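

# Example invocation (assuming this script is saved as process_clip.py and run
# inside the Frigate dev container, where /labelmap.txt and the frigate package
# are importable; the clip and output paths below are placeholders):
#
#   python3 process_clip.py -p /media/clips -l person -o results.csv --debug-path /media/debug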
@click.command()
@click.option("-p", "--path", required=True, help="Path to clip or directory to test.")
@click.option("-l", "--label", default="person", help="Label name to detect.")
@click.option("-o", "--output", default=None, help="File to save csv of data")
@click.option("--debug-path", default=None, help="Path to output frames for debugging.")
def process(path, label, output, debug_path):
    clips = []
    if os.path.isdir(path):
        files = os.listdir(path)
        files.sort()
        clips = [os.path.join(path, file) for file in files]
    elif os.path.isfile(path):
        clips.append(path)

    json_config = {
        "mqtt": {"enabled": False},
        "detectors": {"coral": {"type": "edgetpu", "device": "usb"}},
        "cameras": {
            "camera": {
                "ffmpeg": {
                    "inputs": [
                        {
                            "path": "path.mp4",
                            "global_args": "-hide_banner",
                            "input_args": "-loglevel info",
                            "roles": ["detect"],
                        }
                    ]
                },
                "record": {"enabled": False},
            }
        },
    }

    object_detector = LocalObjectDetector(labels="/labelmap.txt")

    results = []
    for c in clips:
        logger.info(c)
        frame_shape = get_frame_shape(c)

        json_config["cameras"]["camera"]["detect"] = {
            "height": frame_shape[0],
            "width": frame_shape[1],
        }
        json_config["cameras"]["camera"]["ffmpeg"]["inputs"][0]["path"] = c

        frigate_config = FrigateConfig(**json_config)
        process_clip = ProcessClip(c, frame_shape, frigate_config)
        process_clip.load_frames()
        process_clip.process_frames(object_detector, objects_to_track=[label])

        results.append((c, process_clip.stats(debug_path)))

    positive_count = sum(
        1 for result in results if result[1]["true_positive_objects"] > 0
    )
    print(
        f"Objects were detected in {positive_count}/{len(results)} ({positive_count/len(results)*100:.2f}%) clip(s)."
    )

    if output:
        # open the output file for writing
        data_file = open(output, "w")

        # create the csv writer object
        csv_writer = csv.writer(data_file)
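
        # Each clip becomes one CSV row; the columns should be:
        #   file, total_regions, total_motion_boxes, true_positive_objects, total_frames, top_score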

        # counter used so the header row is only written once
        count = 0

        for result in results:
            if count == 0:
                # write the CSV header row
                header = ["file"] + list(result[1].keys())
                csv_writer.writerow(header)
                count += 1

            # write one row of data per clip
            csv_writer.writerow([result[0]] + list(result[1].values()))

        data_file.close()


if __name__ == "__main__":
    process()