@veye_xumm OK, thank you.
Posts made by EtherealHorizon
-
Multi-camera hardware trigger sync: can the timestamp values be matched directly?
I use Python multiprocessing to run the v4l2-ctl command line via subprocess for each camera and read the timestamps printed by --stream-mmap --verbose. Are these timestamp values consistent across the different cameras (i.e. can I simply treat the frames with the closest timestamps as the synchronized pair)? Or does each camera's timer have a different starting point?
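As a reference for the workflow in question, here is a minimal sketch, assuming two cameras at /dev/video0 and /dev/video1 and that the "ts:" values printed by v4l2-ctl --stream-mmap --verbose are the numbers being compared; capture_timestamps is a hypothetical helper, and pairing by nearest timestamp is only meaningful if both devices stamp frames from the same clock origin, which is exactly the open question here.

import re
import subprocess
from multiprocessing import Pool

# Hypothetical helper: stream `count` frames from one device with v4l2-ctl and
# return the "ts:" values printed by --verbose (one float per dequeued buffer).
def capture_timestamps(device, count=100):
    cmd = [
        "v4l2-ctl", "-d", device,
        "--stream-mmap", f"--stream-count={count}", "--verbose",
        "--stream-to=/dev/null",
    ]
    out = subprocess.run(cmd, capture_output=True, text=True)
    # --verbose prints lines like: "cap dqbuf: 0 seq: 0 bytesused: ... ts: 1234.567890 ..."
    return [float(m) for m in re.findall(r"ts:\s*([0-9]+\.[0-9]+)", out.stdout + out.stderr)]

if __name__ == "__main__":
    devices = ["/dev/video0", "/dev/video1"]
    # One worker process per camera so both streams are captured concurrently.
    with Pool(len(devices)) as pool:
        ts0, ts1 = pool.map(capture_timestamps, devices)

    # Pair each camera-0 frame with the closest-in-time camera-1 frame.
    for i, t0 in enumerate(ts0):
        j = min(range(len(ts1)), key=lambda k: abs(ts1[k] - t0))
        print(f"cam0 frame {i} ({t0:.6f}s) <-> cam1 frame {j} ({ts1[j]:.6f}s), "
              f"delta {abs(ts1[j] - t0) * 1e3:.3f} ms")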
-
MV IMX287 multi-camera 12-bit recording issue
On a Jetson AGX Orin with an ADP-N4 carrying two IMX287 cameras, camera 0 is on bus 30 and camera 1 on bus 31.
When the I2C pixelformat is set to 12-bit (value 2) and the v4l2 pixelformat is also set to XY12, camera 0 records in 12-bit as expected, but camera 1 reports errors.
Camera 1 fails to record whenever its I2C pixelformat is set to 2: it reports errors, and even with the v4l2 side set to 12-bit it still records in 8-bit.
Error output:
v4l2-ctl --stream-mmap --stream-count=1000 -d /dev/video1 --verbose --stream-to=test1.raw
VIDIOC_QUERYCAP: ok
VIDIOC_REQBUFS returned 0 (Success)
VIDIOC_QUERYBUF returned 0 (Success)
VIDIOC_QUERYBUF returned 0 (Success)
VIDIOC_QUERYBUF returned 0 (Success)
VIDIOC_QUERYBUF returned 0 (Success)
VIDIOC_QBUF returned 0 (Success)
VIDIOC_QBUF returned 0 (Success)
VIDIOC_QBUF returned 0 (Success)
VIDIOC_QBUF returned 0 (Success)
VIDIOC_STREAMON returned 0 (Success)
cap dqbuf: 0 seq: 0 bytesused: 382976 ts: 0.000000 (error, ts-monotonic, ts-src-eof)
cap dqbuf: 1 seq: 1 bytesused: 382976 ts: 0.000000 (error, ts-monotonic, ts-src-eof)
cap dqbuf: 2 seq: 2 bytesused: 382976 ts: 0.000000 (error, ts-monotonic, ts-src-eof)
cap dqbuf: 3 seq: 3 bytesused: 382976 ts: 0.000000 (error, ts-monotonic, ts-src-eof)
I2C setting (command and its output):
sudo ./mv_mipi_i2c.sh -w -f pixelformat -p1 2 -b 31
w pixelformat is 2
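As a quick sanity check on what camera 1 actually wrote, the raw dump can be inspected with NumPy. This is a sketch under assumptions: the 704x544 ROI from the capture script further down, 8-bit GREY at 1 byte per pixel (704 * 544 = 382976 bytes per frame, which matches the bytesused in the error output above), and 12-bit data assumed to sit in a 16-bit little-endian container at twice that size.

import numpy as np

# Assumed frame geometry (matches the ROI used in the capture script below).
WIDTH, HEIGHT = 704, 544

def load_raw_frames(path, bits):
    # bits=8  -> GREY, 1 byte per pixel
    # bits=12 -> assumed 16-bit little-endian container per pixel
    dtype = np.uint8 if bits == 8 else np.dtype("<u2")
    data = np.fromfile(path, dtype=dtype)
    frame_px = WIDTH * HEIGHT
    n_frames = data.size // frame_px
    return data[:n_frames * frame_px].reshape(n_frames, HEIGHT, WIDTH)

# bytesused 382976 == 704 * 544 suggests camera 1 really delivered 8-bit frames
frames = load_raw_frames("test1.raw", bits=8)
print(frames.shape, frames.dtype, frames.max())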
-
RE: IMX287 OpenCV Python multi-threaded multi-camera hardware trigger frame rate drop
@veye_xumm After some experimenting I couldn't find one, so in the end I used Python multiprocessing to run the v4l2-ctl command line via subprocess for each camera and read the timestamps from --stream-mmap --verbose; everything is working well so far. I'd also like to ask how to do real-time binning, i.e. summing 2x2 or 4x4 blocks of pixel values in real time before saving; our C skills are pretty weak, haha.
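On the real-time binning question, here is a minimal NumPy sketch of summed 2x2 / 4x4 binning for a mono frame, done in Python after capture rather than in the driver; the function name and the widening to uint32 before summing are my own choices, not anything from the camera SDK.

import numpy as np

def bin_sum(frame, factor=2):
    # Sum non-overlapping factor x factor blocks of a 2-D mono frame.
    # Values are widened to uint32 first so 8-bit or 12-bit sums cannot overflow.
    h, w = frame.shape
    h, w = h - h % factor, w - w % factor          # crop so the factor divides evenly
    blocks = frame[:h, :w].astype(np.uint32)
    blocks = blocks.reshape(h // factor, factor, w // factor, factor)
    return blocks.sum(axis=(1, 3))

# Example: 2x2 binning of a 544x704 8-bit frame
frame = np.random.randint(0, 256, (544, 704), dtype=np.uint8)
binned = bin_sum(frame, factor=2)                  # shape (272, 352)
np.save("binned_frame.npy", binned)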
-
RE: IMX287 OpenCV Python multi-threaded multi-camera hardware trigger frame rate drop
@veye_xumm
Thanks for the reminder; after switching to multiprocessing the frame rate is back to normal. Another question: the v4l2-python package hasn't been updated for years, so I can't get the timestamps directly through it. Is there a known workaround, or a reasonably mature C++ example?
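For completeness, a minimal sketch of the one-process-per-camera layout mentioned above, assuming OpenCV's V4L2 backend and the /dev/video0 and /dev/video1 device paths; the v4l2-ctl trigger and format setup from the script below would still run beforehand, and capture_worker is a hypothetical name.

import multiprocessing as mp
import time
import cv2
import numpy as np

# Hypothetical per-camera worker: each process owns its own VideoCapture,
# so the two cameras no longer contend for one interpreter's GIL.
def capture_worker(device, n_frames, out_prefix):
    cap = cv2.VideoCapture(device, cv2.CAP_V4L2)
    intervals = []
    last = None
    grabbed = 0
    while grabbed < n_frames:
        ret, frame = cap.read()
        if not ret:
            break
        now = time.perf_counter()
        if last is not None:
            intervals.append((now - last) * 1000.0)    # inter-frame gap in ms
        last = now
        grabbed += 1
    cap.release()
    np.save(f"{out_prefix}_intervals.npy", np.array(intervals))
    print(device, "mean interval (ms):", np.mean(intervals) if intervals else "n/a")

if __name__ == "__main__":
    procs = [
        mp.Process(target=capture_worker, args=("/dev/video0", 3000, "cam0")),
        mp.Process(target=capture_worker, args=("/dev/video1", 3000, "cam1")),
    ]
    for p in procs:
        p.start()
    for p in procs:
        p.join()
-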
RE: IMX287 OpenCV Python multi-threaded multi-camera hardware trigger frame rate drop
@veye_xumm I tested at 200 fps and at 100 fps; the actual frame rate sits around 67-69 fps and is unstable. Could you advise how to make the adjustment at the driver level?
-
IMX287 OpenCV Python multi-threaded multi-camera hardware trigger frame rate drop
As the title says: after setting the frame rate to its maximum via v4l2, single-threaded multi-camera capture roughly keeps up with the external hardware trigger frequency, although with noticeable fluctuation. With the multi-threaded multi-camera capture code below, however, the frame rate falls far below the external trigger frequency.
import cv2
import argparse
import subprocess
import time
import numpy as np
import threading
from copy import deepcopy

thread_lock = threading.Lock()
thread_exit = False


class VideoThread(threading.Thread):
    # Capture thread: one instance per /dev/videoN device.
    def __init__(self, camera_id, img_height, img_width):
        super(VideoThread, self).__init__()
        self.camera_id = camera_id
        self.img_height = img_height
        self.img_width = img_width
        self.frame = np.zeros((img_height, img_width, 3), dtype=np.uint8)

    def get_frame(self):
        return deepcopy(self.frame)

    def run(self):
        global thread_exit
        cap = cv2.VideoCapture(self.camera_id)
        cap.set(cv2.CAP_PROP_FRAME_WIDTH, self.img_width)
        cap.set(cv2.CAP_PROP_FRAME_HEIGHT, self.img_height)
        frame_num = 0
        timestampes = []
        time_start = time.perf_counter()  # guard: referenced below even if the first read fails
        while not thread_exit:
            ret, frame = cap.read()
            if ret:
                #thread_lock.acquire()
                self.frame = frame
                if frame_num == 0:
                    time_start = time.perf_counter()
                frame_num = frame_num + 1
                #thread_lock.release()
                print('get' + self.camera_id[-6:] + f'frame {frame_num}')
            else:
                thread_exit = True
            # record elapsed time (ms) since this camera's first frame, once per loop iteration
            time_end = time.perf_counter()
            time_ms = (time_end - time_start) * 1000
            timestampes.append(time_ms)
            if frame_num >= 3000:
                thread_exit = True
        timestampes = np.array(timestampes)
        np.save(self.camera_id[-6:] + '.npy', timestampes)
        cap.release()


def main():
    # Set up command-line argument parser
    parser = argparse.ArgumentParser(description='Real-time display of GREY image from mv-cam')
    parser.add_argument('--roix', type=int, default=0, help='roi start x (default: 0)')
    parser.add_argument('--roiy', type=int, default=0, help='roi start y (default: 0)')
    parser.add_argument('--width', type=int, default=704, help='image width (default: 704)')
    parser.add_argument('--height', type=int, default=544, help='image height (default: 544)')
    parser.add_argument('--fps', type=int, default=320, help='frame rate (default: 320)')
    parser.add_argument('--pixelformat', type=int, default=12, help='pixel format (default: 12)')
    args = parser.parse_args()

    # Configure ROI, format, frame rate and external trigger through v4l2-ctl
    v4l2_cmd = f"v4l2-ctl --set-ctrl roi_x={args.roix}"
    subprocess.run(v4l2_cmd, shell=True)
    v4l2_cmd = f"v4l2-ctl --set-ctrl roi_y={args.roiy}"
    subprocess.run(v4l2_cmd, shell=True)
    v4l2_cmd = f"v4l2-ctl --set-fmt-video=width={args.width},height={args.height}"
    subprocess.run(v4l2_cmd, shell=True)
    v4l2_cmd = f"v4l2-ctl --set-ctrl frame_rate={args.fps}"
    subprocess.run(v4l2_cmd, shell=True)
    v4l2_cmd = "v4l2-ctl --set-ctrl low_latency_mode=1"
    subprocess.run(v4l2_cmd, shell=True)
    v4l2_cmd = f"v4l2-ctl --set-ctrl pixelformat=XY{args.pixelformat}"
    subprocess.run(v4l2_cmd, shell=True)
    #v4l2_cmd = "v4l2-ctl --set-ctrl exposure_auto=1"
    #subprocess.run(v4l2_cmd, shell=True)
    #v4l2_cmd = "v4l2-ctl --set-ctrl exposure_absulute=20"
    #subprocess.run(v4l2_cmd, shell=True)
    v4l2_cmd = "v4l2-ctl --set-ctrl trigger_mode=1"
    subprocess.run(v4l2_cmd, shell=True)
    v4l2_cmd = "v4l2-ctl --set-ctrl trigger_src=1"
    subprocess.run(v4l2_cmd, shell=True)
    v4l2_cmd = "v4l2-ctl --set-ctrl vi_time_out_disable=1"
    subprocess.run(v4l2_cmd, shell=True)

    global thread_exit
    img_height = args.height
    img_width = args.width
    thread_0 = VideoThread('/dev/video0', img_height, img_width)
    thread_1 = VideoThread('/dev/video1', img_height, img_width)
    thread_0.start()
    thread_1.start()
    #frame_num = 0
    while not thread_exit:
        #thread_lock.acquire()
        #frame_0 = thread_0.get_frame()
        #print(f'get cam0 {frame_num}')
        #frame_1 = thread_1.get_frame()
        #print(f'get cam1 {frame_num}')
        #thread_lock.release()
        #thread_0.join()
        #thread_1.join()
        #frame_num = frame_num + 1
        #if frame_num >= 3000:
        #    thread_exit = True
        pass
    print('done!')
    # Release resources
    v4l2_cmd = "v4l2-ctl --set-ctrl vi_time_out_disable=0"
    subprocess.run(v4l2_cmd, shell=True)


if __name__ == '__main__':
    main()