Merge pull request #782 from zrezke/cam_test_gui
Cam test gui
themarpe authored Apr 3, 2023
2 parents 1f162f1 + e914e27 commit 70b96ce
Showing 4 changed files with 533 additions and 58 deletions.
25 changes: 25 additions & 0 deletions utilities/README.md
@@ -24,3 +24,28 @@ pyinstaller --onefile -w --icon=assets/icon.ico --add-data="assets/icon.ico;asse
```

Optionally, append `--runtime-tmpdir [path or .]` to modify where the temporary directory should be created when launched.
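For instance, a minimal sketch (the script name and the remaining flags are placeholders, not the exact command shown above):
```sh
# Hypothetical example: with --runtime-tmpdir ., the --onefile bundle unpacks
# itself under the current working directory at launch instead of the system
# temp directory.
pyinstaller --onefile -w --runtime-tmpdir . your_script.py
```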


## Cam Test
To start the cam test with a GUI, run:
```sh
python3 cam_test.py
```
To run the cam test without the GUI, pass command-line arguments to `cam_test.py`.
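For example (the flag values below are illustrative; run `python3 cam_test.py --help` for the full list of options):
```sh
# Enable the RGB camera as color and the left/right cameras as mono,
# and connect to a specific device by its MX ID (placeholder shown).
python3 cam_test.py -cams rgb,c left,m right,m -d <MX_ID>
```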

### Bundled executable
Requirements:
```
# Linux/macOS
python3 -m pip install pyinstaller
# Windows
python -m pip install pyinstaller
```

To build a bundled executable, issue the following command:
```sh
pyinstaller -w cam_test.py --hidden-import PyQt5.sip
```

The executable will be located in the `dist/cam_test` folder.
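With PyInstaller's default one-folder output (the command above does not use `--onefile`), it can then be launched directly from that folder, e.g.:
```sh
# Linux/macOS
./dist/cam_test/cam_test
# Windows
dist\cam_test\cam_test.exe
```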
186 changes: 128 additions & 58 deletions utilities/cam_test.py
@@ -37,22 +37,29 @@

import cv2
import argparse
import depthai as dai
import collections
import time
from itertools import cycle
from pathlib import Path
import sys
import cam_test_gui
import signal


def socket_type_pair(arg):
socket, type = arg.split(',')
if not (socket in ['rgb', 'left', 'right', 'camd']): raise ValueError("")
if not (type in ['m', 'mono', 'c', 'color']): raise ValueError("")
if not (socket in ['rgb', 'left', 'right', 'camd']):
raise ValueError("")
if not (type in ['m', 'mono', 'c', 'color']):
raise ValueError("")
is_color = True if type in ['c', 'color'] else False
return [socket, is_color]


parser = argparse.ArgumentParser()
parser.add_argument('-cams', '--cameras', type=socket_type_pair, nargs='+',
default=[['rgb', True], ['left', False], ['right', False], ['camd', True]],
default=[['rgb', True], ['left', False],
['right', False], ['camd', True]],
help="Which camera sockets to enable, and type: c[olor] / m[ono]. "
"E.g: -cams rgb,m right,c . Default: rgb,c left,m right,m camd,c")
parser.add_argument('-mres', '--mono-resolution', type=int, default=800, choices={480, 400, 720, 800},
@@ -71,8 +78,25 @@ def socket_type_pair(arg):
help="Make OpenCV windows resizable. Note: may introduce some artifacts")
parser.add_argument('-tun', '--camera-tuning', type=Path,
help="Path to custom camera tuning database")
parser.add_argument('-d', '--device', default="", type=str,
help="Optional MX ID of the device to connect to.")

parser.add_argument('-ctimeout', '--connection-timeout', default=30000,
help="Connection timeout in ms. Default: %(default)s (sets DEPTHAI_CONNECTION_TIMEOUT environment variable)")

parser.add_argument('-btimeout', '--boot-timeout', default=30000,
help="Boot timeout in ms. Default: %(default)s (sets DEPTHAI_BOOT_TIMEOUT environment variable)")

args = parser.parse_args()

# Set timeouts before importing depthai
os.environ["DEPTHAI_CONNECTION_TIMEOUT"] = str(args.connection_timeout)
os.environ["DEPTHAI_BOOT_TIMEOUT"] = str(args.boot_timeout)
import depthai as dai

if len(sys.argv) == 1:
cam_test_gui.main()

cam_list = []
cam_type_color = {}
print("Enabled cameras:")
@@ -85,24 +109,24 @@ def socket_type_pair(arg):
print("DepthAI path:", dai.__file__)

cam_socket_opts = {
'rgb' : dai.CameraBoardSocket.RGB, # Or CAM_A
'left' : dai.CameraBoardSocket.LEFT, # Or CAM_B
'right': dai.CameraBoardSocket.RIGHT, # Or CAM_C
'camd' : dai.CameraBoardSocket.CAM_D,
'rgb': dai.CameraBoardSocket.RGB, # Or CAM_A
'left': dai.CameraBoardSocket.LEFT, # Or CAM_B
'right': dai.CameraBoardSocket.RIGHT, # Or CAM_C
'camd': dai.CameraBoardSocket.CAM_D,
}

cam_socket_to_name = {
'RGB' : 'rgb',
'LEFT' : 'left',
'RGB': 'rgb',
'LEFT': 'left',
'RIGHT': 'right',
'CAM_D': 'camd',
}

rotate = {
'rgb' : args.rotate in ['all', 'rgb'],
'left' : args.rotate in ['all', 'mono'],
'rgb': args.rotate in ['all', 'rgb'],
'left': args.rotate in ['all', 'mono'],
'right': args.rotate in ['all', 'mono'],
'camd' : args.rotate in ['all', 'rgb'],
'camd': args.rotate in ['all', 'rgb'],
}

mono_res_opts = {
@@ -134,9 +158,11 @@ def __init__(self, window_size=30):
self.fps = 0

def update(self, timestamp=None):
if timestamp == None: timestamp = time.monotonic()
if timestamp == None:
timestamp = time.monotonic()
count = len(self.dq)
if count > 0: self.fps = count / (timestamp - self.dq[0])
if count > 0:
self.fps = count / (timestamp - self.dq[0])
self.dq.append(timestamp)

def get(self):
@@ -145,7 +171,7 @@ def get(self):
# Start defining a pipeline
pipeline = dai.Pipeline()
# Uncomment to get better throughput
#pipeline.setXLinkChunkSize(0)
# pipeline.setXLinkChunkSize(0)

control = pipeline.createXLinkIn()
control.setStreamName('control')
@@ -159,21 +185,21 @@ def get(self):
cam[c] = pipeline.createColorCamera()
cam[c].setResolution(color_res_opts[args.color_resolution])
cam[c].setIspScale(1, args.isp_downscale)
#cam[c].initialControl.setManualFocus(85) # TODO
# cam[c].initialControl.setManualFocus(85) # TODO
cam[c].isp.link(xout[c].input)
else:
cam[c] = pipeline.createMonoCamera()
cam[c].setResolution(mono_res_opts[args.mono_resolution])
cam[c].out.link(xout[c].input)
cam[c].setBoardSocket(cam_socket_opts[c])
# Num frames to capture on trigger, with first to be discarded (due to degraded quality)
#cam[c].initialControl.setExternalTrigger(2, 1)
#cam[c].initialControl.setStrobeExternal(48, 1)
#cam[c].initialControl.setFrameSyncMode(dai.CameraControl.FrameSyncMode.INPUT)
# cam[c].initialControl.setExternalTrigger(2, 1)
# cam[c].initialControl.setStrobeExternal(48, 1)
# cam[c].initialControl.setFrameSyncMode(dai.CameraControl.FrameSyncMode.INPUT)

#cam[c].initialControl.setManualExposure(15000, 400) # exposure [us], iso
# cam[c].initialControl.setManualExposure(15000, 400) # exposure [us], iso
# When set, takes effect after the first 2 frames
#cam[c].initialControl.setManualWhiteBalance(4000) # light temperature in K, 1000..12000
# cam[c].initialControl.setManualWhiteBalance(4000) # light temperature in K, 1000..12000
control.out.link(cam[c].inputControl)
if rotate[c]:
cam[c].setImageOrientation(dai.CameraImageOrientation.ROTATE_180_DEG)
@@ -183,13 +209,26 @@ def get(self):
if args.camera_tuning:
pipeline.setCameraTuningBlobPath(str(args.camera_tuning))

def exit_cleanly(signum, frame):
print("Exiting cleanly")
cv2.destroyAllWindows()
sys.exit(0)

signal.signal(signal.SIGINT, exit_cleanly)


# Pipeline is defined, now we can connect to the device
with dai.Device(pipeline) as device:
#print('Connected cameras:', [c.name for c in device.getConnectedCameras()])
device = dai.Device.getDeviceByMxId(args.device)
dai_device_args = [pipeline]
if device[0]:
dai_device_args.append(device[1])
with dai.Device(*dai_device_args) as device:
# print('Connected cameras:', [c.name for c in device.getConnectedCameras()])
print('Connected cameras:')
cam_name = {}
for p in device.getConnectedCameraFeatures():
print(f' -socket {p.socket.name:6}: {p.sensorName:6} {p.width:4} x {p.height:4} focus:', end='')
print(
f' -socket {p.socket.name:6}: {p.sensorName:6} {p.width:4} x {p.height:4} focus:', end='')
print('auto ' if p.hasAutofocus else 'fixed', '- ', end='')
print(*[type.name for type in p.supportedTypes])
cam_name[cam_socket_to_name[p.socket.name]] = p.sensorName
@@ -237,9 +276,12 @@ def get(self):
dotIntensity = 0
floodIntensity = 0

awb_mode = cycle([item for name, item in vars(dai.CameraControl.AutoWhiteBalanceMode).items() if name.isupper()])
anti_banding_mode = cycle([item for name, item in vars(dai.CameraControl.AntiBandingMode).items() if name.isupper()])
effect_mode = cycle([item for name, item in vars(dai.CameraControl.EffectMode).items() if name.isupper()])
awb_mode = cycle([item for name, item in vars(
dai.CameraControl.AutoWhiteBalanceMode).items() if name.isupper()])
anti_banding_mode = cycle([item for name, item in vars(
dai.CameraControl.AntiBandingMode).items() if name.isupper()])
effect_mode = cycle([item for name, item in vars(
dai.CameraControl.EffectMode).items() if name.isupper()])

ae_comp = 0
ae_lock = False
@@ -252,34 +294,43 @@ def get(self):
chroma_denoise = 0
control = 'none'

print("Cam:", *[' ' + c.ljust(8) for c in cam_list], "[host | capture timestamp]")
print("Cam:", *[' ' + c.ljust(8)
for c in cam_list], "[host | capture timestamp]")

capture_list = []
while True:
for c in cam_list:
pkt = q[c].tryGet()
try:
pkt = q[c].tryGet()
except Exception as e:
print(e)
exit_cleanly(0, 0)
if pkt is not None:
fps_host[c].update()
fps_capt[c].update(pkt.getTimestamp().total_seconds())
frame = pkt.getCvFrame()
if c in capture_list:
width, height = pkt.getWidth(), pkt.getHeight()
capture_file_name = ('capture_' + c + '_' + cam_name[c]
+ '_' + str(width) + 'x' + str(height)
+ '_exp_' + str(int(pkt.getExposureTime().total_seconds()*1e6))
+ '_iso_' + str(pkt.getSensitivity())
+ '_lens_' + str(pkt.getLensPosition())
+ '_' + capture_time
+ '_' + str(pkt.getSequenceNum())
+ ".png"
)
+ '_' + str(width) + 'x' + str(height)
+ '_exp_' +
str(int(
pkt.getExposureTime().total_seconds()*1e6))
+ '_iso_' + str(pkt.getSensitivity())
+ '_lens_' +
str(pkt.getLensPosition())
+ '_' + capture_time
+ '_' + str(pkt.getSequenceNum())
+ ".png"
)
print("\nSaving:", capture_file_name)
cv2.imwrite(capture_file_name, frame)
capture_list.remove(c)

cv2.imshow(c, frame)
print("\rFPS:",
*["{:6.2f}|{:6.2f}".format(fps_host[c].get(), fps_capt[c].get()) for c in cam_list],
*["{:6.2f}|{:6.2f}".format(fps_host[c].get(),
fps_capt[c].get()) for c in cam_list],
end='', flush=True)

key = cv2.waitKey(1)
@@ -297,26 +348,33 @@ def get(self):
elif key == ord('f'):
print("Autofocus enable, continuous")
ctrl = dai.CameraControl()
ctrl.setAutoFocusMode(dai.CameraControl.AutoFocusMode.CONTINUOUS_VIDEO)
ctrl.setAutoFocusMode(
dai.CameraControl.AutoFocusMode.CONTINUOUS_VIDEO)
controlQueue.send(ctrl)
elif key == ord('e'):
print("Autoexposure enable")
ctrl = dai.CameraControl()
ctrl.setAutoExposureEnable()
controlQueue.send(ctrl)
elif key in [ord(','), ord('.')]:
if key == ord(','): lensPos -= LENS_STEP
if key == ord('.'): lensPos += LENS_STEP
if key == ord(','):
lensPos -= LENS_STEP
if key == ord('.'):
lensPos += LENS_STEP
lensPos = clamp(lensPos, lensMin, lensMax)
print("Setting manual focus, lens position: ", lensPos)
ctrl = dai.CameraControl()
ctrl.setManualFocus(lensPos)
controlQueue.send(ctrl)
elif key in [ord('i'), ord('o'), ord('k'), ord('l')]:
if key == ord('i'): expTime -= EXP_STEP
if key == ord('o'): expTime += EXP_STEP
if key == ord('k'): sensIso -= ISO_STEP
if key == ord('l'): sensIso += ISO_STEP
if key == ord('i'):
expTime -= EXP_STEP
if key == ord('o'):
expTime += EXP_STEP
if key == ord('k'):
sensIso -= ISO_STEP
if key == ord('l'):
sensIso += ISO_STEP
expTime = clamp(expTime, expMin, expMax)
sensIso = clamp(sensIso, sensMin, sensMax)
print("Setting manual exposure, time: ", expTime, "iso: ", sensIso)
@@ -356,21 +414,33 @@ def get(self):
floodIntensity = 0
device.setIrFloodLightBrightness(floodIntensity)
elif key >= 0 and chr(key) in '34567890[]':
if key == ord('3'): control = 'awb_mode'
elif key == ord('4'): control = 'ae_comp'
elif key == ord('5'): control = 'anti_banding_mode'
elif key == ord('6'): control = 'effect_mode'
elif key == ord('7'): control = 'brightness'
elif key == ord('8'): control = 'contrast'
elif key == ord('9'): control = 'saturation'
elif key == ord('0'): control = 'sharpness'
elif key == ord('['): control = 'luma_denoise'
elif key == ord(']'): control = 'chroma_denoise'
if key == ord('3'):
control = 'awb_mode'
elif key == ord('4'):
control = 'ae_comp'
elif key == ord('5'):
control = 'anti_banding_mode'
elif key == ord('6'):
control = 'effect_mode'
elif key == ord('7'):
control = 'brightness'
elif key == ord('8'):
control = 'contrast'
elif key == ord('9'):
control = 'saturation'
elif key == ord('0'):
control = 'sharpness'
elif key == ord('['):
control = 'luma_denoise'
elif key == ord(']'):
control = 'chroma_denoise'
print("Selected control:", control)
elif key in [ord('-'), ord('_'), ord('+'), ord('=')]:
change = 0
if key in [ord('-'), ord('_')]: change = -1
if key in [ord('+'), ord('=')]: change = 1
if key in [ord('-'), ord('_')]:
change = -1
if key in [ord('+'), ord('=')]:
change = 1
ctrl = dai.CameraControl()
if control == 'none':
print("Please select a control first using keys 3..9 0 [ ]")