Usage¶
Here is a small usage example that sets the exposure time and prints the value that actually got set (this might not always be exactly the value we requested), then starts the stream and gets a single frame together with its metadata.
import qamlib

cam = qamlib.Camera()  # Opens /dev/qtec/video0

# Try to set exposure time (us)
cam.set_control("Exposure Time, Absolute", 1000)
exp = cam.get_control("Exposure Time, Absolute")

# Print the exposure time that we ended up with
print(f"Got exposure time {exp}us")

# Start and stop streaming with context manager
with cam:
    meta, frame = cam.get_frame()
    print(meta.sequence)  # Frame number since start of streaming
Bigger example¶
1"""Demonstrates various Qamlib features."""
2
3import sys
4
5import qamlib
6
7
def needs_byteswap(frame):
    """Check if a byte-swap is needed for the PNM format."""
    # Single-byte samples have no endianness to worry about.
    if frame.itemsize == 1:
        return False
    order = frame.dtype.byteorder
    # PNM sample data is Big Endian; data that is already explicitly or
    # natively big-endian can be written out as-is.
    already_big = order == ">" or (order == "=" and sys.byteorder == "big")
    return not already_big
20
21
def write_ppm(name, frame):
    """Write frame as a binary PNM file.

    Args:
        name: Output file path.
        frame: Image array shaped (height, width, channels). One channel
            is written as P5 (greyscale PGM), anything else as P6 (PPM).
    """
    with open(name, "wb") as f:
        height, width, channels = frame.shape

        # P5 = binary greyscale, P6 = binary color. ("magic" avoids
        # shadowing the builtin `type`.)
        magic = "P5" if channels == 1 else "P6"

        # 8-bit data gets maxval 255; anything wider is written as 16-bit.
        max_val = 255 if frame.dtype == "uint8" else 65535

        ppm_header = f"{magic} {width} {height} {max_val}\n"
        f.write(bytearray(ppm_header, "ascii"))

        if needs_byteswap(frame):
            # Swap data in memory in order to properly write PNM,
            # since PPM is Big Endian by definition.
            # Note that if the operation is done in place
            # it will mess up the endianess when reading single values out,
            # so in that case use: frame = frame.byteswap(True).newbyteorder()
            # in order to keep the correct byte order.
            frame.byteswap().tofile(f)
        else:
            frame.tofile(f)
53
54
if __name__ == "__main__":
    # Device path may be given as the first argument; default to the
    # first Qtec video device.
    device = sys.argv[1] if len(sys.argv) > 1 else "/dev/qtec/video0"

    # Open video capture device
    try:
        cam = qamlib.Camera(device)
    except Exception as e:
        print(e)
        sys.exit(1)

    print("List of formats:")
    for fmt in cam.list_formats().values():
        print(fmt)
    print("\n")

    # V4L2 "settings"
    fps = 20.0
    width = 800
    height = 600

    cam.set_format("Y16_BE")
    # cam.set_format("Y16")
    # cam.set_format("GREY")
    cam.set_framerate(fps)
    cam.set_resolution(width, height)

    # Crop to the center of the crop bounds
    bounds = cam.get_crop_bounds()
    left = int(((bounds.width - bounds.left) - width) / 2) + bounds.left
    top = int(((bounds.height - bounds.top) - height) / 2) + bounds.top
    cam.set_crop(left, top, width, height)

    img_format = cam.get_format()

    print(f"FPS: {cam.get_framerate()}")
    print(f"Frame Size: {cam.get_resolution()}")
    print(f"Crop: {cam.get_crop()}")
    print(f"Pixel format: {img_format}")
    print("\n")

    # V4L2 controls
    ctrls = cam.list_controls()
    print("Found ", len(ctrls), " controls")
    for ctrl in ctrls.values():
        print(ctrl)
    print("\n")

    cam.set_control("Exposure Time Absolute", 5000)
    print(f"Exposure: {cam.get_control('Exposure Time Absolute')}")

    # Frame capture; the context manager starts/stops streaming
    print("Starting Streaming")
    with cam:
        for i in range(10):
            metadata, frame = cam.get_frame()

            print(metadata)
            print(frame.size, " ", frame.shape)

            write_ppm(f"/tmp/img{i}.pnm", frame)
    print("Done Streaming")
ExtendedControl usage¶
"""Demonstrates how to use extended V4L2 controls."""

import numpy as np

import qamlib


cam = qamlib.Camera()  # /dev/qtec/video0

controls = cam.list_controls()

lut_red_ctrl = controls["lut red"]

assert lut_red_ctrl.type == qamlib.ControlType.INTEGER

# An INTEGER control that carries a payload is an ArrayControl
assert lut_red_ctrl.flags.has_payload

# Build the LUT data: ones everywhere, 144 from index 500 on
values = np.ones([lut_red_ctrl.elements], dtype=np.int32)
values[500:] = 12**2

lut_red = qamlib.ArrayControlValue(values)
lut_green = cam.get_ext_control("lut green")
exposure = qamlib.IntegerControlValue(9992)

# Apply several extended controls in a single call
cam.set_controls(
    {
        "lut red": lut_red,
        "lut blue": lut_green,
        "exposure time absolute": exposure,
    }
)

print(cam.get_controls(["lut red", "lut blue", "exposure time absolute"]))
HDR example¶
"""Example of doing "HDR" with a trigger sequence.

This uses features from Qtechnology kernel patches
"""

import cv2

import qamlib


exposures = [5000, 15000, 29000, 41000, 50000]

# Defaults to /dev/qtec/video0
cam = qamlib.Camera()

# External trigger sequence
cam.set_control("trigger_mode", 5)

trig_seq = qamlib.TriggerSequenceValue()

# Used to calculate minimal frame delay
fot = cam.get_control("frame_overhead_time")
rot = cam.get_control("read_out_time")

# Create trigger sequence with minimal delay. The sequence wraps around,
# so the delay after each exposure is computed against the *next* exposure
# (the first one again after the last).
for i, exp in enumerate(exposures):
    next_exp = exposures[(i + 1) % len(exposures)]
    delay = max(exp + fot, exp + fot + rot - next_exp)
    trig_seq.add_exposure(exp, exp, delay, 0)

# Set trigger sequence
cam.set_ext_control("trigger sequence", trig_seq)

# Set white balance, values from qtec-camera-gwt
cam.set_control("Red Balance", 32604)
cam.set_control("Green Balance", 16384)
cam.set_control("Blue Balance", 30802)

# Use BGR to avoid having to make OpenCV do a RGB -> BGR conversion
cam.set_format("BGR3")

# HDR Fusion merger
merge = cv2.createMergeMertens()

# Start streaming
with cam:
    images = []

    # Trigger capture, since we are in external trigger mode
    cam.set_control("Manual Trigger", 1)

    for exposure in exposures:
        name = f"exposure_{exposure}.png"

        _, img = cam.get_frame(timeout=1, buffered=True)

        cv2.imwrite(name, img)

        images.append(img)

    fusion = merge.process(images)
    cv2.imwrite("fusion.png", fusion * 255)  # Values after merge are in [0, 1]
Events example¶
1"""Demonstrates how to use Qamlib events."""
2
3import qamlib
4
5
def _cb(event):
    """Handle an event delivered by the EventDevice.

    Raises:
        ValueError: If the event is not a control event.
    """
    if event.type == qamlib.EventType.CTRL:
        print("Got control event")
    else:
        raise ValueError("Unknown event type")
12
13
# Set up the event device with our callback before subscribing
evs = qamlib.EventDevice()
evs.set_callback(_cb)

cam = qamlib.Camera()

# Subscribe to events for all controls
for ctrl in cam.list_controls().values():
    evs.subscribe(qamlib.EventType.CTRL, ctrl.id)

# Start listening
evs.start()

# Do stuff

# Stop listening
evs.stop()
Dual Head example¶
1"""Dual camera example.
2
3This is an example of using a Qtec dual head camera setup, in which the
4secondary head (/dev/qtec/video1) is triggered by the main head, in order to
5have the frames from the two sensors be synchronized. The example is based on
6having one RGB sensor and one SWIR sensor.
7"""
8
9import cv2
10
11import qamlib
12
13
14SAVE_DIR = "/home/root/test"
15
16
17# Frame capture
def stream_on(rgb_cam, swir_cam, fps=5.0):
    """Start both camera streams and save synchronized frame pairs.

    Runs until frames are dropped, the heads fall out of sync, or an
    exception occurs; both streams are always stopped before returning.

    Args:
        rgb_cam: Camera for the RGB head.
        swir_cam: Camera for the SWIR head.
        fps: Frame rate, used only to decide how often to print a
            progress line (once every `fps` frames). Previously this was
            read from an undeclared module global.
    """
    i1 = 0
    i2 = 0

    print("Starting Streaming")
    try:
        rgb_cam.start()
        swir_cam.start()
        while True:
            # Using 'buffered=True' will return the next frame in the queue
            # and throw a 'DroppedFrameException' if the queue gets filled up.
            # Set 'overflow_exception=False' in the Camera constructor if you
            # want to disable this exception, but then it will be necessary
            # to check for dropped frames using the sequence nr from the
            # metadata.
            rgb_meta, rgb_frame = rgb_cam.get_frame(timeout=1, buffered=True)
            swir_meta, swir_frame = swir_cam.get_frame(timeout=1, buffered=True)

            # All these sequence nr checks shouldn't be necessary, since we
            # will get a 'DroppedFrameException' if frames are dropped, but
            # they are present to illustrate how to check for dropped frames
            # under other circumstances.

            n1 = rgb_meta.sequence - i1
            if n1 > 0:
                print(f"rgb_cam skipped {n1} frames")
                break
            i1 = rgb_meta.sequence + 1

            n2 = swir_meta.sequence - i2
            if n2 > 0:
                print(f"swir_cam skipped {n2} frames")
                break
            i2 = swir_meta.sequence + 1

            if rgb_meta.sequence != swir_meta.sequence:
                print(
                    f"Error, heads are out of sync: {rgb_meta.sequence} != "
                    f"{swir_meta.sequence}"
                )
                break

            if rgb_meta.sequence % fps == 0:
                print(f"rgb_cam: {rgb_meta.sequence}")

            if swir_meta.sequence % fps == 0:
                print(f"swir_cam: {swir_meta.sequence}")

            cv2.imwrite(f"{SAVE_DIR}/rgb_{rgb_meta.sequence:09d}.png", rgb_frame)
            cv2.imwrite(f"{SAVE_DIR}/swir_{swir_meta.sequence:09d}.png", swir_frame)
    except Exception as e:
        # Broad catch is deliberate in this example: report the error and
        # fall through to the cleanup below so both streams get stopped.
        template = "An exception of type {0} occurred. Arguments:\n{1!r}"
        message = template.format(type(e).__name__, e.args)
        print(message)
    finally:
        rgb_cam.stop()
        swir_cam.stop()

    print("Done Streaming")
77
78
if __name__ == "__main__":
    # Open video capture devices
    try:
        rgb_cam = qamlib.Camera("/dev/qtec/video0")
        swir_cam = qamlib.Camera("/dev/qtec/video1")
    except Exception as e:
        print(e)
        # SystemExit avoids the site-dependent `exit` builtin; -1 would
        # have wrapped to status 255 anyway.
        raise SystemExit(1)

    # V4L2 "settings"
    fps = 5.0

    # Use BGR so we don't have to do the conversion to save with OpenCV
    rgb_cam.set_format("BGR3")
    rgb_cam.set_framerate(fps)

    swir_cam.set_format("GREY")
    # Set the SWIR head to "external trigger" so the other head drives it (fps)
    swir_cam.set_control("trigger mode", 1)

    img_format1 = rgb_cam.get_format()
    img_format2 = swir_cam.get_format()

    print(f"FPS: {rgb_cam.get_framerate()}")
    print(f"Frame Size: {rgb_cam.get_resolution()} , {swir_cam.get_resolution()}")
    print(f"Crop: {rgb_cam.get_crop()} , {swir_cam.get_crop()}")
    print(f"Pixel format: {img_format1} , {img_format2}")
    print("\n")

    # Adjust exposure time
    rgb_cam.set_control("Exposure Time, Absolute", 5000)
    swir_cam.set_control("Exposure Time, Absolute", 5000)
    print(
        f"Exposure: {rgb_cam.get_control('Exposure Time, Absolute')} , "
        f"{swir_cam.get_control('Exposure Time, Absolute')}"
    )

    # Restart streaming on errors
    while True:
        stream_on(rgb_cam, swir_cam)