Skip to content

Commit e451875

Browse files
committed
Added playback scripts, realsense intrinsics param code, fixes
1 parent d32e9ec commit e451875

File tree

5 files changed

+114
-23
lines changed

5 files changed

+114
-23
lines changed

Scripts/camera_playback.py

+35
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,35 @@
import numpy as np
import cv2
import time


# Load the recorded frames: array of shape (height, width, 4, frame_count)
# where channels 0-2 are the RGB image and channel 3 is the depth map.
with open("2023_02_15_empty_belt_recording.npy", "rb") as f:
    recording = np.load(f)

frame_count = recording.shape[3]
frame_index = 1  # frame 0 was captured before the recording warm-up; skip it
fps = 9

# Hoisted out of the loop: the CLAHE parameters never change between frames,
# so there is no reason to rebuild the equalizer on every iteration.
clahe = cv2.createCLAHE(clipLimit=20.0, tileGridSize=(5, 5))

while True:
    rgb_frame = recording[:, :, 0:3, frame_index].astype(np.uint8)
    depth_frame = recording[:, :, 3, frame_index].astype(np.uint16)

    # NOTE(review): casting uint16 depth to uint8 wraps values > 255;
    # presumably acceptable for visualization only — confirm sensor range.
    depth_frame_hist = clahe.apply(depth_frame.astype(np.uint8))
    cv2_colorized_depth = cv2.applyColorMap(depth_frame_hist, cv2.COLORMAP_JET)

    cv2.imshow("RGB Frame", rgb_frame)
    cv2.imshow("Depth Frame", cv2_colorized_depth)

    frame_index += 1
    time.sleep(1 / fps)  # crude pacing to approximate the recording fps

    # Loop the playback once the end of the recording is reached.
    if frame_index >= frame_count:
        print("Recording ended, Starting over...")
        frame_index = 1

    key = cv2.waitKey(1)
    if key == 27:  # 'Esc'
        break

Scripts/camera_record.py

+45
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,45 @@
from robot_cell.detection.realsense_depth import DepthCamera
import numpy as np
import cv2
import time


camera = DepthCamera(config_path="./config/D435_camera_config.json")
max_num_frames = 100

# Grab one frame up front to learn the frame geometry.
_, depth_frame, rgb_frame, _ = camera.get_frames()
frame_height, frame_width, frame_channel_count = rgb_frame.shape

# One pre-allocated buffer for the whole recording: RGB channels plus one
# depth channel, stored as uint16 so the 16-bit depth values fit.
recording = np.empty(
    (frame_height, frame_width, frame_channel_count + 1, max_num_frames), dtype=np.uint16
)

# Store the priming frame at index 0.
# NOTE(review): this frame is captured BEFORE the warm-up delay below, so it
# may show a stale scene compared to the rest of the recording — confirm.
depth_frame = np.expand_dims(depth_frame, axis=2)
recording[:, :, :, 0] = np.concatenate((rgb_frame.astype(np.uint16), depth_frame), axis=2)

# Give the operator time to set up the scene before recording proper starts.
time.sleep(5)

frm_num = 1
while True:
    start_time = time.time()
    success, depth_frame, rgb_frame, _ = camera.get_frames()
    if not success:
        continue

    cv2.imshow("RGB Frame", rgb_frame)

    # Stack RGB + depth into one (H, W, 4) slab and store it at this index.
    depth_frame = np.expand_dims(depth_frame, axis=2)
    recording[:, :, :, frm_num] = np.concatenate(
        (rgb_frame.astype(np.uint16), depth_frame), axis=2
    )

    duration = time.time() - start_time
    print("Time:", duration, "; Frame number:", frm_num)

    key = cv2.waitKey(1)
    frm_num += 1
    # Stop on Esc or when the pre-allocated buffer is full.
    if key == 27 or frm_num >= max_num_frames:  # 'Esc'
        print("Saving to file...")
        with open("recording.npy", "wb") as f:
            np.save(f, recording)
        camera.release()
        cv2.destroyAllWindows()  # fix: release the preview window on exit
        print("Finished")
        break

cv_pick_place/robot_cell/control/robot_communication.py

+3-3
Original file line numberDiff line numberDiff line change
@@ -49,11 +49,11 @@ def get_nodes(self):
4949
self.Safe_Operational_Stop = self.client.get_node(
5050
'ns=3;s="Robot_Data"."Status"."Safe_Operational_Stop"')
5151
self.Conveyor_Left = self.client.get_node(
52-
'ns=3;s="Program_Data"."Conveyor_Left_Toggle"')
52+
'ns=3;s="Program_Data"."OPCUA"."Conveyor_Left"')
5353
self.Conveyor_Right = self.client.get_node(
54-
'ns=3;s="Program_Data"."Conveyor_Right_Toggle"')
54+
'ns=3;s="Program_Data"."OPCUA"."Conveyor_Right"')
5555
self.Gripper_State = self.client.get_node(
56-
'ns=3;s="Program_Data"."Gripper_Toggle"')
56+
'ns=3;s="Program_Data"."OPCUA"."Gripper"')
5757
self.Encoder_Vel = self.client.get_node(
5858
'ns=3;s="Encoder_1".ActualVelocity')
5959
self.Encoder_Pos = self.client.get_node(

cv_pick_place/robot_cell/control/robot_control.py

+17-19
Original file line numberDiff line numberDiff line change
@@ -103,37 +103,35 @@ def close_program(self):
103103

104104
self.client.disconnect()
105105

106-
def change_gripper_state(self, state: bool):
106+
def change_gripper_state(self):
107107
"""
108108
Switch gripper on/off.
109-
110-
Args:
111-
state (bool): If the gripper should be turned on or off.
112109
"""
113110

114-
self.Gripper_State.set_value(opcua.ua.DataValue(state))
111+
self.Gripper_State.set_value(opcua.ua.DataValue(True))
112+
self.Gripper_State.set_value(opcua.ua.DataValue(False))
115113
if self.verbose:
116-
print("[INFO] Gripper state is {}.".format(state))
114+
print("[INFO] Toggled gripper.")
117115

118-
def change_conveyor_right(self, conv_right: bool):
116+
def change_conveyor_right(self):
119117
"""
120118
Switch conveyor right direction on/off.
121-
122-
Args:
123-
conv_right (bool): If the conveyor should be turned on or off in the right direction.
124119
"""
125120

126-
self.Conveyor_Right.set_value(opcua.ua.DataValue(conv_right))
121+
self.Conveyor_Right.set_value(opcua.ua.DataValue(True))
122+
self.Conveyor_Right.set_value(opcua.ua.DataValue(False))
123+
if self.verbose:
124+
print("[INFO] Toggled conveyor belt.")
127125

128-
def change_conveyor_left(self, conv_left: bool):
126+
def change_conveyor_left(self):
129127
"""
130128
Switch conveyor left direction on/off.
131-
132-
Args:
133-
conv_left (bool): If the conveyor should be turned on or off in the left direction.
134129
"""
135130

136-
self.Conveyor_Left.set_value(opcua.ua.DataValue(conv_left))
131+
self.Conveyor_Left.set_value(opcua.ua.DataValue(True))
132+
self.Conveyor_Left.set_value(opcua.ua.DataValue(False))
133+
if self.verbose:
134+
print("[INFO] Toggled conveyor belt.")
137135

138136
def set_trajectory(
139137
self,
@@ -329,13 +327,13 @@ def control_server(self, pipe: multiprocessing.connection.PipeConnection):
329327
data = input.data
330328

331329
if command == RcCommand.GRIPPER:
332-
self.change_gripper_state(data)
330+
self.change_gripper_state()
333331

334332
elif command == RcCommand.CONVEYOR_LEFT:
335-
self.change_conveyor_left(data)
333+
self.change_conveyor_left()
336334

337335
elif command == RcCommand.CONVEYOR_RIGHT:
338-
self.change_conveyor_right(data)
336+
self.change_conveyor_right()
339337

340338
elif command == RcCommand.CONTINUE_PROGRAM:
341339
self.continue_program()

cv_pick_place/robot_cell/detection/realsense_depth.py

+14-1
Original file line numberDiff line numberDiff line change
@@ -135,6 +135,17 @@ def __init__(
135135
# Start video stream
136136
self.profile = self.pipeline.start(self.config)
137137

138+
# Code for getting camera intrinsic parameters
139+
# self.stream_profile_color = self.profile.get_stream(rs.stream.color, 0)
140+
# self.stream_profile_depth = self.profile.get_stream(rs.stream.depth, 0)
141+
# self.video_stream_profile = self.stream_profile_color.as_video_stream_profile()
142+
# intrinsics = self.video_stream_profile.get_intrinsics()
143+
# print("fx =", intrinsics.fx, "; fy =", intrinsics.fy)
144+
# print("ppx =", intrinsics.ppx, "; ppy =", intrinsics.ppy)
145+
# print("height =", intrinsics.height, "; width =", intrinsics.width)
146+
# print("coeffs =", intrinsics.coeffs)
147+
# print("model =", intrinsics.model)
148+
138149
def get_frames(self) -> tuple[bool, np.ndarray, np.ndarray, np.ndarray]:
139150
"""
140151
Reads and processes frames from connected camera.
@@ -160,7 +171,9 @@ def get_frames(self) -> tuple[bool, np.ndarray, np.ndarray, np.ndarray]:
160171
# Apply hole filling filter
161172
depth_frame = self.hole_filling.process(depth_frame)
162173

163-
colorized_depth_frame = np.asanyarray(self.colorizer.colorize(depth_frame).get_data())
174+
colorized_depth_frame = np.asanyarray(
175+
self.colorizer.colorize(depth_frame).get_data()
176+
)
164177
depth_frame = np.asanyarray(depth_frame.get_data())
165178
color_frame = np.asanyarray(color_frame.get_data())
166179

0 commit comments

Comments
 (0)