Merge branch 'task-3' into main

havoc420ubuntu 2025-05-28 07:11:49 +00:00
commit 477511f291
18 changed files with 39274 additions and 17 deletions

4981
Gait_Params_up_full.toml Normal file

File diff suppressed because it is too large

View File

@@ -637,3 +637,16 @@ def go_to_y_v2(ctrl, msg, target_y, speed=0.5, precision=True, observe=False):
"""
target_x = ctrl.odo_msg.xyz[0]
return go_to_xy_v2(ctrl, msg, target_x, target_y, speed, precision, observe)
def go_to_x_v2(ctrl, msg, target_x, speed=0.5, precision=True, observe=False):
"""
Move the robot to a specified x coordinate using direct x-velocity control.
Args:
ctrl: Robot_Ctrl object containing odometry information
msg: robot_control_cmd_lcmt object used to send commands
target_x: target X coordinate (m)
speed: walking speed (m/s), range 0.1~1.0, default 0.5
"""
target_y = ctrl.odo_msg.xyz[1]
return go_to_xy_v2(ctrl, msg, target_x, target_y, speed, precision, observe)
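For orientation, a minimal usage sketch of the two wrappers above (assuming a running Robot_Ctrl instance ctrl and a robot_control_cmd_lcmt message msg, as set up in main.py; the target values are illustrative):

# Drive to x = 2.0 m while holding the current y, then to y = 0.5 m while holding x.
go_to_x_v2(ctrl, msg, target_x=2.0, speed=0.5, precision=True, observe=True)
go_to_y_v2(ctrl, msg, target_y=0.5, speed=0.5)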

View File

@@ -28,3 +28,36 @@
2025-05-26 00:25:13 | INFO | utils.log_helper - Saved original image to: logs/image/original_20250526_002513_050661.jpg
2025-05-26 00:25:13 | INFO | utils.log_helper - Saved left track line detection result image to: logs/image/left_track_20250526_002513_050661.jpg
2025-05-26 00:25:13 | INFO | utils.log_helper - Left track line detection result: {'timestamp': '20250526_002513_050661', 'tracking_point': (549, 1071), 'ground_intersection': (543, 1080), 'distance_to_left': 584.5, 'slope': -1.619718309859155, 'line_mid_x': 584.5}
2025-05-26 14:57:53 | INFO | utils.log_helper - Saved original image to: logs/image/origin_horizontal_edge_20250526_145753_084929.jpg
2025-05-26 14:57:53 | INFO | utils.log_helper - Saved original image to: logs/image/origin_horizontal_edge_20250526_145753_163732.jpg
2025-05-26 14:57:53 | INFO | utils.log_helper - Saved horizontal edge detection result image to: logs/image/horizontal_edge_20250526_145753_163732.jpg
2025-05-26 14:57:53 | INFO | utils.log_helper - Horizontal edge detection result: {'timestamp': '20250526_145753_163732', 'edge_point': (1040, 790), 'distance_to_center': 80, 'slope': -0.03763440860215054, 'distance_to_bottom': 286.98924731182797, 'intersection_point': (960, 793), 'score': 0.5265099443030505, 'valid': False, 'reason': 'edge point y-coordinate outside valid range; ', 'is_upper_line': False}
2025-05-26 14:58:23 | INFO | utils.log_helper - Saved original image to: logs/image/origin_horizontal_edge_20250526_145823_169420.jpg
2025-05-26 14:58:23 | INFO | utils.log_helper - Saved original image to: logs/image/origin_horizontal_edge_20250526_145823_249269.jpg
2025-05-26 14:58:23 | INFO | utils.log_helper - Saved horizontal edge detection result image to: logs/image/horizontal_edge_20250526_145823_249269.jpg
2025-05-26 14:58:23 | INFO | utils.log_helper - Horizontal edge detection result: {'timestamp': '20250526_145823_249269', 'edge_point': (973, 960), 'distance_to_center': 13, 'slope': -0.07112526539278131, 'distance_to_bottom': 119.07537154989382, 'intersection_point': (960, 960), 'score': 0.38712268929341453, 'valid': False, 'reason': 'edge point y-coordinate outside valid range; ', 'is_upper_line': False}
2025-05-26 14:58:33 | INFO | utils.log_helper - Saved original image to: logs/image/origin_horizontal_edge_20250526_145833_166005.jpg
2025-05-26 14:58:33 | INFO | utils.log_helper - Saved original image to: logs/image/origin_horizontal_edge_20250526_145833_219478.jpg
2025-05-26 14:58:33 | INFO | utils.log_helper - Saved horizontal edge detection result image to: logs/image/horizontal_edge_20250526_145833_219478.jpg
2025-05-26 14:58:33 | INFO | utils.log_helper - Horizontal edge detection result: {'timestamp': '20250526_145833_219478', 'edge_point': (973, 960), 'distance_to_center': 13, 'slope': -0.07112526539278131, 'distance_to_bottom': 119.07537154989382, 'intersection_point': (960, 960), 'score': 0.38712268929341453, 'valid': False, 'reason': 'edge point y-coordinate outside valid range; ', 'is_upper_line': False}
2025-05-26 14:58:37 | INFO | utils.log_helper - Saved original image to: logs/image/origin_horizontal_edge_20250526_145837_344953.jpg
2025-05-26 14:58:37 | INFO | utils.log_helper - Saved original image to: logs/image/origin_horizontal_edge_20250526_145837_397536.jpg
2025-05-26 14:58:37 | INFO | utils.log_helper - Saved horizontal edge detection result image to: logs/image/horizontal_edge_20250526_145837_397536.jpg
2025-05-26 14:58:37 | INFO | utils.log_helper - Horizontal edge detection result: {'timestamp': '20250526_145837_397536', 'edge_point': (973, 960), 'distance_to_center': 13, 'slope': -0.07112526539278131, 'distance_to_bottom': 119.07537154989382, 'intersection_point': (960, 960), 'score': 0.38712268929341453, 'valid': False, 'reason': 'edge point y-coordinate outside valid range; ', 'is_upper_line': False}
2025-05-26 14:58:43 | DEBUG | utils.log_helper - 🐞 Step 1: original image loaded
2025-05-26 14:58:44 | DEBUG | utils.log_helper - 🐞 Step 2: created yellow mask
2025-05-26 14:58:45 | DEBUG | utils.log_helper - 🐞 Step 3: extracted the yellow region
2025-05-26 14:58:46 | DEBUG | utils.log_helper - 🐞 Processing bottom edge points
2025-05-26 14:58:47 | DEBUG | utils.log_helper - 🐞 Showing fitted line segment
2025-05-26 14:58:48 | DEBUG | utils.log_helper - 👁️ Step 5: found edge point (320, 1009)
2025-05-26 14:58:48 | DEBUG | utils.log_helper - 🐞 Showing edge slope and centerline intersection
2025-05-26 14:58:49 | INFO | utils.log_helper - Saved original image to: logs/image/origin_horizontal_edge_20250526_145849_764575.jpg
2025-05-26 14:58:49 | INFO | utils.log_helper - Saved horizontal edge detection result image to: logs/image/horizontal_edge_20250526_145849_764575.jpg
2025-05-26 14:58:49 | INFO | utils.log_helper - Horizontal edge detection result: {'timestamp': '20250526_145849_764575', 'edge_point': (320, 1009), 'distance_to_center': -640, 'slope': -0.07331047777324741, 'distance_to_bottom': 117.91870577487839, 'intersection_point': (960, 962)}
2025-05-26 14:59:08 | DEBUG | utils.log_helper - 🐞 Step 1: created yellow mask
2025-05-26 14:59:09 | DEBUG | utils.log_helper - 🐞 Step 1.5: bottom region mask
2025-05-26 14:59:10 | DEBUG | utils.log_helper - 🐞 Step 2: edge detection
2025-05-26 14:59:11 | DEBUG | utils.log_helper - 🐞 Step 3: detected 65 lines
2025-05-26 14:59:12 | DEBUG | utils.log_helper - 🐞 Step 4: found 8 vertical lines
2025-05-26 14:59:14 | INFO | utils.log_helper - Saved dual track line detection result image to: logs/image/dual_track_20250526_145914_870232.jpg
2025-05-26 14:59:14 | INFO | utils.log_helper - Dual track line detection result: {'timestamp': '20250526_145914_870232', 'center_point': (834, 1080), 'deviation': -126, 'left_track_mid_x': 397.0, 'right_track_mid_x': 1351.5, 'track_width': 954.5, 'center_slope': -2.8529411764705883}
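As a quick sanity check on these results, the deviation and distance_to_center values are consistent with a 1920-pixel-wide frame whose centre column is x = 960 (this is a reading of the logged numbers, not of the detector code):

# Dual-track result at 14:59:14: center_point x = 834, so deviation = 834 - 960 = -126.
# Horizontal-edge results: edge_point x = 973 gives 973 - 960 = 13; x = 320 gives 320 - 960 = -640.
assert 834 - 960 == -126 and 973 - 960 == 13 and 320 - 960 == -640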

38
main.py
View File

@@ -18,8 +18,12 @@ from utils.robot_control_response_lcmt import robot_control_response_lcmt
from utils.localization_lcmt import localization_lcmt
from utils.image_raw import ImageProcessor
# from utils.marker_client import MarkerRunner
# from utils.marker_client import MarkerRunner
from utils.base_msg import BaseMsg
from task_1.task_1 import run_task_1
from task_2_5.task_2_5 import run_task_2_5
from task_3.task_3 import run_task_3
from task_1.task_1 import run_task_1, run_task_1_back
from task_2.task_2 import run_task_2, run_task_2_back
from task_2_5.task_2_5 import run_task_2_5, run_task_2_5_back
@@ -44,7 +48,7 @@ def main():
Ctrl.base_msg.stand_up()
Ctrl.base_msg.stop() # BUG: junk command (for eat)
# time.sleep(100) # TEST
# time.sleep(100) # TEST,
# run_task_1(Ctrl, msg)
arrow_direction = run_task_2(Ctrl, msg, xy_flag=False)
@@ -52,6 +56,13 @@ def main():
print(f"arrow_direction: {arrow_direction}")
run_task_2_5(Ctrl, msg, direction=arrow_direction)
# Execute task 3
run_task_3(Ctrl, msg)
# run_task_test(Ctrl, msg)
# time.sleep(100)
if arrow_direction == 'left':
run_task_4(Ctrl, msg)
else:
@@ -98,6 +109,7 @@ class Robot_Ctrl(object):
self.image_processor = ImageProcessor()
# DEBUG
# self.marker_runner = MarkerRunner(pass_flag=pass_marker)
# self.marker_runner = MarkerRunner(pass_flag=pass_marker)
self.send_lock = Lock()
self.delay_cnt = 0
self.mode_ok = 0
@@ -118,6 +130,7 @@ class Robot_Ctrl(object):
self.odo_thread.start()
self.image_processor.run()
# self.marker_runner.run()
# self.marker_runner.run()
def msg_handler(self, channel, data):
self.rec_msg = robot_control_response_lcmt().decode(data)
@@ -129,18 +142,18 @@ class Robot_Ctrl(object):
def msg_handler_o(self, channel, data):
self.odo_msg = localization_lcmt().decode(data)
# If not yet calibrated, record the first frame as the calibration baseline
if not self.is_calibrated:
self.calibration_offset = self.odo_msg.xyz
self.is_calibrated = True
print(f"校准完成,基准值: {self.calibration_offset}")
# if not self.is_calibrated:
# self.calibration_offset = self.odo_msg.xyz
# self.is_calibrated = True
# print(f"校准完成,基准值: {self.calibration_offset}")
# # Subtract the calibration baseline from the received odometry data
self.odo_msg.xyz = [
self.odo_msg.xyz[0] - self.calibration_offset[0],
self.odo_msg.xyz[1] - self.calibration_offset[1],
self.odo_msg.xyz[2] - self.calibration_offset[2]
]
# if self.odo_msg.timestamp % 50 == 0:
# print(self.odo_msg.rpy)
# self.odo_msg.xyz = [
# self.odo_msg.xyz[0] - self.calibration_offset[0],
# self.odo_msg.xyz[1] - self.calibration_offset[1],
# self.odo_msg.xyz[2] - self.calibration_offset[2]
# ]
# if self.odo_msg.timestamp % 100 == 0:
# print(self.odo_msg.xyz, self.odo_msg.rpy, self.odo_msg.vxyz, self.odo_msg.omegaBody, self.odo_msg.vBody)
def odo_reset(self):
self.calibration_offset = self.odo_msg.xyz
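# Note: with the first-frame auto-calibration and the offset subtraction above
# commented out in this diff, odo_reset() is the remaining place shown here that
# captures the baseline; odometry is used raw unless the offset is applied elsewhere.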
@@ -183,6 +196,7 @@ class Robot_Ctrl(object):
self.send_lock.release()
def place_marker(self, x, y, z, color, observe=False):
return None
return None
"""调用 MarkerRunner 放置标志物"""
if self.marker_runner is None or self.marker_runner.marker_client is None:

5782
task_3/Gait_Def_up.toml Normal file

File diff suppressed because it is too large

8738
task_3/Gait_Params_down.toml Normal file

File diff suppressed because it is too large

File diff suppressed because it is too large

8738
task_3/Gait_Params_up.toml Normal file

File diff suppressed because it is too large

File diff suppressed because it is too large

44
task_3/Usergait_List.toml Normal file
View File

@@ -0,0 +1,44 @@
[[step]]
acc_des = [0.0, 0.0, 0.0, 0.0, 0.0, 0.0]
contact = 0
ctrl_point = [0.0, 0.0, 0.0]
duration = 0 # Expected execution time of position-interpolation control; a recovery stand needs > 5.0 s
foot_pose = [0.0, 0.0, 0.0, 0.0, 0.0, 0.0]
gait_id = 0
life_count = 0 #Fake value
mode = 12
pos_des = [0.0, 0.0, 0.0]
rpy_des = [0.0, 0.0, 0.0]
step_height = [0.0, 0.0]
value = 0
vel_des = [0.0, 0.0, 0.0]
[[step]]
acc_des = [0.0, 0.0, 0.0, 0.0, 0.0, 0.0]
contact = 15
ctrl_point = [0.0, 0.0, 0.0]
duration = 0
foot_pose = [0.0, 0.0, 0.0, 0.0, 0.0, 0.0]
gait_id = 110
life_count = 0
mode = 62 # User-defined gait
pos_des = [0.0, 0.0, 0.0]
rpy_des = [0.0, 0.0, 0.0]
step_height = [0.0, 0.0]
value = 0
vel_des = [0.0, 0.0, 0.0] # velocity in x, y, yaw
# [[step]]
# acc_des = [0.0, 0.0, 0.0, 0.0, 0.0, 0.0]
# contact = 0
# ctrl_point = [0.0, 0.0, 0.0]
# duration = 1000
# foot_pose = [0.0, 0.0, 0.0, 0.0, 0.0, 0.0]
# gait_id = 1 # use a controlled lie-down
# life_count = 0
# mode = 7 #Puredamper
# pos_des = [0.0, 0.0, 0.0]
# rpy_des = [0.0, 0.0, 0.0]
# step_height = [0.0, 0.0]
# value = 0
# vel_des = [0.0, 0.0, 0.0]
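For context, a sketch of how a step list like this is replayed, based on the loop in task_3/main_ori.py further down (the file is assumed to be in the working directory; only the scalar fields are shown):

import time
import toml
import lcm
from robot_control_cmd_lcmt import robot_control_cmd_lcmt

lcm_cmd = lcm.LCM("udpm://239.255.76.67:7671?ttl=255")
cmd_msg = robot_control_cmd_lcmt()
steps = toml.load("Usergait_List.toml")
for step in steps['step']:
    # Copy each [[step]] into the command message and publish them in order.
    cmd_msg.mode = step['mode']
    cmd_msg.value = step['value']
    cmd_msg.contact = step['contact']
    cmd_msg.gait_id = step['gait_id']
    cmd_msg.duration = step['duration']
    cmd_msg.life_count += 1
    lcm_cmd.publish("robot_control_cmd", cmd_msg.encode())
    time.sleep(0.1)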

67
task_3/file_send_lcmt.py Executable file
View File

@@ -0,0 +1,67 @@
"""LCM type definitions This file automatically generated by lcm.
struct file_send_lcmt {
string data;
}
"""
try:
import cStringIO.StringIO as BytesIO
except ImportError:
from io import BytesIO
import struct
class file_send_lcmt(object):
__slots__ = ["data"]
__typenames__ = ["string"]
__dimensions__ = [None]
def __init__(self):
self.data = ""
def encode(self):
buf = BytesIO()
buf.write(file_send_lcmt._get_packed_fingerprint())
self._encode_one(buf)
return buf.getvalue()
def _encode_one(self, buf):
__data_encoded = self.data.encode('utf-8')
buf.write(struct.pack('>I', len(__data_encoded)+1))
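# The +1 above accounts for the terminating NUL byte appended after the payload below.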
buf.write(__data_encoded)
buf.write(b"\0")
def decode(data):
if hasattr(data, 'read'):
buf = data
else:
buf = BytesIO(data)
if buf.read(8) != file_send_lcmt._get_packed_fingerprint():
raise ValueError("Decode error")
return file_send_lcmt._decode_one(buf)
decode = staticmethod(decode)
def _decode_one(buf):
self = file_send_lcmt()
__data_len = struct.unpack('>I', buf.read(4))[0]
self.data = buf.read(__data_len)[:-1].decode('utf-8', 'replace')
return self
_decode_one = staticmethod(_decode_one)
def _get_hash_recursive(parents):
if file_send_lcmt in parents: return 0
tmphash = (0x90df9b84cdceaf0a) & 0xffffffffffffffff
tmphash = (((tmphash<<1)&0xffffffffffffffff) + (tmphash>>63)) & 0xffffffffffffffff
return tmphash
_get_hash_recursive = staticmethod(_get_hash_recursive)
_packed_fingerprint = None
def _get_packed_fingerprint():
if file_send_lcmt._packed_fingerprint is None:
file_send_lcmt._packed_fingerprint = struct.pack(">Q", file_send_lcmt._get_hash_recursive([]))
return file_send_lcmt._packed_fingerprint
_get_packed_fingerprint = staticmethod(_get_packed_fingerprint)
def get_hash(self):
"""Get the LCM hash of the struct"""
return struct.unpack(">Q", file_send_lcmt._get_packed_fingerprint())[0]
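A minimal usage sketch of this type, mirroring how task_3/main.py publishes gait files (channel name and UDP endpoint taken from that script):

import lcm
from file_send_lcmt import file_send_lcmt

lc = lcm.LCM("udpm://239.255.76.67:7671?ttl=255")
msg = file_send_lcmt()
with open("Gait_Def_up.toml") as f:
    msg.data = f.read()  # the struct carries the whole TOML file as one string
lc.publish("user_gait_file", msg.encode())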

98
task_3/main.py Executable file
View File

@@ -0,0 +1,98 @@
'''
This demo shows the communication interface of the MR813 motion control board, based on LCM:
- robot_control_cmd_lcmt.py
- file_send_lcmt.py
- Gait_Def_moonwalk.toml
- Gait_Params_moonwalk.toml
- Usergait_List.toml
'''
import lcm
import sys
import time
import toml
import copy
import math
from robot_control_cmd_lcmt import robot_control_cmd_lcmt
from file_send_lcmt import file_send_lcmt
robot_cmd = {
'mode':0, 'gait_id':0, 'contact':0, 'life_count':0,
'vel_des':[0.0, 0.0, 0.0],
'rpy_des':[0.0, 0.0, 0.0],
'pos_des':[0.0, 0.0, 0.0],
'acc_des':[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
'ctrl_point':[0.0, 0.0, 0.0],
'foot_pose':[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
'step_height':[0.0, 0.0],
'value':0, 'duration':0
}
def main():
lcm_cmd = lcm.LCM("udpm://239.255.76.67:7671?ttl=255")
lcm_usergait = lcm.LCM("udpm://239.255.76.67:7671?ttl=255")
usergait_msg = file_send_lcmt()
cmd_msg = robot_control_cmd_lcmt()
try:
steps = toml.load("Gait_Params_up.toml")
full_steps = {'step':[robot_cmd]}
k =0
for i in steps['step']:
cmd = copy.deepcopy(robot_cmd)
cmd['duration'] = i['duration']
if i['type'] == 'usergait':
cmd['mode'] = 11 # LOCOMOTION
cmd['gait_id'] = 110 # USERGAIT
cmd['vel_des'] = i['body_vel_des']
cmd['rpy_des'] = i['body_pos_des'][0:3]
cmd['pos_des'] = i['body_pos_des'][3:6]
cmd['foot_pose'][0:2] = i['landing_pos_des'][0:2]
cmd['foot_pose'][2:4] = i['landing_pos_des'][3:5]
cmd['foot_pose'][4:6] = i['landing_pos_des'][6:8]
cmd['ctrl_point'][0:2] = i['landing_pos_des'][9:11]
cmd['step_height'][0] = math.ceil(i['step_height'][0] * 1e3) + math.ceil(i['step_height'][1] * 1e3) * 1e3
cmd['step_height'][1] = math.ceil(i['step_height'][2] * 1e3) + math.ceil(i['step_height'][3] * 1e3) * 1e3
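# The two ceil(... * 1e3) terms above pack a pair of step heights (in metres) into one
# millimetre-scaled number, e.g. 0.05 m and 0.06 m -> 50 + 60 * 1000 = 60050.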
cmd['acc_des'] = i['weight']
cmd['value'] = i['use_mpc_traj']
cmd['contact'] = math.floor(i['landing_gain'] * 1e1)
cmd['ctrl_point'][2] = i['mu']
if k == 0:
full_steps['step'] = [cmd]
else:
full_steps['step'].append(cmd)
k=k+1
f = open("Gait_Params_up_full.toml", 'w')
f.write("# Gait Params\n")
f.writelines(toml.dumps(full_steps))
f.close()
file_obj_gait_def = open("Gait_Def_up.toml",'r')
file_obj_gait_params = open("Gait_Params_up_full.toml",'r')
usergait_msg.data = file_obj_gait_def.read()
lcm_usergait.publish("user_gait_file",usergait_msg.encode())
time.sleep(0.5)
usergait_msg.data = file_obj_gait_params.read()
lcm_usergait.publish("user_gait_file",usergait_msg.encode())
time.sleep(0.1)
file_obj_gait_def.close()
file_obj_gait_params.close()
cmd_msg.mode = 62
cmd_msg.value = 0
cmd_msg.contact = 15
cmd_msg.gait_id = 110
cmd_msg.duration = 1000
cmd_msg.life_count += 1
for i in range(325): # Heartbeat loop (325 x 0.2 s, about 65 s): maintains the heartbeat while life_count is not updated
lcm_cmd.publish("robot_control_cmd",cmd_msg.encode())
time.sleep( 0.2 )
except KeyboardInterrupt:
cmd_msg.mode = 7 #PureDamper before KeyboardInterrupt:
cmd_msg.gait_id = 0
cmd_msg.duration = 0
cmd_msg.life_count += 1
lcm_cmd.publish("robot_control_cmd",cmd_msg.encode())
pass
sys.exit()
if __name__ == '__main__':
main()

113
task_3/main_ori.py Executable file
View File

@@ -0,0 +1,113 @@
'''
This demo shows the communication interface of the MR813 motion control board, based on LCM:
- robot_control_cmd_lcmt.py
- file_send_lcmt.py
- Gait_Def_moonwalk.toml
- Gait_Params_moonwalk.toml
- Usergait_List.toml
'''
import lcm
import sys
import time
import toml
import copy
import math
from robot_control_cmd_lcmt import robot_control_cmd_lcmt
from file_send_lcmt import file_send_lcmt
robot_cmd = {
'mode':0, 'gait_id':0, 'contact':0, 'life_count':0,
'vel_des':[0.0, 0.0, 0.0],
'rpy_des':[0.0, 0.0, 0.0],
'pos_des':[0.0, 0.0, 0.0],
'acc_des':[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
'ctrl_point':[0.0, 0.0, 0.0],
'foot_pose':[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
'step_height':[0.0, 0.0],
'value':0, 'duration':0
}
def task_3_main():
lcm_cmd = lcm.LCM("udpm://239.255.76.67:7671?ttl=255")
lcm_usergait = lcm.LCM("udpm://239.255.76.67:7671?ttl=255")
usergait_msg = file_send_lcmt()
cmd_msg = robot_control_cmd_lcmt()
try:
steps = toml.load(f"Gait_Params_up.toml")
full_steps = {'step':[robot_cmd]}
k =0
for i in steps['step']:
cmd = copy.deepcopy(robot_cmd)
cmd['duration'] = i['duration']
if i['type'] == 'usergait':
cmd['mode'] = 11 # LOCOMOTION
cmd['gait_id'] = 110 # USERGAIT
cmd['vel_des'] = i['body_vel_des']
cmd['rpy_des'] = i['body_pos_des'][0:3]
cmd['pos_des'] = i['body_pos_des'][3:6]
cmd['foot_pose'][0:2] = i['landing_pos_des'][0:2]
cmd['foot_pose'][2:4] = i['landing_pos_des'][3:5]
cmd['foot_pose'][4:6] = i['landing_pos_des'][6:8]
cmd['ctrl_point'][0:2] = i['landing_pos_des'][9:11]
cmd['step_height'][0] = math.ceil(i['step_height'][0] * 1e3) + math.ceil(i['step_height'][1] * 1e3) * 1e3
cmd['step_height'][1] = math.ceil(i['step_height'][2] * 1e3) + math.ceil(i['step_height'][3] * 1e3) * 1e3
cmd['acc_des'] = i['weight']
cmd['value'] = i['use_mpc_traj']
cmd['contact'] = math.floor(i['landing_gain'] * 1e1)
cmd['ctrl_point'][2] = i['mu']
if k == 0:
full_steps['step'] = [cmd]
else:
full_steps['step'].append(cmd)
k=k+1
f = open(f"Gait_Params_up_full.toml", 'w')
f.write("# Gait Params\n")
f.writelines(toml.dumps(full_steps))
f.close()
file_obj_gait_def = open(f"Gait_Def_up.toml",'r')
file_obj_gait_params = open(f"Gait_Params_up_full.toml",'r')
usergait_msg.data = file_obj_gait_def.read()
lcm_usergait.publish("user_gait_file",usergait_msg.encode())
time.sleep(0.5)
usergait_msg.data = file_obj_gait_params.read()
lcm_usergait.publish("user_gait_file",usergait_msg.encode())
time.sleep(0.1)
file_obj_gait_def.close()
file_obj_gait_params.close()
user_gait_list = open(f"Usergait_List.toml",'r')
steps = toml.load(user_gait_list)
for step in steps['step']:
cmd_msg.mode = step['mode']
cmd_msg.value = step['value']
cmd_msg.contact = step['contact']
cmd_msg.gait_id = step['gait_id']
cmd_msg.duration = step['duration']
cmd_msg.life_count += 1
for i in range(3):
cmd_msg.vel_des[i] = step['vel_des'][i]
cmd_msg.rpy_des[i] = step['rpy_des'][i]
cmd_msg.pos_des[i] = step['pos_des'][i]
cmd_msg.acc_des[i] = step['acc_des'][i]
cmd_msg.acc_des[i+3] = step['acc_des'][i+3]
cmd_msg.foot_pose[i] = step['foot_pose'][i]
cmd_msg.ctrl_point[i] = step['ctrl_point'][i]
for i in range(2):
cmd_msg.step_height[i] = step['step_height'][i]
lcm_cmd.publish("robot_control_cmd",cmd_msg.encode())
time.sleep( 0.1 )
for i in range(325): # Heartbeat loop (325 x 0.2 s, about 65 s): maintains the heartbeat while life_count is not updated
lcm_cmd.publish("robot_control_cmd",cmd_msg.encode())
time.sleep( 0.2 )
except KeyboardInterrupt:
cmd_msg.mode = 7 #PureDamper before KeyboardInterrupt:
cmd_msg.gait_id = 0
cmd_msg.duration = 0
cmd_msg.life_count += 1
lcm_cmd.publish("robot_control_cmd",cmd_msg.encode())
pass
sys.exit()
if __name__ == '__main__':
task_3_main()

149
task_3/robot_control_cmd_lcmt.py Executable file
View File

@@ -0,0 +1,149 @@
# LCM type definitions. This file was automatically generated by lcm.
try:
import cStringIO.StringIO as BytesIO
except ImportError:
from io import BytesIO
import struct
class robot_control_cmd_lcmt(object):
__slots__ = ["mode", "gait_id", "contact", "life_count", "vel_des", "rpy_des", "pos_des", "acc_des", "ctrl_point", "foot_pose", "step_height", "value", "duration"]
__typenames__ = ["int8_t", "int8_t", "int8_t", "int8_t", "float", "float", "float", "float", "float", "float", "float", "int32_t", "int32_t"]
__dimensions__ = [None, None, None, None, [3], [3], [3], [6], [3], [6], [2], None, None]
def __init__(self):
self.mode = 0
self.gait_id = 0
self.contact = 0
self.life_count = 0
self.vel_des = [ 0.0 for dim0 in range(3) ]
self.rpy_des = [ 0.0 for dim0 in range(3) ]
self.pos_des = [ 0.0 for dim0 in range(3) ]
self.acc_des = [ 0.0 for dim0 in range(6) ]
self.ctrl_point = [ 0.0 for dim0 in range(3) ]
self.foot_pose = [ 0.0 for dim0 in range(6) ]
self.step_height = [ 0.0 for dim0 in range(2) ]
self.value = 0
self.duration = 0
def encode(self):
buf = BytesIO()
buf.write(robot_control_cmd_lcmt._get_packed_fingerprint())
self._encode_one(buf)
return buf.getvalue()
def _encode_one(self, buf):
buf.write(struct.pack(">bbbb", self.mode, self.gait_id, self.contact, self.life_count))
buf.write(struct.pack('>3f', *self.vel_des[:3]))
buf.write(struct.pack('>3f', *self.rpy_des[:3]))
buf.write(struct.pack('>3f', *self.pos_des[:3]))
buf.write(struct.pack('>6f', *self.acc_des[:6]))
buf.write(struct.pack('>3f', *self.ctrl_point[:3]))
buf.write(struct.pack('>6f', *self.foot_pose[:6]))
buf.write(struct.pack('>2f', *self.step_height[:2]))
buf.write(struct.pack(">ii", self.value, self.duration))
def decode(data):
if hasattr(data, 'read'):
buf = data
else:
buf = BytesIO(data)
if buf.read(8) != robot_control_cmd_lcmt._get_packed_fingerprint():
raise ValueError("Decode error")
return robot_control_cmd_lcmt._decode_one(buf)
decode = staticmethod(decode)
def _decode_one(buf):
self = robot_control_cmd_lcmt()
self.mode, self.gait_id, self.contact, self.life_count = struct.unpack(">bbbb", buf.read(4))
self.vel_des = struct.unpack('>3f', buf.read(12))
self.rpy_des = struct.unpack('>3f', buf.read(12))
self.pos_des = struct.unpack('>3f', buf.read(12))
self.acc_des = struct.unpack('>6f', buf.read(24))
self.ctrl_point = struct.unpack('>3f', buf.read(12))
self.foot_pose = struct.unpack('>6f', buf.read(24))
self.step_height = struct.unpack('>2f', buf.read(8))
self.value, self.duration = struct.unpack(">ii", buf.read(8))
return self
_decode_one = staticmethod(_decode_one)
def _get_hash_recursive(parents):
if robot_control_cmd_lcmt in parents: return 0
tmphash = (0x476b61e296af96f5) & 0xffffffffffffffff
tmphash = (((tmphash<<1)&0xffffffffffffffff) + (tmphash>>63)) & 0xffffffffffffffff
return tmphash
_get_hash_recursive = staticmethod(_get_hash_recursive)
_packed_fingerprint = None
def _get_packed_fingerprint():
if robot_control_cmd_lcmt._packed_fingerprint is None:
robot_control_cmd_lcmt._packed_fingerprint = struct.pack(">Q", robot_control_cmd_lcmt._get_hash_recursive([]))
return robot_control_cmd_lcmt._packed_fingerprint
_get_packed_fingerprint = staticmethod(_get_packed_fingerprint)
def get_hash(self):
"""Get the LCM hash of the struct"""
return struct.unpack(">Q", robot_control_cmd_lcmt._get_packed_fingerprint())[0]
class robot_control_response_lcmt(object):
__slots__ = ["mode", "gait_id", "contact", "order_process_bar", "switch_status", "ori_error", "footpos_error", "motor_error"]
__typenames__ = ["int8_t", "int8_t", "int8_t", "int8_t", "int8_t", "int8_t", "int16_t", "int32_t"]
__dimensions__ = [None, None, None, None, None, None, None, [12]]
def __init__(self):
self.mode = 0
self.gait_id = 0
self.contact = 0
self.order_process_bar = 0
self.switch_status = 0
self.ori_error = 0
self.footpos_error = 0
self.motor_error = [ 0 for dim0 in range(12) ]
def encode(self):
buf = BytesIO()
buf.write(robot_control_response_lcmt._get_packed_fingerprint())
self._encode_one(buf)
return buf.getvalue()
def _encode_one(self, buf):
buf.write(struct.pack(">bbbbbbh", self.mode, self.gait_id, self.contact, self.order_process_bar, self.switch_status, self.ori_error, self.footpos_error))
buf.write(struct.pack('>12i', *self.motor_error[:12]))
def decode(data):
if hasattr(data, 'read'):
buf = data
else:
buf = BytesIO(data)
if buf.read(8) != robot_control_response_lcmt._get_packed_fingerprint():
raise ValueError("Decode error")
return robot_control_response_lcmt._decode_one(buf)
decode = staticmethod(decode)
def _decode_one(buf):
self = robot_control_response_lcmt()
self.mode, self.gait_id, self.contact, self.order_process_bar, self.switch_status, self.ori_error, self.footpos_error = struct.unpack(">bbbbbbh", buf.read(8))
self.motor_error = struct.unpack('>12i', buf.read(48))
return self
_decode_one = staticmethod(_decode_one)
def _get_hash_recursive(parents):
if robot_control_response_lcmt in parents: return 0
tmphash = (0x485da98216eda8c7) & 0xffffffffffffffff
tmphash = (((tmphash<<1)&0xffffffffffffffff) + (tmphash>>63)) & 0xffffffffffffffff
return tmphash
_get_hash_recursive = staticmethod(_get_hash_recursive)
_packed_fingerprint = None
def _get_packed_fingerprint():
if robot_control_response_lcmt._packed_fingerprint is None:
robot_control_response_lcmt._packed_fingerprint = struct.pack(">Q", robot_control_response_lcmt._get_hash_recursive([]))
return robot_control_response_lcmt._packed_fingerprint
_get_packed_fingerprint = staticmethod(_get_packed_fingerprint)
def get_hash(self):
"""Get the LCM hash of the struct"""
return struct.unpack(">Q", robot_control_response_lcmt._get_packed_fingerprint())[0]
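A minimal sketch of driving the robot with this type (mode and gait_id values taken from the comments in the task_3 code; the velocity is illustrative):

import time
import lcm
from robot_control_cmd_lcmt import robot_control_cmd_lcmt

lc = lcm.LCM("udpm://239.255.76.67:7671?ttl=255")
cmd = robot_control_cmd_lcmt()
cmd.mode = 11                  # LOCOMOTION
cmd.gait_id = 26               # adaptive-frequency gait (per task_3/task_3.py comments)
cmd.vel_des = [0.3, 0.0, 0.0]  # forward at 0.3 m/s
cmd.life_count += 1
for _ in range(10):            # re-publish to keep the command's heartbeat alive
    lc.publish("robot_control_cmd", cmd.encode())
    time.sleep(0.2)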

522
task_3/task_3.py Normal file
View File

@@ -0,0 +1,522 @@
import time
import sys
import os
import toml
import copy
import math
import lcm
import numpy as np
import cv2
import tempfile
# Add the parent directory to the path so that utils can be imported
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
# Add the current directory to the path so that local files can be found
sys.path.append(os.path.dirname(os.path.abspath(__file__)))
from utils.log_helper import LogHelper, get_logger, section, info, debug, warning, error, success, timing
from base_move.turn_degree import turn_degree, turn_degree_v2
from base_move.go_straight import go_straight
from base_move.go_to_xy import go_to_x_v2
from file_send_lcmt import file_send_lcmt
from utils.yellow_area_analyzer import analyze_yellow_area_ratio
# Create a logger specific to this module
logger = get_logger("任务3")
observe = True
robot_cmd = {
'mode':0, 'gait_id':0, 'contact':0, 'life_count':0,
'vel_des':[0.0, 0.0, 0.0],
'rpy_des':[0.0, 0.0, 0.0],
'pos_des':[0.0, 0.0, 0.0],
'acc_des':[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
'ctrl_point':[0.0, 0.0, 0.0],
'foot_pose':[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
'step_height':[0.0, 0.0],
'value':0, 'duration':0
}
def pass_up_down(ctrl, msg):
usergait_msg = file_send_lcmt()
lcm_usergait = lcm.LCM("udpm://239.255.76.67:7671?ttl=255")
try:
steps = toml.load("./task_3/Gait_Params_up.toml")
full_steps = {'step':[robot_cmd]}
k =0
for i in steps['step']:
cmd = copy.deepcopy(robot_cmd)
cmd['duration'] = i['duration']
if i['type'] == 'usergait':
cmd['mode'] = 11 # LOCOMOTION
cmd['gait_id'] = 110 # USERGAIT
cmd['vel_des'] = i['body_vel_des']
cmd['rpy_des'] = i['body_pos_des'][0:3]
cmd['pos_des'] = i['body_pos_des'][3:6]
cmd['foot_pose'][0:2] = i['landing_pos_des'][0:2]
cmd['foot_pose'][2:4] = i['landing_pos_des'][3:5]
cmd['foot_pose'][4:6] = i['landing_pos_des'][6:8]
cmd['ctrl_point'][0:2] = i['landing_pos_des'][9:11]
cmd['step_height'][0] = math.ceil(i['step_height'][0] * 1e3) + math.ceil(i['step_height'][1] * 1e3) * 1e3
cmd['step_height'][1] = math.ceil(i['step_height'][2] * 1e3) + math.ceil(i['step_height'][3] * 1e3) * 1e3
cmd['acc_des'] = i['weight']
cmd['value'] = i['use_mpc_traj']
cmd['contact'] = math.floor(i['landing_gain'] * 1e1)
cmd['ctrl_point'][2] = i['mu']
if k == 0:
full_steps['step'] = [cmd]
else:
full_steps['step'].append(cmd)
k=k+1
f = open("./task_3/Gait_Params_up_full.toml", 'w')
f.write("# Gait Params\n")
f.writelines(toml.dumps(full_steps))
f.close()
# pre
file_obj_gait_def = open("./task_3/Gait_Def_up.toml",'r')
file_obj_gait_params = open("./task_3/Gait_Params_up_full.toml",'r')
usergait_msg.data = file_obj_gait_def.read()
lcm_usergait.publish("user_gait_file",usergait_msg.encode())
time.sleep(0.5)
usergait_msg.data = file_obj_gait_params.read()
lcm_usergait.publish("user_gait_file",usergait_msg.encode())
time.sleep(0.1)
file_obj_gait_def.close()
file_obj_gait_params.close()
msg.mode = 62
msg.value = 0
msg.contact = 15
msg.gait_id = 110
msg.duration = 1000
msg.life_count += 1
# Parameter setup
stable_count = 0 # counts consecutive detections of a stable z axis
stable_threshold = 8 # after 8 consecutive detections that z is no longer rising, the climb is considered finished
z_speed_threshold = 0.01 # z-axis speed threshold; below this the robot is considered to have stopped climbing
climb_speed_threshold = 0.05 # speed threshold for detecting the start of the climb
max_iterations = 250 # maximum loop count, as a safety guard
min_iterations = 150 # minimum loop count, as a safety guard
# Attitude criteria
pitch_threshold = 0.05 # pitch angle threshold (rad)
angular_rate_threshold = 0.03 # angular rate threshold (rad/s)
# Phase control
climbing_detected = False # whether a climb in progress has been detected
# Height history
height_window = []
pitch_window = []
window_size = 8
# Record the starting attitude and height
start_height = ctrl.odo_msg.xyz[2]
info(f"开始监测爬坡过程,初始高度: {start_height:.3f}", "监测")
for i in range(max_iterations):
# 发送控制命令维持心跳
ctrl.Send_cmd(msg)
# 获取当前状态数据
vz = ctrl.odo_msg.vxyz[2] # Z轴速度
current_height = ctrl.odo_msg.xyz[2] # 当前高度
current_pitch = ctrl.odo_msg.rpy[1] # 当前俯仰角
pitch_rate = ctrl.odo_msg.omegaBody[1] # 俯仰角速度
vbody_z = ctrl.odo_msg.vBody[2] # 机体坐标系Z速度
# 更新滑动窗口数据
height_window.append(current_height)
pitch_window.append(current_pitch)
if len(height_window) > window_size:
height_window.pop(0)
pitch_window.pop(0)
# 每10次迭代打印一次当前信息
if i % 10 == 0:
info(f"当前Z轴速度={vz:.3f}, 当前高度={current_height:.3f}, 俯仰角={current_pitch:.3f}, 角速度={pitch_rate:.3f}", "监测")
# 检测是否开始爬坡阶段
if not climbing_detected and vz > climb_speed_threshold:
climbing_detected = True
info(f"检测到开始爬坡Z轴速度: {vz:.3f}, 当前高度: {current_height:.3f}, 俯仰角: {current_pitch:.3f}", "监测")
# 多条件判断是否完成爬坡
if i > min_iterations and climbing_detected and len(height_window) == window_size:
# 计算高度和俯仰角的稳定性
height_std = np.std(height_window) # 高度标准差
pitch_std = np.std(pitch_window) # 俯仰角标准差
# 多条件综合判断
position_stable = abs(vz) < z_speed_threshold # 垂直速度稳定
attitude_stable = abs(current_pitch) < pitch_threshold # 俯仰角接近水平
angular_rate_stable = abs(pitch_rate) < angular_rate_threshold # 角速度稳定
height_stable = height_std < 0.01 # 高度变化小
pitch_stable = pitch_std < 0.01 # 俯仰角变化小
vbody_stable = abs(vbody_z) < 0.01 # 机体Z方向速度稳定
# 综合判断条件
if (position_stable and attitude_stable and angular_rate_stable) or \
(position_stable and height_stable and pitch_stable) or \
(vbody_stable and attitude_stable and height_stable):
stable_count += 1
if stable_count >= stable_threshold:
info(f"检测到已完成爬坡:\n - Z轴速度: {vz:.3f}\n - 俯仰角: {current_pitch:.3f}\n - 角速度: {pitch_rate:.3f}\n - 当前高度: {current_height:.3f}\n - 上升了: {current_height - start_height:.3f}", "监测")
break
else:
# 重置稳定计数
stable_count = 0
time.sleep(0.2)
except KeyboardInterrupt:
msg.mode = 7 #PureDamper before KeyboardInterrupt:
msg.gait_id = 0
msg.duration = 0
msg.life_count += 1
ctrl.Send_cmd(msg)
pass
section('任务3-2x = 2', "开始")
# msg.mode = 11 # Locomotion mode # DEBUG
# msg.gait_id = 26 # adaptive-frequency gait
# msg.duration = 0 # wait next cmd
# msg.step_height = [0.06, 0.06] # step height
# msg.vel_des = [0, 0.2, 0] # [forward speed, lateral speed, angular speed]
# msg.life_count += 1
# ctrl.Send_cmd(msg)
time.sleep(1)
# go_to_x_v2(ctrl, msg, 2, speed=0.5, precision=True, observe=True)
section('任务3-3down', "完成")
try:
steps = toml.load("./task_3/Gait_Params_down.toml")
full_steps = {'step':[robot_cmd]}
k = 0
for i in steps['step']:
cmd = copy.deepcopy(robot_cmd)
cmd['duration'] = i['duration']
if i['type'] == 'usergait':
cmd['mode'] = 11 # LOCOMOTION
cmd['gait_id'] = 110 # USERGAIT
cmd['vel_des'] = i['body_vel_des']
cmd['rpy_des'] = i['body_pos_des'][0:3]
cmd['pos_des'] = i['body_pos_des'][3:6]
cmd['foot_pose'][0:2] = i['landing_pos_des'][0:2]
cmd['foot_pose'][2:4] = i['landing_pos_des'][3:5]
cmd['foot_pose'][4:6] = i['landing_pos_des'][6:8]
cmd['ctrl_point'][0:2] = i['landing_pos_des'][9:11]
cmd['step_height'][0] = math.ceil(i['step_height'][0] * 1e3) + math.ceil(i['step_height'][1] * 1e3) * 1e3
cmd['step_height'][1] = math.ceil(i['step_height'][2] * 1e3) + math.ceil(i['step_height'][3] * 1e3) * 1e3
cmd['acc_des'] = i['weight']
cmd['value'] = i['use_mpc_traj']
cmd['contact'] = math.floor(i['landing_gain'] * 1e1)
cmd['ctrl_point'][2] = i['mu']
if k == 0:
full_steps['step'] = [cmd]
else:
full_steps['step'].append(cmd)
k=k+1
f = open("./task_3/Gait_Params_down_full.toml", 'w')
f.write("# Gait Params\n")
f.writelines(toml.dumps(full_steps))
f.close()
# pre
file_obj_gait_def = open("./task_3/Gait_Def_up.toml",'r')
file_obj_gait_params = open("./task_3/Gait_Params_down_full.toml",'r')
usergait_msg.data = file_obj_gait_def.read()
lcm_usergait.publish("user_gait_file",usergait_msg.encode())
time.sleep(0.5)
usergait_msg.data = file_obj_gait_params.read()
lcm_usergait.publish("user_gait_file",usergait_msg.encode())
time.sleep(0.1)
file_obj_gait_def.close()
file_obj_gait_params.close()
msg.mode = 62
msg.value = 0
msg.contact = 15
msg.gait_id = 110
msg.duration = 1000
msg.life_count += 1
# Parameter setup
stable_count = 0 # counts consecutive detections of a stable z axis
stable_threshold = 10 # after 10 consecutive detections of near-zero z speed, the robot is considered to be on flat ground
z_speed_threshold = 0.005 # z-axis speed threshold; below this the robot is considered to have stopped descending
descent_speed_threshold = -0.05 # speed threshold for detecting the start of the descent (negative means going down)
max_iterations = 250 # maximum loop count, as a safety guard
min_iterations = 150 # minimum loop count, to leave enough time for the motion to start
start_height = ctrl.odo_msg.xyz[2] # record the starting height
# Attitude criteria
pitch_threshold = 0.05 # pitch angle threshold (rad)
angular_rate_threshold = 0.03 # angular rate threshold (rad/s)
# Phase control
descending_detected = False # whether a descent in progress has been detected
flat_ground_detected = False # whether flat ground has been reached
# Height history
height_window = []
pitch_window = []
window_size = 8
info(f"开始监测下坡过程,初始高度: {start_height}", "监测")
for i in range(max_iterations):
# 发送控制命令维持心跳
ctrl.Send_cmd(msg)
# 获取当前状态数据
vz = ctrl.odo_msg.vxyz[2] # Z轴速度
current_height = ctrl.odo_msg.xyz[2] # 当前高度
current_pitch = ctrl.odo_msg.rpy[1] # 当前俯仰角
pitch_rate = ctrl.odo_msg.omegaBody[1] # 俯仰角速度
vbody_z = ctrl.odo_msg.vBody[2] # 机体坐标系Z速度
# 更新滑动窗口数据
height_window.append(current_height)
pitch_window.append(current_pitch)
if len(height_window) > window_size:
height_window.pop(0)
pitch_window.pop(0)
# 每10次迭代打印一次当前信息
if observe and i % 10 == 0:
info(f"当前Z轴速度={vz:.3f}, 当前高度={current_height:.3f}, 俯仰角={current_pitch:.3f}, 角速度={pitch_rate:.3f}", "监测")
# 检测是否开始下坡阶段
if not descending_detected and vz < descent_speed_threshold:
descending_detected = True
info(f"检测到开始下坡Z轴速度: {vz:.3f}, 当前高度: {current_height:.3f}, 俯仰角: {current_pitch:.3f}", "监测")
# 多条件判断是否到达平地
if i > min_iterations and descending_detected and len(height_window) == window_size:
# 计算高度和俯仰角的稳定性
height_std = np.std(height_window) # 高度标准差
pitch_std = np.std(pitch_window) # 俯仰角标准差
# 多条件综合判断
position_stable = abs(vz) < z_speed_threshold # 垂直速度稳定
attitude_stable = abs(current_pitch) < pitch_threshold # 俯仰角接近水平
angular_rate_stable = abs(pitch_rate) < angular_rate_threshold # 角速度稳定
height_stable = height_std < 0.01 # 高度变化小
pitch_stable = pitch_std < 0.01 # 俯仰角变化小
vbody_stable = abs(vbody_z) < 0.01 # 机体Z方向速度稳定
# 综合判断条件
if (position_stable and attitude_stable and angular_rate_stable) or \
(position_stable and height_stable and pitch_stable) or \
(vbody_stable and attitude_stable and height_stable):
stable_count += 1
if stable_count >= stable_threshold:
info(f"检测到已到达平地:\n - Z轴速度: {vz:.3f}\n - 俯仰角: {current_pitch:.3f}\n - 角速度: {pitch_rate:.3f}\n - 高度: {current_height:.3f}\n - 下降了: {start_height - current_height:.3f}", "监测")
flat_ground_detected = True
break
else:
# 重置稳定计数
stable_count = 0
time.sleep(0.2)
if not flat_ground_detected:
info(f"达到最大循环次数,未能明确检测到到达平地。当前高度: {ctrl.odo_msg.xyz[2]:.3f}", "警告")
except KeyboardInterrupt:
msg.mode = 7 #PureDamper before KeyboardInterrupt:
msg.gait_id = 0
msg.duration = 0
msg.life_count += 1
ctrl.Send_cmd(msg)
pass
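The completion test used in both monitoring loops above, restated as a standalone sketch (thresholds copied from the climb phase; this is a summary, not code the task imports):

import numpy as np

def slope_finished(vz, pitch, pitch_rate, vbody_z, heights, pitches,
                   z_thr=0.01, pitch_thr=0.05, rate_thr=0.03):
    # The robot is considered back on steady ground when any of three
    # combinations of velocity, attitude and window-stability checks holds.
    position_stable = abs(vz) < z_thr
    attitude_stable = abs(pitch) < pitch_thr
    rate_stable = abs(pitch_rate) < rate_thr
    height_stable = np.std(heights) < 0.01
    pitch_window_stable = np.std(pitches) < 0.01
    vbody_stable = abs(vbody_z) < 0.01
    return ((position_stable and attitude_stable and rate_stable)
            or (position_stable and height_stable and pitch_window_stable)
            or (vbody_stable and attitude_stable and height_stable))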
def go_until_yellow_area(ctrl, msg, yellow_ratio_threshold=0.15, speed=0.3, max_time=30, observe=True):
"""
Walk straight ahead until the camera-detected yellow-area ratio rises above the given threshold and then starts to fall, at which point the robot stops.
Args:
ctrl: Robot_Ctrl object containing odometry information
msg: robot_control_cmd_lcmt object used to send commands
yellow_ratio_threshold: yellow-area ratio threshold (float between 0 and 1), default 0.15
speed: forward speed (m/s), default 0.3 m/s
max_time: maximum walking time (s), default 30 s
observe: whether to print intermediate status and visualization output, default True
Returns:
bool: whether a yellow-area peak was detected and the robot stopped
"""
section("开始直行寻找黄色区域", "黄色检测")
# Set up the basic parameters of the move command
msg.mode = 11 # Locomotion mode
msg.gait_id = 26 # adaptive-frequency gait
msg.duration = 0 # wait next cmd
msg.step_height = [0.06, 0.06] # step height
msg.vel_des = [speed, 0, 0] # [forward speed, lateral speed, angular speed]
# Record the start time and position
start_time = time.time()
start_position = list(ctrl.odo_msg.xyz)
if observe:
info(f"开始寻找黄色区域,阈值: {yellow_ratio_threshold:.2%}", "启动")
debug(f"起始位置: {start_position}", "位置")
# Detection interval
check_interval = 0.3 # seconds
last_check_time = 0
# Yellow-area monitoring variables
yellow_peak_detected = False # whether a peak has been detected
yellow_decreasing = False # whether the ratio has started to fall
max_yellow_ratio = 0.0 # maximum yellow-area ratio seen so far
yellow_ratio_history = [] # history of recent yellow-area ratios
history_window_size = 5 # history window size, used for smoothing
try:
# 开始移动
msg.life_count += 1
ctrl.Send_cmd(msg)
# 持续检测黄色区域
while time.time() - start_time < max_time and not yellow_decreasing:
current_time = time.time()
# 定期发送移动命令保持移动状态
if current_time - last_check_time >= check_interval:
# 获取当前图像并保存到临时文件
current_image = ctrl.image_processor.get_current_image()
# 创建临时文件保存图像
with tempfile.NamedTemporaryFile(suffix='.jpg', delete=False) as temp_file:
temp_filename = temp_file.name
cv2.imwrite(temp_filename, current_image)
try:
# 分析图像中的黄色区域
yellow_ratio = analyze_yellow_area_ratio(temp_filename, debug=False, save_result=False)
# 添加到历史记录
yellow_ratio_history.append(yellow_ratio)
if len(yellow_ratio_history) > history_window_size:
yellow_ratio_history.pop(0)
# 计算平滑后的当前黄色占比(使用最近几次的平均值以减少噪声)
current_smooth_ratio = sum(yellow_ratio_history) / len(yellow_ratio_history)
# 计算已移动距离(仅用于显示)
current_position = ctrl.odo_msg.xyz
dx = current_position[0] - start_position[0]
dy = current_position[1] - start_position[1]
distance_moved = math.sqrt(dx*dx + dy*dy)
if observe:
info(f"当前黄色区域占比: {yellow_ratio:.2%}, 平滑值: {current_smooth_ratio:.2%}, 已移动: {distance_moved:.2f}", "检测")
# 检测是否达到阈值(开始监测峰值)
if current_smooth_ratio >= yellow_ratio_threshold:
# 更新最大值
if current_smooth_ratio > max_yellow_ratio:
max_yellow_ratio = current_smooth_ratio
if not yellow_peak_detected:
yellow_peak_detected = True
if observe:
info(f"黄色区域占比超过阈值,开始监测峰值", "检测")
# 检测是否开始下降
elif yellow_peak_detected and current_smooth_ratio < max_yellow_ratio * 0.9: # 下降到峰值的90%以下认为开始下降
yellow_decreasing = True
if observe:
success(f"检测到黄色区域占比开始下降,峰值: {max_yellow_ratio:.2%}, 当前: {current_smooth_ratio:.2%}", "检测")
finally:
# 删除临时文件
try:
os.unlink(temp_filename)
except:
pass
# 更新心跳
msg.life_count += 1
ctrl.Send_cmd(msg)
last_check_time = current_time
# 小间隔
time.sleep(0.05)
# 平滑停止
if yellow_decreasing:
if observe:
info("开始平滑停止", "停止")
# 先降低速度再停止,实现平滑停止
slowdown_steps = 5
for i in range(slowdown_steps, 0, -1):
slowdown_factor = i / slowdown_steps
msg.vel_des = [speed * slowdown_factor, 0, 0]
msg.life_count += 1
ctrl.Send_cmd(msg)
time.sleep(0.1)
# 完全停止
ctrl.base_msg.stop()
# 计算最终移动距离
final_position = ctrl.odo_msg.xyz
dx = final_position[0] - start_position[0]
dy = final_position[1] - start_position[1]
final_distance = math.sqrt(dx*dx + dy*dy)
if observe:
if yellow_decreasing:
success(f"成功检测到黄色区域峰值并停止,峰值占比: {max_yellow_ratio:.2%}, 总移动距离: {final_distance:.2f}", "完成")
else:
warning(f"未能在限定时间内检测到黄色区域峰值,总移动距离: {final_distance:.2f}", "超时")
return yellow_decreasing
except KeyboardInterrupt:
# 处理键盘中断
ctrl.base_msg.stop()
if observe:
warning("操作被用户中断", "中断")
return False
except Exception as e:
# 处理其他异常
ctrl.base_msg.stop()
if observe:
error(f"发生错误: {str(e)}", "错误")
return False
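The stop criterion above, restated as a standalone sketch (window size, threshold and the 90%-of-peak drop factor are taken from the function; this is a summary, not code the task imports):

def peak_drop_stop(ratios, threshold=0.15, window=5, drop_factor=0.9):
    # Smooth the yellow-area ratio over a short window, arm once the smoothed
    # value exceeds the threshold, then stop when it drops below drop_factor * peak.
    history, peak, armed = [], 0.0, False
    for r in ratios:
        history.append(r)
        if len(history) > window:
            history.pop(0)
        smooth = sum(history) / len(history)
        if smooth >= threshold:
            armed = True
            peak = max(peak, smooth)
        elif armed and smooth < drop_factor * peak:
            return True
    return False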
def run_task_3(ctrl, msg):
section('任务3步态切换', "启动")
info('开始执行任务3...', "启动")
turn_degree_v2(ctrl, msg, 90, absolute=True)
# section('任务3-1up', "开始")
# pass_up_down(ctrl, msg)
section('任务3-2yellow stop', "开始")
go_until_yellow_area(ctrl, msg, yellow_ratio_threshold=0.15, speed=0.3)
# Stand in place for 3 seconds
section("原地站立3秒", "站立")
msg.mode = 11 # Locomotion mode
msg.gait_id = 26 # adaptive-frequency gait
msg.duration = 0 # wait next cmd
msg.step_height = [0.06, 0.06] # step height
msg.vel_des = [0, 0, 0] # zero velocity, stand in place
msg.life_count += 1
ctrl.Send_cmd(msg)
info("开始原地站立3秒", "站立")
time.sleep(3)
info("完成原地站立", "站立")

View File

@@ -7,12 +7,15 @@ sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from base_move.turn_degree import turn_degree
from base_move.move_base_hori_line import follow_left_side_track
from base_move.go_straight import go_straight
from utils.log_helper import LogHelper, get_logger, section, info, debug, warning, error, success, timing
# Create a logger specific to this module
logger = get_logger("任务-test")
def run_task_test(ctrl, msg):
turn_degree(ctrl, msg, 90, absolute=True)
follow_left_side_track(ctrl, msg)
# turn_degree(ctrl, msg, 90, absolute=True)
# turn_degree(ctrl, msg, 90, absolute=True)
go_straight(ctrl, msg, distance=0.3, speed=0.1)
# follow_left_side_track(ctrl, msg)
# time.sleep(100)

View File

@@ -17,7 +17,7 @@ def process_image(image_path, save_dir=None, show_steps=False):
# Detect the track and estimate the distance
start_time = time.time()
edge_point, edge_info = detect_horizontal_track_edge_v2(image_path, observe=show_steps, save_log=True, delay=800)
edge_point, edge_info = detect_horizontal_track_edge(image_path, observe=show_steps, save_log=True, delay=800)
processing_time = time.time() - start_time
# Output the results
@@ -44,7 +44,7 @@ def process_image(image_path, save_dir=None, show_steps=False):
def main():
parser = argparse.ArgumentParser(description='黄色赛道检测演示程序')
parser.add_argument('--input', type=str, default='res/path/test-1.jpg', help='输入图像或视频的路径')
parser.add_argument('--input', type=str, default='res/path/image_20250513_162556.png', help='输入图像或视频的路径')
parser.add_argument('--output', type=str, default='res/path/test-v2/2-end.jpg', help='输出结果的保存路径')
parser.add_argument('--type', type=str, choices=['image', 'video'], help='输入类型,不指定会自动检测')
parser.add_argument('--show', default=True, action='store_true', help='显示处理步骤')

View File

@@ -19,7 +19,7 @@ def detect_horizontal_track_edge(image, observe=False, delay=1000, save_log=True
edge_point: coordinates (x, y) of the edge point ahead on the track
edge_info: dictionary of edge information
"""
observe = False # TEST
# observe = False # TEST
# If the input is a string (file path), load the image
if isinstance(image, str):
img = cv2.imread(image)