在先前已配置好的机械臂教程的基础上,接着往下做。

先是视觉方面demo

手势识别使用的 MediaPipe 是谷歌的开源项目,主要功能是识别手势;这里将其封装成类,供下面的控制模块调用。

import cv2
import mediapipe as mp


class HandDetector:
    """Find hands in a BGR image using MediaPipe Hands.

    Wraps the MediaPipe hand-landmark solution and exposes, per hand,
    the pixel landmark list, bounding box, center point and a simple
    fingers-up test for the gesture-control module that imports this.
    """

    def __init__(self, mode=False, maxHands=2, detectionCon=0.5, minTrackCon=0.5):
        """
        :param mode: MediaPipe static_image_mode (False = video stream)
        :param maxHands: maximum number of hands to track
        :param detectionCon: minimum detection confidence
        :param minTrackCon: minimum tracking confidence
        """
        self.mode = mode
        self.maxHands = maxHands
        self.detectionCon = detectionCon
        self.minTrackCon = minTrackCon
        self.mpHands = mp.solutions.hands
        self.hands = self.mpHands.Hands(static_image_mode=self.mode,
                                        max_num_hands=self.maxHands,
                                        min_detection_confidence=self.detectionCon,
                                        min_tracking_confidence=self.minTrackCon)
        self.mpDraw = mp.solutions.drawing_utils
        # Landmark ids of the five fingertips (thumb, index, middle, ring, pinky).
        self.tipIds = [4, 8, 12, 16, 20]
        self.fingers = []
        self.lmList = []

    def findHands(self, img, draw=True, flipType=True):
        """Detect hands in *img* (a BGR OpenCV frame).

        Returns a list of hand dicts with keys "lmList" (21 [px, py, pz]
        points), "bbox" (x, y, w, h), "center" (cx, cy) and "type"
        ("Left"/"Right").  When *draw* is True the annotated image is
        returned as a second value.
        """
        imgRGB = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
        self.results = self.hands.process(imgRGB)
        allHands = []
        h, w, c = img.shape
        if self.results.multi_hand_landmarks:
            for handType, handLms in zip(self.results.multi_handedness,
                                         self.results.multi_hand_landmarks):
                myHand = {}
                # Landmark list in pixel coordinates.
                mylmList = []
                xList = []
                yList = []
                for id, lm in enumerate(handLms.landmark):
                    px, py, pz = int(lm.x * w), int(lm.y * h), int(lm.z * w)
                    mylmList.append([px, py, pz])
                    xList.append(px)
                    yList.append(py)
                # Bounding box around all landmarks.
                xmin, xmax = min(xList), max(xList)
                ymin, ymax = min(yList), max(yList)
                boxW, boxH = xmax - xmin, ymax - ymin
                bbox = xmin, ymin, boxW, boxH
                cx, cy = bbox[0] + (bbox[2] // 2), \
                         bbox[1] + (bbox[3] // 2)
                myHand["lmList"] = mylmList
                myHand["bbox"] = bbox
                myHand["center"] = (cx, cy)
                # MediaPipe labels are mirrored for a selfie-view camera;
                # flipType swaps them so the label matches the user's hand.
                if flipType:
                    if handType.classification[0].label == "Right":
                        myHand["type"] = "Left"
                    else:
                        myHand["type"] = "Right"
                else:
                    myHand["type"] = handType.classification[0].label
                allHands.append(myHand)
                # Draw landmarks, bounding box and hand label on the frame.
                if draw:
                    self.mpDraw.draw_landmarks(img, handLms,
                                               self.mpHands.HAND_CONNECTIONS)
                    cv2.rectangle(img, (bbox[0] - 20, bbox[1] - 20),
                                  (bbox[0] + bbox[2] + 20, bbox[1] + bbox[3] + 20),
                                  (255, 0, 255), 2)
                    cv2.putText(img, myHand["type"], (bbox[0] - 30, bbox[1] - 30),
                                cv2.FONT_HERSHEY_PLAIN, 2, (255, 0, 255), 2)
        if draw:
            return allHands, img
        else:
            return allHands

    def fingersUp(self, myHand):
        """Return which fingers of *myHand* are raised.

        The result has SIX elements: index 0 is a hand flag
        (0 = "Right" hand, 1 = "Left" hand) and indexes 1-5 are the
        thumb..pinky states (1 = up, 0 = down).  The downstream
        gesture mapper relies on this layout.  Returns None when no
        detection results are available.
        """
        myHandType = myHand["type"]
        myLmList = myHand["lmList"]
        if self.results.multi_hand_landmarks:
            fingers = []
            # Thumb: compare x of tip vs the joint below it; the
            # comparison direction depends on which hand it is.
            if myHandType == "Right":
                fingers.append(0)  # hand flag: right hand
                if myLmList[self.tipIds[0]][0] > myLmList[self.tipIds[0] - 1][0]:
                    fingers.append(1)
                else:
                    fingers.append(0)
            else:
                fingers.append(1)  # hand flag: left hand
                if myLmList[self.tipIds[0]][0] < myLmList[self.tipIds[0] - 1][0]:
                    fingers.append(1)
                else:
                    fingers.append(0)
            # Remaining four fingers: tip above the middle joint means "up".
            for id in range(1, 5):
                if myLmList[self.tipIds[id]][1] < myLmList[self.tipIds[id] - 2][1]:
                    fingers.append(1)
                else:
                    fingers.append(0)
            return fingers
        # Fix: return explicitly instead of falling off the end.
        return None


def main():
    """Webcam demo: draw detected hands and print their finger states."""
    cap = cv2.VideoCapture(0)
    detector = HandDetector(detectionCon=0.8, maxHands=2)
    while True:
        # Get image frame
        success, img = cap.read()
        if not success:
            # Fix: a dropped frame leaves img as None, which would crash
            # cv2.cvtColor inside findHands; skip it instead.
            continue
        # Find the hand and its landmarks
        hands, img = detector.findHands(img)  # with draw
        # hands = detector.findHands(img, draw=False)  # without draw
        if hands:
            # Hand 1
            hand1 = hands[0]
            lmList1 = hand1["lmList"]  # List of 21 Landmark points
            handType1 = hand1["type"]  # Handtype Left or Right
            fingers1 = detector.fingersUp(hand1)
            print(fingers1)
            if len(hands) == 2:
                # Hand 2
                hand2 = hands[1]
                lmList2 = hand2["lmList"]  # List of 21 Landmark points
                bbox2 = hand2["bbox"]  # Bounding box info x,y,w,h
                centerPoint2 = hand2['center']  # center of the hand cx,cy
                handType2 = hand2["type"]  # Hand Type "Left" or "Right"
                fingers2 = detector.fingersUp(hand2)
                print(fingers2)
        # Display
        cv2.imshow("Image", img)
        cv2.waitKey(1)


if __name__ == "__main__":
    main()

控制关节点的python demo 模块

#! /usr/bin/python3.8
# -*- coding: utf-8 -*-
"""Gesture teleoperation for a desktop robot arm.

Reads hand gestures from a webcam via HandTrackingModule, maps them to
virtual key presses, and publishes the resulting joint angles on the
/joint_states topic.
"""
from operator import ge  # NOTE(review): appears unused; kept — file may use it elsewhere
from time import sleep
import HandTrackingModule as Hd
import rospy
import cv2
from sensor_msgs.msg import JointState
import sys
import termios

# Control instructions printed at start-up.
# Fix: the precision step is 0.01 rad (see precisionBindings), not 0.05.
msg = """
Control Your Turtlebot!
---------------------------
Moving around:
q    w    e   r   t   y
a    s    d   f   g   h
j/l : increase/decrease precision by 0.01
space key, k : reset
anything else : stop smoothly
b : switch to arm_four/arm_six
precision is not less than or equal to zero
CTRL-C to quit
"""

mode = 0          # 0 = six-DOF mode, non-zero = four-DOF mode
precision = 0.05  # default step size per gesture (rad)

# Key -> (joint number, rotation direction).
rotateBindings = {
    'q': (1, 1),   # joint 1, clockwise
    'a': (1, -1),  # joint 1, counter-clockwise
    'w': (2, 1),
    's': (2, -1),
    'e': (3, 1),
    'd': (3, -1),
    'r': (4, 1),
    'f': (4, -1),
    't': (5, 1),
    'g': (5, -1),
    'y': (6, 1),
    'h': (6, -1),
}

# Key -> precision increment (rad).
precisionBindings = {
    'j': 0.01,
    'l': -0.01,
}
def prec(precision):
    """Format the current control precision as a display string."""
    return f"currently:\tprecision {precision} "
# Gesture detection
def detect_hands_gesture(result):
    """Map a fingersUp() result to a virtual key press.

    *result* is the 6-element list produced by HandDetector.fingersUp():
    result[0] is the hand flag (0 = right hand, 1 = left hand) and
    result[1:] are the five finger states (1 = up, 0 = down) in the
    order thumb, index, middle, ring, pinky.

    Returns the key character bound to the gesture, or None when the
    finger pattern (or hand flag) is not recognised.  The long if/elif
    ladder is replaced with per-hand lookup tables; the unmatched case
    is now an explicit None instead of an implicit fall-through.
    """
    # Right hand controls joints counter-clockwise (a/s/d/f/g/h) and reset.
    right_hand_keys = {
        (0, 1, 0, 0, 0): "a",
        (0, 1, 1, 0, 0): "s",
        (0, 0, 1, 1, 1): "d",
        (0, 1, 1, 1, 1): "f",
        (1, 1, 1, 1, 1): "g",
        (1, 0, 0, 0, 1): "h",
        (1, 1, 0, 0, 1): " ",  # rock-and-roll sign: arm upright (reset)
    }
    # Left hand controls joints clockwise (q/w/e/r/t/y).
    left_hand_keys = {
        (0, 1, 0, 0, 0): "q",
        (0, 1, 1, 0, 0): "w",
        (0, 0, 1, 1, 1): "e",
        (0, 1, 1, 1, 1): "r",
        (1, 1, 1, 1, 1): "t",
        (1, 0, 0, 0, 1): "y",  # thumb + pinky
    }
    if result[0] == 0:
        return right_hand_keys.get(tuple(result[1:]))
    elif result[0] == 1:
        return left_hand_keys.get(tuple(result[1:]))
    return None
# Main entry point
if __name__ == "__main__":
    # Save terminal attributes so they can be restored on exit.
    settings = termios.tcgetattr(sys.stdin)
    rospy.init_node('arm_teleop')  # create the ROS node
    # Publisher for the arm state topic.
    pub = rospy.Publisher('/joint_states', JointState, queue_size=5)

    # Current angle (rad) of joints 1-6.
    joints = [0, 0, 0, 0, 0, 0]

    # Initialise the arm into its neutral pose.
    jointState = JointState()
    jointState.header.stamp = rospy.Time.now()
    jointState.name = ["joint1", "joint2", "joint3", "joint4", "joint5", "joint6"]
    jointState.position = joints
    pub.publish(jointState)

    # NOTE(review): change the device index when plugging in an external USB camera.
    cap = cv2.VideoCapture(0)
    detector = Hd.HandDetector(detectionCon=0.8, maxHands=2)
    if not cap.isOpened():
        print("Can not open camera.")
        exit()

    try:
        print(msg)              # print the control instructions
        print(prec(precision))  # print the current precision
        key = " "
        while 1:
            # Get image frame
            success, img = cap.read()
            if not success:
                # Fix: a dropped frame leaves img as None and would crash
                # inside findHands; skip it instead.
                continue
            # Find the hand and its landmarks
            hands, img = detector.findHands(img)  # with draw
            # hands = detector.findHands(img, draw=False)  # without draw
            cv2.imshow("Image", img)
            cv2.waitKey(1)
            if hands:
                key = detect_hands_gesture(detector.fingersUp(hands[0]))
                # Is the key bound to an arm rotation?
                if key in rotateBindings:
                    joint, direction = rotateBindings[key]
                    joints[joint - 1] += precision * direction
                    # Clamp to the joint's mechanical limits (+/- 1.57 rad).
                    if joints[joint - 1] > 1.57:
                        joints[joint - 1] = 1.57
                    elif joints[joint - 1] < -1.57:
                        joints[joint - 1] = -1.57
                # Is the key bound to a precision change?
                elif key in precisionBindings:
                    newPrecision = precision + precisionBindings[key]
                    # Keep precision within [0.01, 0.1] rad.
                    if 0.01 <= newPrecision <= 0.1:
                        precision = newPrecision
                        print(prec(precision))  # precision changed: show it
                elif key == " ":
                    joints = [0, 0, 0, 0, 0, 0]  # reset: arm upright
                # Publish the joint state matching the arm's degrees of
                # freedom.  NOTE(review): in the original mangled source the
                # publish block's placement was ambiguous; publishing once per
                # recognised-gesture iteration (including the reset) matches
                # the intended teleop behaviour.
                if mode:
                    jointState.header.stamp = rospy.Time.now()
                    jointState.name = ["joint1", "joint2", "joint3", "joint4"]
                    jointState.position = joints[0:4]
                else:
                    jointState.header.stamp = rospy.Time.now()
                    jointState.name = ["joint1", "joint2", "joint3",
                                       "joint4", "joint5", "joint6"]
                    jointState.position = joints
                pub.publish(jointState)
    # If anything goes wrong, stop and print the error.
    except Exception as e:
        print(e)
    # Restore the terminal before exiting.
    finally:
        print("Keyboard control off")
        termios.tcsetattr(sys.stdin, termios.TCSADRAIN, settings)

感觉手势判断那部分写得有点粗糙,还有改进空间。

所有的python 写完记得

sudo chmod +x *.py  # 加可执行权限即可,不建议用 777(权限过大)

启动桌面机器人的基础服务

roslaunch table_arm base_serial.launch

启动

rosrun table_arm arm_opencv_move.py  # 注意:.py 节点用 rosrun 运行,roslaunch 只能启动 .launch 文件

机器臂直立 R&K 直立

ros_opencv_机器爪控制-2_哔哩哔哩_bilibili

机器臂(二)--视觉控制关节点相关推荐

  1. 机械臂(一)---机器臂的启动

    1.功能包导入以后编译错误缺少什么包就安装什么包:(删除xf_mic_asr_offline文件) sudo apt-get install ros-melodic-包名 sudo apt-get i ...

  2. 使用TPC控制UR3机器臂python代码

    使用TPC控制UR3机器臂python代码   记录下自己学习UR3的过程 UR3的消息格式 代码   使用时机器臂IP:192.168.0.1,电脑IP:192.168.0.2.python版本:3 ...

  3. 2D平面上的二连杆机器臂反向运动模拟(matlab代码示例)

    2D平面上的二连杆机器臂反向运动模拟 matlab上先下载 robotics system toolbox工具箱 robot = rigidBodyTree('DataFormat','column' ...

  4. (Python)从零开始,简单快速学机器仿人视觉Opencv---运用二:物体检测

    教程: 博主之前写了22节关于使用OpenCV的教程,欢迎大家阅读: (Python)从零开始,简单快速学机器仿人视觉Opencv-第一节:OpenCV的图像读取显示及保存 (Python)从零开始, ...

  5. 【自动化毕业设计】基于机械视觉控制的板球控制装置

    [自动化毕业设计]基于机械视觉控制的板球控制装置 大四自动化专业毕业设计,断断续续制作了三个月,中途参加了校招和其他杂事,按照一般工作强度制作大概需要3个星期. [实现功能] 自平衡控制 定位控制 运 ...

  6. 遨博协作机器人ROS开发 - 机械臂语音交互控制

    目录 一.简介 二.环境版本 三.学习目标 四.知识储备 五.任务实施 六.任务拓展 七.课堂小结 八.课后练习 一.简介 大家好,欢迎关注遨博学院带来的系列技术分享文章(协作机器人ROS开发),在前 ...

  7. 让机器“解疑释惑”:视觉世界中的结构化理解|VALSE2018之八

    编者按:据传宋徽宗赵佶曾以"深山藏古寺"为题命人作画,夺魁的画作,画崇山峻岭之中,一股清泉飞流直下,跳珠溅玉,泉边有位老态龙钟的和尚,正舀着泉水倒进桶里. 这幅画的妙处在于,从&q ...

  8. python代码控制机械臂_选用什么样的系统或平台开发机器人/机器臂?

    公众号已经更新好了,比下面的内容增加了视频.动图.具体更多解释,欢迎阅读机器人操作系统ROS.下面内容就不用看了,直接点赞就好啦~ ======== 谢邀,一直没时间回答. 开发机器人,这句话本身就是 ...

  9. 【重磅】世界首例无创脑机接口,无大脑电极植入操控机器臂与飞行器

    美国明尼苏达大学的研究者日前取得一项重大突破,让普通人在没有植入大脑电极的情况下,只凭借"意念",在复杂的三维空间内实现物体控制,包括操纵机器臂抓取.放置物体和控制飞行器飞行.该研 ...

最新文章

  1. jsp内置对象application
  2. Maven类包冲突终极解决小技若干
  3. Unity UGUI - Canvas / Camera
  4. HarmonyOS之设备传感器的功能和使用
  5. Codeforces Round #640 (Div. 4)(ABCDE)
  6. groovy 2.5.4_Java 7 vs Groovy 2.1性能比较
  7. 多线程编程 RW_LOCK 读写锁
  8. 用php做盒子模型,什么是CSS盒子模型?一文带你了解CSS盒子模型
  9. 23种设计模式(十九)数据结构之组合模式
  10. android 手机内存清理,教你彻底清理手机内存的最佳方法,只需一招
  11. 分数间隔 matlab,分数间隔均衡器
  12. 19|雨季来临,聊些轻松的吧
  13. Linux系统打不开gedit文本编辑器
  14. 为此计算机上所有用户安装此加载项,win7打开特定网站提示“控件无法安装,windows已阻止此软件因为无法验证发行者”怎么办...
  15. mySql | Error: ER_DATA_TOO_LONG: Data too long for column 'base_info' at row 1
  16. 机器学习之随机森林填补缺失值和众数填补缺失值
  17. DOS命令批处理 及 硬盘修复
  18. Live Messenger ,Gmail ,Orkut ,Wallop
  19. 写于数学建模美赛准备期间
  20. 手把手教你用图灵机器人做微信公众号自动回复助手

热门文章

  1. 华为FLA AL20手机刷机官方线刷包附售后刷机教程
  2. webpack-js压缩
  3. 测试用例该包含哪些部分
  4. 自己研发的核辐射探测仪——盖格计数器,探测装修材料,海淘的利器
  5. RabbitMQ之订阅模式与主题模式,消息确认机制
  6. asp mysql 查询_ASP数据库查询语言(一)
  7. Android 实现QQ聊天底部+号显示底部菜单
  8. 基于Tensorflow的环境声音分类
  9. K8s 1.23.x版本nfs持久存储报错 persistentvolume-controller waiting for a volume to be created, either by ext
  10. 制作服务器需要哪些,怎么搭建一个小型企业服务器机房,做这些都需要什么