
Add lidar-based follower

zhuo 4 years ago
parent
commit
4207716e3b

+ 1 - 1
src/robot_calibration/config/linear_calibrate_params.yaml

@@ -26,7 +26,7 @@
 #  20180911: added the check_rate and cmd_topic parameters.
 #  20200218: odom_frame has changed because robot_pose_ekf is now used for sensor fusion.
 #######################################################################
-test_distance: 2.0  # m
+test_distance: 1.0  # m
 linear_speed: 0.12  # m/s
 tolerance_linear: 0.005  # 0.5cm
 linear_scale: 1.0

+ 8 - 0
src/robot_dynamic/cfg/param.yaml

@@ -0,0 +1,8 @@
+#####################################################
+# Copyright: 2016-2020 www.corvin.cn ROS小课堂
+# Description: use odom for the initial calibration
+# Author: adam_zhuo
+# odomFrame: which odom frame to use
+######################################################
+
+odomFrame: /odom_combined

+ 6 - 1
src/robot_dynamic/launch/robot_dynamic.launch

@@ -1,4 +1,9 @@
 <launch>
-  <node pkg="robot_dynamic" type="robot_dynamic_node" name="robot_dynamic_node" output="screen" />
+  <arg name="cfg_file" default="$(find robot_dynamic)/cfg/param.yaml" />
+
+  <node pkg="robot_dynamic" type="robot_dynamic_node" name="robot_dynamic_node" output="screen">
+    <rosparam file="$(arg cfg_file)" command="load" />
+  </node>
+  
   <node pkg="rqt_reconfigure" type="rqt_reconfigure" name="rqt_reconfigure" output="screen" />
 </launch>
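
Note on scoping: because the <rosparam> load sits inside the <node> element, the odomFrame key from cfg/param.yaml ends up in the node's private namespace, which is why the C++ change below can read it as "~odomFrame". A minimal Python sketch of checking the resolved value from outside the node (the helper node name is made up; the parameter and node names come from the files in this commit):

    #!/usr/bin/env python
    # Sketch only: where robot_dynamic.launch puts the YAML key.
    # Inside robot_dynamic_node the value is read as the private parameter
    # "~odomFrame"; from anywhere else it is visible under the node's name.
    import rospy

    rospy.init_node('odom_frame_check')  # hypothetical helper node
    frame = rospy.get_param('/robot_dynamic_node/odomFrame', '/odom_combined')
    rospy.loginfo('robot_dynamic_node odomFrame: %s', frame)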

+ 3 - 1
src/robot_dynamic/src/robot_dynamic.cpp

@@ -1,7 +1,7 @@
 /*
  * @Author: adam_zhuo
  * @Date: 2020-08-31 09:40:13
- * @LastEditTime: 2020-09-04 09:36:15
+ * @LastEditTime: 2020-09-14 11:10:50
  * @LastEditors: Please set LastEditors
 * @Description: dynamically tune and test whether the robot turns and moves accurately
  * @FilePath: /blackTornadoRobot/src/robot_dynamic/src/robot_dynamic.cpp
@@ -201,6 +201,8 @@ int main(int argc, char **argv)
     ros::init(argc, argv, "robot_dynamic_node");
     ros::NodeHandle handle;
  
+    ros::param::get("~odomFrame", odomFrame);
+
     dynamic_reconfigure::Server<robot_dynamic::dynamicConfig> server;
     dynamic_reconfigure::Server<robot_dynamic::dynamicConfig>::CallbackType callback;
  

+ 192 - 0
src/robot_follower/CMakeLists.txt

@@ -0,0 +1,192 @@
+cmake_minimum_required(VERSION 2.8.3)
+project(robot_follower)
+
+## Find catkin macros and libraries
+## if COMPONENTS list like find_package(catkin REQUIRED COMPONENTS xyz)
+## is used, also find other catkin packages
+find_package(catkin REQUIRED COMPONENTS
+  roscpp
+  rospy
+  std_msgs
+  sensor_msgs
+  geometry_msgs
+  message_generation
+)
+
+## System dependencies are found with CMake's conventions
+# find_package(Boost REQUIRED COMPONENTS system)
+
+
+## Uncomment this if the package has a setup.py. This macro ensures
+## modules and global scripts declared therein get installed
+## See http://ros.org/doc/api/catkin/html/user_guide/setup_dot_py.html
+# catkin_python_setup()
+
+################################################
+## Declare ROS messages, services and actions ##
+################################################
+
+## To declare and build messages, services or actions from within this
+## package, follow these steps:
+## * Let MSG_DEP_SET be the set of packages whose message types you use in
+##   your messages/services/actions (e.g. std_msgs, actionlib_msgs, ...).
+## * In the file package.xml:
+##   * add a build_depend tag for "message_generation"
+##   * add a build_depend and a run_depend tag for each package in MSG_DEP_SET
+##   * If MSG_DEP_SET isn't empty the following dependency has been pulled in
+##     but can be declared for certainty nonetheless:
+##     * add a run_depend tag for "message_runtime"
+## * In this file (CMakeLists.txt):
+##   * add "message_generation" and every package in MSG_DEP_SET to
+##     find_package(catkin REQUIRED COMPONENTS ...)
+##   * add "message_runtime" and every package in MSG_DEP_SET to
+##     catkin_package(CATKIN_DEPENDS ...)
+##   * uncomment the add_*_files sections below as needed
+##     and list every .msg/.srv/.action file to be processed
+##   * uncomment the generate_messages entry below
+##   * add every package in MSG_DEP_SET to generate_messages(DEPENDENCIES ...)
+
+## Generate messages in the 'msg' folder
+ add_message_files(
+   FILES
+   position.msg
+ )
+
+## Generate services in the 'srv' folder
+# add_service_files(
+#   FILES
+#   Service1.srv
+#   Service2.srv
+# )
+
+## Generate actions in the 'action' folder
+# add_action_files(
+#   FILES
+#   Action1.action
+#   Action2.action
+# )
+
+## Generate added messages and services with any dependencies listed here
+ generate_messages(
+   DEPENDENCIES
+   std_msgs
+   sensor_msgs
+   geometry_msgs
+ )
+
+################################################
+## Declare ROS dynamic reconfigure parameters ##
+################################################
+
+## To declare and build dynamic reconfigure parameters within this
+## package, follow these steps:
+## * In the file package.xml:
+##   * add a build_depend and a run_depend tag for "dynamic_reconfigure"
+## * In this file (CMakeLists.txt):
+##   * add "dynamic_reconfigure" to
+##     find_package(catkin REQUIRED COMPONENTS ...)
+##   * uncomment the "generate_dynamic_reconfigure_options" section below
+##     and list every .cfg file to be processed
+
+## Generate dynamic reconfigure parameters in the 'cfg' folder
+# generate_dynamic_reconfigure_options(
+#   cfg/DynReconf1.cfg
+#   cfg/DynReconf2.cfg
+# )
+
+###################################
+## catkin specific configuration ##
+###################################
+## The catkin_package macro generates cmake config files for your package
+## Declare things to be passed to dependent projects
+## INCLUDE_DIRS: uncomment this if your package contains header files
+## LIBRARIES: libraries you create in this project that dependent projects also need
+## CATKIN_DEPENDS: catkin_packages dependent projects also need
+## DEPENDS: system dependencies of this project that dependent projects also need
+catkin_package(
+#  INCLUDE_DIRS include
+#  LIBRARIES simple_follower
+  CATKIN_DEPENDS roscpp rospy std_msgs message_runtime
+#  DEPENDS system_lib
+)
+
+###########
+## Build ##
+###########
+
+## Specify additional locations of header files
+## Your package locations should be listed before other locations
+# include_directories(include)
+include_directories(
+  ${catkin_INCLUDE_DIRS}
+)
+
+## Declare a C++ library
+# add_library(simple_follower
+#   src/${PROJECT_NAME}/simple_follower.cpp
+# )
+
+## Add cmake target dependencies of the library
+## as an example, code may need to be generated before libraries
+## either from message generation or dynamic reconfigure
+# add_dependencies(simple_follower ${${PROJECT_NAME}_EXPORTED_TARGETS} ${catkin_EXPORTED_TARGETS})
+
+## Declare a C++ executable
+# add_executable(simple_follower_node src/simple_follower_node.cpp)
+
+## Add cmake target dependencies of the executable
+## same as for the library above
+# add_dependencies(simple_follower_node ${${PROJECT_NAME}_EXPORTED_TARGETS} ${catkin_EXPORTED_TARGETS})
+
+## Specify libraries to link a library or executable target against
+# target_link_libraries(simple_follower_node
+#   ${catkin_LIBRARIES}
+# )
+
+#############
+## Install ##
+#############
+
+# all install targets should use catkin DESTINATION variables
+# See http://ros.org/doc/api/catkin/html/adv_user_guide/variables.html
+
+## Mark executable scripts (Python etc.) for installation
+## in contrast to setup.py, you can choose the destination
+# install(PROGRAMS
+#   scripts/my_python_script
+#   DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}
+# )
+
+## Mark executables and/or libraries for installation
+# install(TARGETS simple_follower simple_follower_node
+#   ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION}
+#   LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION}
+#   RUNTIME DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}
+# )
+
+## Mark cpp header files for installation
+# install(DIRECTORY include/${PROJECT_NAME}/
+#   DESTINATION ${CATKIN_PACKAGE_INCLUDE_DESTINATION}
+#   FILES_MATCHING PATTERN "*.h"
+#   PATTERN ".svn" EXCLUDE
+# )
+
+## Mark other files for installation (e.g. launch and bag files, etc.)
+# install(FILES
+#   # myfile1
+#   # myfile2
+#   DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}
+# )
+
+#############
+## Testing ##
+#############
+
+## Add gtest based cpp test target and link libraries
+# catkin_add_gtest(${PROJECT_NAME}-test test/test_simple_follower.cpp)
+# if(TARGET ${PROJECT_NAME}-test)
+#   target_link_libraries(${PROJECT_NAME}-test ${PROJECT_NAME})
+# endif()
+
+## Add folders to be run by python nosetests
+# catkin_add_nosetests(test)

+ 5 - 0
src/robot_follower/launch/laser_follower.launch

@@ -0,0 +1,5 @@
+<launch>
+  <include file='$(find robot_navigation)/launch/ydlidar_X2L.launch' />
+  <include file='$(find robot_follower)/launch/nodes/laserTracker.launch' />
+  <include file='$(find robot_follower)/launch/nodes/follower.launch' />
+</launch>

+ 13 - 0
src/robot_follower/launch/nodes/follower.launch

@@ -0,0 +1,13 @@
+<launch>
+  <node name='follower' pkg="robot_follower" type="follower.py">
+    <!-- switchMode: if true, one button press toggles between active and inactive; if false, the button has to be kept pressed the whole time to stay active -->
+    <param name="switchMode" value="True" type="bool" />
+    <!-- maximal speed (both angular and linear); target distance: the robot will try to keep this fixed distance -->
+    <param name='maxSpeed' value='0.4' type='double' />
+    <param name='targetDist' value='0.5' type='double' />
+    <!-- index of the button in the buttons field of the Joy message from the PS3 controller that switches active/inactive -->
+    <param name='controllButtonIndex' value='-4' type='int' />
+    <rosparam ns='PID_controller' command='load' file='$(find robot_follower)/parameters/PID_param.yaml' />
+  </node>
+</launch>
+
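
Every <param>/<rosparam> entry above lands in the follower node's private namespace. A minimal sketch of the corresponding lookups (this simply mirrors the constructor of follower.py, added later in this commit, and only resolves when run as the 'follower' node):

    # Sketch: reading the parameters declared in follower.launch.
    import rospy

    rospy.init_node('follower')
    switch_mode = rospy.get_param('~switchMode')            # True
    max_speed   = rospy.get_param('~maxSpeed')              # 0.4
    target_dist = rospy.get_param('~targetDist')            # 0.5
    button_idx  = rospy.get_param('~controllButtonIndex')   # -4
    pid_params  = rospy.get_param('~PID_controller')        # dict with keys 'P', 'I', 'D'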

+ 8 - 0
src/robot_follower/launch/nodes/laserTracker.launch

@@ -0,0 +1,8 @@
+<launch>
+  <node name='laser_tracker' pkg="robot_follower" type="laserTracker.py">
+    <!-- This is to avoid treating noise in the laser ranges as objects -->
+    <!-- for the currently closest range measurement we check whether any reading in a window around it (size given by winSize) was similarly close in the previous scan (threshold given by deltaDist) -->
+    <param name="winSize" value="2" type="int" />
+    <param name="deltaDist" value="0.2" type="double" />
+  </node>
+</launch>
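
The noise check described above is what scripts/laserTracker.py (added later in this commit) does: a candidate minimum is accepted only if some reading within winSize indices of it in the previous scan was within deltaDist of the new value. A standalone sketch of that test on made-up toy data:

    import numpy as np

    win_size, delta_dist = 2, 0.2          # values from this launch file
    last_scan = np.array([2.0, 1.9, 0.62, 1.8, 2.1, 2.2])   # toy data
    ranges    = np.array([2.0, 1.9, 0.60, 1.8, 2.1, 2.2])   # toy data

    i = int(np.argmin(ranges))             # index of the closest reading
    lo, hi = np.clip([i - win_size, i + win_size + 1], 0, len(last_scan))
    window = last_scan[lo:hi]

    # accept the minimum only if the previous scan already saw something
    # similarly close inside the window; otherwise treat it as noise
    plausible = np.any(np.abs(window - ranges[i]) <= delta_dist)
    print(plausible)                       # True for this toy data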

+ 17 - 0
src/robot_follower/launch/nodes/visualTracker.launch

@@ -0,0 +1,17 @@
+<launch>
+  <node name='visual_tracker' pkg="robot_follower" type="visualTracker.py">
+    <!-- color of the target in HSV color space (note: visualTracker.py passes "upper" as the lower HSV bound and "lower" as the upper bound to cv2.inRange) -->
+    <rosparam ns='target'>
+      upper : [0, 120, 100]
+      lower : [9, 255, 255]
+    </rosparam>
+    <rosparam ns='pictureDimensions'>
+      <!-- Picture dimensions in pixels -->
+      pictureHeight: 480
+      pictureWidth: 640
+      <!-- Viewing angle of the camera in one direction in Radians -->
+      verticalAngle: 0.43196898986859655
+      horizontalAngle: 0.5235987755982988
+    </rosparam>
+  </node>
+</launch>
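
visualTracker.py (added later in this commit) turns these numbers into a bearing: the pixel coordinate is mapped to a displacement in [-1, 1] and passed through arctan(displacement * tan(angle)), so verticalAngle/horizontalAngle are the angles from the image centre to its edge. A quick check with the values from this file:

    import numpy as np

    picture_width = 640
    horizontal_angle = 0.5235987755982988   # from this file, roughly 30 degrees

    def angle_x(center_x):
        # same mapping as visualTracker.calculateAngleX below
        displacement = 2.0 * center_x / picture_width - 1.0
        return -np.arctan(displacement * np.tan(horizontal_angle))

    print(angle_x(320))   # 0.0     -> object straight ahead
    print(angle_x(640))   # ~-0.524 -> object at the edge of the image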

+ 4 - 0
src/robot_follower/launch/visual_follower.launch

@@ -0,0 +1,4 @@
+<launch>
+  <include file='$(find robot_follower)/launch/nodes/visualTracker.launch' />
+  <include file='$(find robot_follower)/launch/nodes/follower.launch' />
+</launch>

+ 3 - 0
src/robot_follower/msg/position.msg

@@ -0,0 +1,3 @@
+float32 angleX
+float32 angleY
+float32 distance
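
Both trackers in this commit publish this message on /object_tracker/current_position and follower.py subscribes to it. A minimal publisher sketch (the demo node name is hypothetical; the field order follows the definition above):

    #!/usr/bin/env python
    # Sketch: publish one sample the way the trackers in this commit do.
    import rospy
    from robot_follower.msg import position as PositionMsg

    rospy.init_node('position_demo')   # hypothetical demo node
    pub = rospy.Publisher('/object_tracker/current_position', PositionMsg, queue_size=3)
    rospy.sleep(0.5)                   # give the publisher time to register
    pub.publish(PositionMsg(angleX=0.1, angleY=0.0, distance=0.8))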

+ 61 - 0
src/robot_follower/package.xml

@@ -0,0 +1,61 @@
+<?xml version="1.0"?>
+<package>
+  <name>robot_follower</name>
+  <version>0.0.0</version>
+  <description>Makes a mobile robot follow a target, using either an RGB-D camera or a laser range finder</description>
+
+  <!-- One maintainer tag required, multiple allowed, one person per tag --> 
+  <!-- Example:  -->
+  <!-- <maintainer email="jane.doe@example.com">Jane Doe</maintainer> -->
+  <maintainer email="chutter@uos.de">clemens</maintainer>
+
+
+  <!-- One license tag required, multiple allowed, one license per tag -->
+  <!-- Commonly used license strings: -->
+  <!--   BSD, MIT, Boost Software License, GPLv2, GPLv3, LGPLv2.1, LGPLv3 -->
+  <license>TODO</license>
+
+
+  <!-- Url tags are optional, but multiple are allowed, one per tag -->
+  <!-- Optional attribute type can be: website, bugtracker, or repository -->
+  <!-- Example: -->
+  <!-- <url type="website">http://wiki.ros.org/first_experiments</url> -->
+
+
+  <!-- Author tags are optional, multiple are allowed, one per tag -->
+  <!-- Authors do not have to be maintainers, but could be -->
+  <!-- Example: -->
+  <!-- <author email="jane.doe@example.com">Jane Doe</author> -->
+
+
+  <!-- The *_depend tags are used to specify dependencies -->
+  <!-- Dependencies can be catkin packages or system dependencies -->
+  <!-- Examples: -->
+  <!-- Use build_depend for packages you need at compile time: -->
+  <build_depend>message_generation</build_depend>
+  <!-- Use buildtool_depend for build tool packages: -->
+  <!--   <buildtool_depend>catkin</buildtool_depend> -->
+  <!-- Use run_depend for packages you need at runtime: -->
+  <!-- Use test_depend for packages you need only for testing: -->
+  <!--   <test_depend>gtest</test_depend> -->
+  <buildtool_depend>catkin</buildtool_depend>
+  <build_depend>roscpp</build_depend>
+  <build_depend>rospy</build_depend>
+  <build_depend>std_msgs</build_depend>
+  <build_depend>sensor_msgs</build_depend>
+  <build_depend>geometry_msgs</build_depend>
+  <run_depend>message_runtime</run_depend>
+  <run_depend>message_generation</run_depend>
+  <run_depend>roscpp</run_depend>
+  <run_depend>rospy</run_depend>
+  <run_depend>std_msgs</run_depend>
+  <run_depend>sensor_msgs</run_depend>
+  <run_depend>geometry_msgs</run_depend>
+
+
+  <!-- The export tag contains other, unspecified, tags -->
+  <export>
+    <!-- Other tools can request additional information be placed here -->
+
+  </export>
+</package>

+ 3 - 0
src/robot_follower/parameters/PID_param.yaml

@@ -0,0 +1,3 @@
+P: [0.7, 0.6] 
+I: [0.07,0.04]
+D: [0.00, 0.000]
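
Each gain is a two-element vector: the first entry acts on the angular (bearing) error, the second on the distance error, matching the "first is angular, dist" comment in follower.py below. A sketch of how follower.py wires these numbers into its simplePID class (assumes scripts/follower.py is importable as a module; the 0.5 m target distance comes from follower.launch):

    # Sketch: how follower.py (added below) consumes these gains.
    from follower import simplePID   # assumes scripts/ is on PYTHONPATH

    P = [0.7, 0.6]       # [angular gain, linear gain]
    I = [0.07, 0.04]
    D = [0.00, 0.000]

    target_dist = 0.5    # targetDist from follower.launch
    pid = simplePID([0, target_dist], P, I, D)

    # each update takes [measured_angle, measured_distance] and returns
    # [angular_correction, linear_correction]; follower.py clips both to maxSpeed
    ang_u, lin_u = pid.update([0.2, 0.9])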

+ 174 - 0
src/robot_follower/scripts/follower.py

@@ -0,0 +1,174 @@
+#!/usr/bin/env python
+'''
+Author: adam_zhuo
+Date: 2020-09-14 13:56:07
+LastEditTime: 2020-09-14 20:27:08
+LastEditors: Please set LastEditors
+Description: the robot follows the movement of the nearest object.
+FilePath: /blackTornadoRobot/src/robot_follower/scripts/follower.py
+'''
+
+
+import rospy
+import thread, threading
+import time
+import numpy as np
+from sensor_msgs.msg import Joy, LaserScan
+from geometry_msgs.msg import Twist, Vector3
+from robot_follower.msg import position as PositionMsg
+from std_msgs.msg import String as StringMsg
+
+class Follower:
+	def __init__(self):
+		# as soon as we stop receiving Joy messages from the ps3 controller we stop all movement:
+		self.switchMode= rospy.get_param('~switchMode') # if this is set to False the O button has to be kept pressed in order for it to move
+		self.max_speed = rospy.get_param('~maxSpeed') 
+		self.controllButtonIndex = rospy.get_param('~controllButtonIndex')
+
+		self.buttonCallbackBusy=False
+		self.active=True
+
+		self.cmdVelPublisher = rospy.Publisher('/cmd_vel', Twist, queue_size =3)
+
+		# the topic for the tracker that gives us the current position of the object we are following
+		self.positionSubscriber = rospy.Subscriber('/object_tracker/current_position', PositionMsg, self.positionUpdateCallback)
+		# an info string from that tracker. E.g. telling us if we lost the object
+		self.trackerInfoSubscriber = rospy.Subscriber('/object_tracker/info', StringMsg, self.trackerInfoCallback)
+
+		# PID parameters: the first entry is the angular gain, the second the distance gain
+		targetDist = rospy.get_param('~targetDist')
+		PID_param = rospy.get_param('~PID_controller')
+		# the first parameter is the angular target (0 degrees always) the second is the target distance (say 1 meter)
+		self.PID_controller = simplePID([0, targetDist], PID_param['P'], PID_param['I'], PID_param['D'])
+
+		# this method gets called when the process is killed with Ctrl+C
+		rospy.on_shutdown(self.controllerLoss)
+
+	def trackerInfoCallback(self, info):
+		# we do not handle any info from the object tracker specifically at the moment. just ignore that we lost the object for example
+		rospy.logwarn(info.data)
+	
+	def positionUpdateCallback(self, position):
+	# gets called whenever we receive a new position. It will then update the motor command
+	
+		if(not(self.active)):
+			return # if we are not active we return immediately without doing anything
+
+		angleX= position.angleX
+		distance = position.distance
+
+		rospy.logwarn('Angle: {}, Distance: {}, '.format(angleX, distance))
+		
+		# call the PID controller to update it and get new speeds
+		[uncliped_ang_speed, uncliped_lin_speed] = self.PID_controller.update([angleX, distance])
+			
+		# clip these speeds to be less than the maximal speed specified above
+		angularSpeed = np.clip(-uncliped_ang_speed, -self.max_speed, self.max_speed)
+		linearSpeed  = np.clip(-uncliped_lin_speed, -self.max_speed, self.max_speed)
+		
+		# create the Twist message to send to the cmd_vel topic
+		velocity = Twist()	
+		velocity.linear = Vector3(linearSpeed,0,0.)
+		velocity.angular= Vector3(0., 0.,angularSpeed)
+		rospy.logwarn('linearSpeed: {}, angularSpeed: {}'.format(linearSpeed, angularSpeed))
+		self.cmdVelPublisher.publish(velocity)
+		
+
+	def stopMoving(self):
+		velocity = Twist()
+		velocity.linear = Vector3(0.,0.,0.)
+		velocity.angular= Vector3(0.,0.,0.)
+		self.cmdVelPublisher.publish(velocity)
+
+	def controllerLoss(self):
+		# we lost connection so we will stop moving and become inactive
+		self.stopMoving()
+		self.active = False
+		rospy.loginfo('lost connection')
+
+
+		
+class simplePID:
+	'''very simple discrete PID controller'''
+	def __init__(self, target, P, I, D):
+		'''Create a discrete PID controller
+		each of the parameters may be a vector if they have the same length
+		
+		Args:
+		target (double) -- the target value(s)
+		P, I, D (double)-- the PID parameter
+
+		'''
+
+		# check if parameter shapes are compatible.
+		if(not(np.size(P)==np.size(I)==np.size(D)) or ((np.size(target)==1) and np.size(P)!=1) or (np.size(target)!=1 and (np.size(P) != np.size(target) and (np.size(P) != 1)))):
+			raise TypeError('input parameters shape is not compatible')
+		rospy.loginfo('PID initialised with P:{}, I:{}, D:{}'.format(P,I,D))
+		self.Kp		=np.array(P)
+		self.Ki		=np.array(I)
+		self.Kd		=np.array(D)
+		self.setPoint   =np.array(target)
+		
+		self.last_error=0
+		self.integrator = 0
+		self.integrator_max = float('inf')
+		self.timeOfLastCall = None 
+		
+		
+	def update(self, current_value):
+		'''Updates the PID controller. 
+
+		Args:
+			current_value (double): vector/number of same length as the target given in the constructor
+
+		Returns:
+			control signal (double): vector of same length as the target
+
+		'''
+		current_value=np.array(current_value)
+		if(np.size(current_value) != np.size(self.setPoint)):
+			raise TypeError('current_value and target do not have the same shape')
+		if(self.timeOfLastCall is None):
+			# the PID was called for the first time; we don't know deltaT yet,
+			# so no control signal is applied
+			self.timeOfLastCall = time.clock()
+			return np.zeros(np.size(current_value))
+
+		
+		error = self.setPoint - current_value
+		P =  error
+		
+		currentTime = time.clock()
+		deltaT      = (currentTime-self.timeOfLastCall)
+
+		# integral of the error is current error * time since last update
+		self.integrator = self.integrator + (error*deltaT)
+		I = self.integrator
+		
+		# derivative is difference in error / time since last update
+		D = (error-self.last_error)/deltaT
+		
+		self.last_error = error
+		self.timeOfLastCall = currentTime
+		
+		# return control signal
+		return self.Kp*P + self.Ki*I + self.Kd*D
+		
+		
+	
+
+			
+
+
+
+
+if __name__ == '__main__':
+	print('starting')
+	rospy.init_node('follower')
+	follower = Follower()
+	try:
+		rospy.spin()
+	except rospy.ROSInterruptException:
+		print('exception')
+
+

+ 96 - 0
src/robot_follower/scripts/laserTracker.py

@@ -0,0 +1,96 @@
+#!/usr/bin/env python
+
+'''
+Author: adam_zhuo
+Date: 2020-09-14 13:56:07
+LastEditTime: 2020-09-14 20:27:52
+LastEditors: Please set LastEditors
+Description: process the lidar scan data
+FilePath: /blackTornadoRobot/src/robot_follower/scripts/laserTracker.py
+'''
+
+import rospy
+import thread, threading
+import time
+import numpy as np
+from sensor_msgs.msg import Joy, LaserScan
+from geometry_msgs.msg import Twist, Vector3
+from std_msgs.msg import String as StringMsg
+from robot_follower.msg import position as PositionMsg
+		
+class laserTracker:
+	def __init__(self):
+		self.lastScan=None
+		self.winSize = rospy.get_param('~winSize')
+		self.deltaDist = rospy.get_param('~deltaDist')
+		self.scanSubscriber = rospy.Subscriber('/scan', LaserScan, self.registerScan)
+		self.positionPublisher = rospy.Publisher('/object_tracker/current_position', PositionMsg,queue_size=3)
+		self.infoPublisher = rospy.Publisher('/object_tracker/info', StringMsg, queue_size=3)
+
+	def registerScan(self, scan_data):
+		# registers laser scan and publishes position of closest object (or point rather)
+		ranges = np.array(scan_data.ranges)
+		# sort by distance so we can check, from the closest reading outward, whether each point might be something real
+		sortedIndices = np.argsort(ranges)
+		
+		minDistanceID = None
+		minDistance   = float('inf')		
+
+		if(not(self.lastScan is None)):
+			# if we already have a last scan to compare to:
+			for i in sortedIndices:
+				# check all distance measurements starting from the closest one
+				tempMinDistance   = ranges[i]
+				if tempMinDistance == 0:
+					continue
+
+				# now we check if this might be noise:
+				# get a window. in it we will check if there has been a scan with similar distance
+				# in the last scan within that window
+				
+				# we need to clip the window so we don't get an index out of bounds
+				windowIndex = np.clip([i-self.winSize, i+self.winSize+1],0,len(self.lastScan))
+				window = self.lastScan[windowIndex[0]:windowIndex[1]]
+
+				with np.errstate(invalid='ignore'):
+					# check if any of the scans in the window (in the last scan) has a distance close enough to the current one
+					if(np.any(abs(window-tempMinDistance)<=self.deltaDist)):
+					# this will also be false for all tempMinDistance = NaN or inf
+
+						# we found a plausible distance
+						minDistanceID = i
+						minDistance = ranges[minDistanceID]
+						break # at least one point was equally close
+						# so we found a valid minimum and can stop the loop
+			
+		self.lastScan=ranges	
+		
+		# catches: no previous scan, no minimum found, or the minimum is actually inf
+		if(minDistance > scan_data.range_max):
+			#means we did not really find a plausible object
+			
+			# publish warning that we did not find anything
+			rospy.logwarn('laser no object found')
+			self.infoPublisher.publish(StringMsg('laser:nothing found'))
+			
+		else:
+			# calculate the angle of the object's location. 0 is straight ahead
+			minDistanceAngle = scan_data.angle_min + minDistanceID * scan_data.angle_increment
+			rospy.logwarn('minDistanceAngle and minDistance is {},{} '.format(minDistanceAngle, minDistance))
+			# here we only have an x angle, so the y is set arbitrarily
+			self.positionPublisher.publish(PositionMsg(minDistanceAngle, 42, minDistance))
+			
+
+
+
+if __name__ == '__main__':
+	print('starting')
+	rospy.init_node('laser_tracker')
+	tracker = laserTracker()
+	print('seems to do something')
+	try:
+		rospy.spin()
+	except rospy.ROSInterruptException:
+		print('exception')
+
+

+ 114 - 0
src/robot_follower/scripts/testCode.py

@@ -0,0 +1,114 @@
+import numpy as np
+import copy
+from unsafe_runaway import *
+import time
+from nose.tools import assert_raises
+
+
+
+
+class mockup:
+	pass
+	
+
+def makeLaserData():
+	laser_data = mockup()
+	laser_data.range_max = 5.6
+	laser_data.angle_min = -2.1
+	laser_data.angle_increment = 0.06136
+	laser_data.ranges = list(1+np.random.rand(69)*5)
+	return laser_data
+
+class Test_simpleTracker:
+
+	
+	def setUp(self):
+		self.tracker = simpleTracker() 
+	
+	def test_ignor_first_scan(self):
+		laser_data = makeLaserData()
+		tracker = simpleTracker()
+		assert_raises(UserWarning, self.tracker.registerScan,laser_data)
+
+	def test_unmuted(self):
+		laser_data = makeLaserData()
+		backup = copy.copy(laser_data)
+		try:
+			angle, distance = self.tracker.registerScan(laser_data)
+		except:
+			pass
+		assert backup.ranges == laser_data.ranges
+		#print(laser_data.ranges)
+		#print('angle: {}, dist: {}'.format(angle, distance))	
+
+	def test_nan(self):
+		laser_data = makeLaserData()
+		assert_raises(UserWarning, self.tracker.registerScan,laser_data)
+		laser_data.ranges[12] = float('nan')
+		angle, dist=self.tracker.registerScan(laser_data)
+		#print('angle: {}, dist: {}'.format(angle, dist))	
+
+	def test_only_nan(self):
+		laser_data = makeLaserData()
+		laser_data.ranges = [float('nan') for _ in laser_data.ranges]
+		assert_raises(UserWarning, self.tracker.registerScan,laser_data)
+		assert_raises(UserWarning, self.tracker.registerScan,laser_data)
+
+		
+
+	def test_real_real_min(self):
+		laser_data = makeLaserData()
+		laser_data.ranges[-1]=0.5 #real min
+		assert_raises(UserWarning, self.tracker.registerScan,laser_data)
+		laser_data.ranges[-1]=0.6 
+		laser_data.ranges[42]=0.1 #fake min
+		ang, dist = self.tracker.registerScan(laser_data)
+		assert dist == 0.6
+		#print('ang: {}, target: {}'.format(ang, (laser_data.angle_min+ 23*laser_data.angle_increment)))	
+		assert ang == laser_data.angle_min+ 68*laser_data.angle_increment
+
+
+class Test_PID:
+	def setUp(self): 
+		pass
+
+	def test_convergence(self):
+		self.pid = simplePID([0,30], 0.8, 0.001, 0.0001)
+		x =np.array([23, 12])
+		for i in range(20):
+			update= self.pid.update(x)
+			print('added {} to current x {}'.format(update, x))
+			x = x+update 
+			time.sleep(0.1)
+		assert np.all(abs(x-[0,30])<=0.01)
+
+	def test_convergence_differentParamShape(self):
+		self.pid = simplePID([0,30],0.8, 0.001, 0.0001)
+		x =np.array([23, 12])
+		for i in range(20):
+			update= self.pid.update(x)
+			print('added {} to current x {}'.format(update, x))
+			x = x+update 
+			time.sleep(0.1)
+		assert np.all(abs(x-[0,30])<=0.01)
+
+	 
+	def test_raises_unequal_param_shape_at_creation(self):
+		assert_raises(TypeError, simplePID, [0,30],[0.8, 0.7, 0.1], 0.001, 0.0001)
+		assert_raises(TypeError, simplePID, [0,30],[0.8, 0.7], 0.001, 0.0001)
+		assert_raises(TypeError, simplePID, 0,[0.8, 0.7], 0.001, 0.0001)
+		assert_raises(TypeError, simplePID, 0, [0.8, 0.7], [0.001, 0.001], [0.0001, 0.0001])
+		_ =  simplePID([0,30],[0.8, 0.7], [0.001, 0.001], [0.0001, 0.0001])
+		_ =  simplePID([0,30],0.8, 0.001, 0.0001)
+		_ =  simplePID(0,0.8, 0.001, 0.0001)
+
+	def test_raise_incompatable_input(self):
+		self.pid = simplePID([0,30], 0.8, 0.001, 0.0001)
+		_ = assert_raises(TypeError, self.pid.update, 3)
+		x =np.array([23, 12])
+		for i in range(50):
+			update= self.pid.update(x)
+			print('added {} to current x {}'.format(update, x))
+			x = x+update 
+			time.sleep(0.1)
+		assert np.all(abs(x-[0,30])<=0.001)

+ 206 - 0
src/robot_follower/scripts/visualTracker.py

@@ -0,0 +1,206 @@
+#!/usr/bin/env python
+
+from __future__ import division
+import rospy
+import message_filters
+import numpy as np
+import cv2
+from matplotlib import pyplot as plt
+from cv_bridge import CvBridge 
+
+from sensor_msgs.msg import Image
+from robot_follower.msg import position as PositionMsg
+from std_msgs.msg import String as StringMsg
+np.seterr(all='raise')  
+displayImage=False
+
+plt.close('all')
+class visualTracker:
+	def __init__(self):
+		self.bridge = CvBridge()
+		self.targetUpper = np.array(rospy.get_param('~target/upper'))
+		self.targetLower = np.array(rospy.get_param('~target/lower'))
+		self.pictureHeight= rospy.get_param('~pictureDimensions/pictureHeight')
+		self.pictureWidth = rospy.get_param('~pictureDimensions/pictureWidth')
+		vertAngle =rospy.get_param('~pictureDimensions/verticalAngle')
+		horizontalAngle =  rospy.get_param('~pictureDimensions/horizontalAngle')
+		# precompute the tangent since that's all we need anyway:
+		self.tanVertical = np.tan(vertAngle)
+		self.tanHorizontal = np.tan(horizontalAngle)
+		
+		self.lastPosition =None
+
+		# one callback that deals with depth and rgb at the same time
+		im_sub = message_filters.Subscriber('/orbbec_astra/rgb/image_rect_color', Image)
+		dep_sub = message_filters.Subscriber('/orbbec_astra/depth/image', Image)
+		self.timeSynchronizer = message_filters.ApproximateTimeSynchronizer([im_sub, dep_sub], 10, 0.5)
+		self.timeSynchronizer.registerCallback(self.trackObject)
+
+
+		self.positionPublisher = rospy.Publisher('/object_tracker/current_position', PositionMsg, queue_size=3)
+		self.infoPublisher = rospy.Publisher('/object_tracker/info', StringMsg, queue_size=3)
+		rospy.logwarn(self.targetUpper)
+
+
+	def trackObject(self, image_data, depth_data):
+		if(image_data.encoding != 'rgb8'):
+			raise ValueError('image is not rgb8 as expected')
+
+		#convert both images to numpy arrays
+		frame = self.bridge.imgmsg_to_cv2(image_data, desired_encoding='rgb8')
+		depthFrame = self.bridge.imgmsg_to_cv2(depth_data, desired_encoding='passthrough')#"32FC1")
+
+		if(np.shape(frame)[0:2] != (self.pictureHeight, self.pictureWidth)):
+			raise ValueError('image does not have the right shape. shape(frame): {}, shape parameters:{}'.format(np.shape(frame)[0:2], (self.pictureHeight, self.pictureWidth)))
+
+		# blur a little and convert to HSV color space
+		blurred = cv2.GaussianBlur(frame, (11,11), 0)
+		hsv = cv2.cvtColor(blurred, cv2.COLOR_RGB2HSV)
+		
+		# select all the pixels that are in the range specified by the target
+		org_mask = cv2.inRange(hsv, self.targetUpper, self.targetLower)	
+
+		# clean that up a little, the iterations are pretty much arbitrary
+		mask = cv2.erode(org_mask, None, iterations=4)
+		mask = cv2.dilate(mask,None, iterations=3)
+		
+		# find contours of the object
+		contours = cv2.findContours(mask.copy(), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)[-2]
+		newPos = None #if no contour at all was found the last position will again be set to none
+
+		# lets you display the image for debugging (not in real time though)
+		if displayImage:
+			backConverted = cv2.cvtColor(hsv, cv2.COLOR_HSV2BGR)
+			#cv2.imshow('frame', backConverted)
+			#cv2.waitKey(0)
+			#print(backConverted)
+			plt.figure()
+			plt.subplot(2,2,1)
+			plt.imshow(frame)
+			plt.xticks([]),plt.yticks([])
+			plt.subplot(2,2,2)
+			plt.imshow(org_mask, cmap='gray', interpolation = 'bicubic')
+			plt.xticks([]),plt.yticks([])
+			plt.subplot(2,2,3)
+			plt.imshow(mask, cmap='gray', interpolation = 'bicubic')
+			plt.xticks([]),plt.yticks([])
+			plt.show()
+			rospy.sleep(0.2)
+
+		# go through all the contours, starting with the biggest one
+		for contour in sorted(contours, key=cv2.contourArea, reverse=True):
+			# get position of object for this contour
+			pos = self.analyseContour(contour, depthFrame)
+
+			# if it's the first one we found it will be the fall back for the next scan if we don't find a plausible one
+			if newPos is None:
+				newPos = pos
+			
+			# check if the position is plausible
+			if self.checkPosPlausible(pos):
+				self.lastPosition = pos
+				self.publishPosition(pos)
+				return
+
+		self.lastPosition = newPos # we didn't find a plausible position, so we just save the biggest contour
+		# and publish warnings
+		rospy.loginfo('no position found')
+		self.infoPublisher.publish(StringMsg('visual:nothing found'))
+
+			
+			
+			
+
+	def publishPosition(self, pos):
+		# calculate the angles from the raw position
+		angleX = self.calculateAngleX(pos)
+		angleY = self.calculateAngleY(pos)
+		# publish the position (angleX, angleY, distance)
+		posMsg = PositionMsg(angleX, angleY, pos[1])
+		self.positionPublisher.publish(posMsg)
+
+	def checkPosPlausible(self, pos):
+		'''Checks if a position is plausible. i.e. close enough to the last one.'''
+
+		# for the first scan we cant tell
+		if self.lastPosition is None:
+			return False
+
+		# unpack positions
+		((centerX, centerY), dist)=pos	
+		((PcenterX, PcenterY), Pdist)=self.lastPosition
+		
+		if np.isnan(dist):
+			return False
+
+		# distance changed too much
+		if abs(dist-Pdist)>0.5:
+			return False
+
+		# location changed too much (5 is arbitrary)
+		if abs(centerX-PcenterX)>(self.pictureWidth /5):
+			return False
+
+		if abs(centerY-PcenterY)>(self.pictureHeight/5):
+			return False
+		
+		return True
+				
+		
+	def calculateAngleX(self, pos):
+		'''calculates the X angle of displacement from straight ahead'''
+
+		centerX = pos[0][0]
+		displacement = 2*centerX/self.pictureWidth-1
+		angle = -1*np.arctan(displacement*self.tanHorizontal)
+		return angle
+
+	def calculateAngleY(self, pos):
+		centerY = pos[0][1]
+		displacement = 2*centerY/self.pictureHeight-1
+		angle = -1*np.arctan(displacement*self.tanVertical)
+		return angle
+
+	def analyseContour(self, contour, depthFrame):
+		'''Calculates the center's coordinates and distance for a given contour
+
+		Args:
+			contour (opencv contour): contour of the object
+			depthFrame (numpy array): the depth image
+		
+		Returns:
+			centerX, centerY (doubles): center coordinates
+			averageDistance : distance of the object
+		'''
+		# get a rectangle that completely contains the object
+		centerRaw, size, rotation = cv2.minAreaRect(contour)
+
+		# get the center of that rounded to ints (so we can index the image)
+		center = np.round(centerRaw).astype(int)
+
+		# find out how far we can go in x/y direction without leaving the object (min of the extension of the bounding rectangle/2 (here 3 for safety)) 
+		minSize = int(min(size)/3)
+
+		# get all the depth points within this area (that is within the object)
+		depthObject = depthFrame[(center[1]-minSize):(center[1]+minSize), (center[0]-minSize):(center[0]+minSize)]
+
+		# keep only the valid (non-NaN) depth readings inside the patch
+		depthArray = depthObject[~np.isnan(depthObject)]
+
+		# with np.seterr(all='raise') an empty array would make np.mean raise,
+		# so check for it before averaging
+		if len(depthArray) == 0:
+			rospy.logwarn('empty depth array. all depth values are nan')
+			return (centerRaw, float('nan'))
+
+		# average the valid points to get a more reliable distance measure
+		averageDistance = np.mean(depthArray)
+
+		return (centerRaw, averageDistance)
+		
+
+if __name__ == '__main__':
+	print('starting')
+	rospy.init_node('visual_tracker')
+	tracker = visualTracker()
+	print('seems to do something')
+	try:
+		rospy.spin()
+	except rospy.ROSInterruptException:
+		print('exception')
+

+ 12 - 0
src/ros_arduino_bridge/ros_arduino_python/src/ros_arduino_python/base_controller.py

@@ -249,6 +249,18 @@ class BaseController:
             quaternion.z = sin(self.th/2.0)
             quaternion.w = cos(self.th/2.0)
 
+##########################################################
+            # the odometry transform broadcaster is disabled here since robot_pose_ekf
+            # now provides the odom transform (see the 20200218 note in linear_calibrate_params.yaml).
+            """
+            self.odomBroadcaster.sendTransform(
+                (self.pose_x, self.pose_y, 0),
+                (quaternion.x, quaternion.y, quaternion.z, quaternion.w),
+                rospy.Time.now(),
+                self.base_frame,
+                self.odom_name
+            )
+            """
+##########################################################
             odom = Odometry()
             odom.header.frame_id = self.odom_name
             odom.child_frame_id  = self.base_frame