# Untitled

unknown
plain_text
2 years ago
7.5 kB
10
Indexable
Never
```Function Initialization()

The __init__() function is used to initialize the class parameters. There are 6 parameters: dt, u_x, u_y, std_acc, x_std_meas, y_std_meas. dt is the sample time (the duration of one cycle) used to estimate the state. u_x and u_y are the accelerations in the x- and y-directions, respectively. std_acc is the magnitude of the process noise, and x_std_meas and y_std_meas are the standard deviations of the measurements in the x- and y-directions, respectively.

This is how class is defined:

class KalmanFilter(object):
    """Constant-acceleration Kalman filter over a 2-D state.

    The state vector is [x, y, vx, vy]^T (position and velocity);
    measurements are the observed (x, y) position only.
    """

    def __init__(self, dt, u_x, u_y, std_acc, x_std_meas, y_std_meas):
        """
        :param dt: sampling time (time for 1 cycle)
        :param u_x: acceleration in x-direction
        :param u_y: acceleration in y-direction
        :param std_acc: process noise magnitude
        :param x_std_meas: standard deviation of the measurement in x-direction
        :param y_std_meas: standard deviation of the measurement in y-direction
        """
        # Sampling time for one estimation cycle.
        self.dt = dt

        # Control input: commanded acceleration on each axis.
        self.u = np.matrix([[u_x], [u_y]])

        # Initial state estimate: at the origin, at rest.
        self.x = np.matrix([[0], [0], [0], [0]])

        # State transition matrix A (constant-velocity kinematics).
        self.A = np.matrix([[1, 0, dt, 0],
                            [0, 1, 0, dt],
                            [0, 0, 1, 0],
                            [0, 0, 0, 1]])

        # Control matrix B maps acceleration into position (dt^2/2)
        # and velocity (dt) contributions.
        half_dt_sq = (dt ** 2) / 2
        self.B = np.matrix([[half_dt_sq, 0],
                            [0, half_dt_sq],
                            [dt, 0],
                            [0, dt]])

        # Measurement mapping H: only position is observed.
        self.H = np.matrix([[1, 0, 0, 0],
                            [0, 1, 0, 0]])

        # Process noise covariance Q, scaled by the acceleration noise power.
        self.Q = np.matrix([[(dt ** 4) / 4, 0, (dt ** 3) / 2, 0],
                            [0, (dt ** 4) / 4, 0, (dt ** 3) / 2],
                            [(dt ** 3) / 2, 0, dt ** 2, 0],
                            [0, (dt ** 3) / 2, 0, dt ** 2]]) * std_acc ** 2

        # Measurement noise covariance R (independent per axis).
        self.R = np.matrix([[x_std_meas ** 2, 0],
                            [0, y_std_meas ** 2]])

        # Initial estimate-error covariance: identity.
        self.P = np.eye(self.A.shape[1])

Function predict()

def predict(self):
    """Project the state estimate and error covariance one step ahead.

    Time-update equations (Eq. 9 and Eq. 10 of the referenced tutorial):
        x_k = A x_{k-1} + B u_{k-1}
        P_k = A P_{k-1} A^T + Q

    :return: the predicted (x, y) position — the first two state rows.
    """
    # Propagate the state through the motion model plus the control input.
    self.x = self.A.dot(self.x) + self.B.dot(self.u)

    # Propagate the uncertainty and inject process noise.
    self.P = self.A.dot(self.P).dot(self.A.T) + self.Q

    return self.x[0:2]

Explanation for step 1
The function predict() performs the prediction of the state estimate x and of the error covariance P. This task is also called the time-update process because it projects the current state forward to the next time step.

Step 2/4
Function update()

The update stage computes the Kalman gain and updates the prediction state estimates and prediction error covariance.

def update(self, z):
    """Correct the prediction with measurement z (a 2x1 position).

    Measurement-update equations (Eq. 11-13 of the referenced tutorial):
        S = H P H^T + R          (innovation covariance)
        K = P H^T S^{-1}         (Kalman gain)
        x = x + K (z - H x)      (state correction)
        P = (I - K H) P          (covariance correction)

    :param z: measured (x, y) position as a 2x1 column.
    :return: the corrected (x, y) position — the first two state rows.
    """
    # Innovation (residual) covariance.
    S = self.H.dot(self.P).dot(self.H.T) + self.R

    # Kalman gain: how much to trust the measurement vs. the prediction.
    K = self.P.dot(self.H.T).dot(np.linalg.inv(S))  # Eq.(11)

    # Correct the state with the weighted residual; the result is rounded
    # because the tutorial uses it directly as pixel coordinates.
    residual = z - self.H.dot(self.x)
    self.x = np.round(self.x + K.dot(residual))  # Eq.(12)

    # Correct the error covariance.
    I = np.eye(self.H.shape[1])
    self.P = (I - K.dot(self.H)).dot(self.P)  # Eq.(13)

    return self.x[0:2]

Step 3/4

This is Detector.py code:

import numpy as np
import cv2
def detect(frame, debugMode):
    """Detect round objects in a BGR frame and return their centroids.

    :param frame: BGR image (e.g. one frame from cv2.VideoCapture)
    :param debugMode: when truthy, display the intermediate images
    :return: list of 2x1 np.ndarray centroids, one per accepted contour
    """
    # Convert frame from BGR to GRAY
    gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
    if debugMode:
        cv2.imshow('gray', gray)

    # Edge detection using Canny function
    img_edges = cv2.Canny(gray, 50, 190, 3)
    if debugMode:
        cv2.imshow('img_edges', img_edges)

    # Convert to black and white image
    ret, img_thresh = cv2.threshold(img_edges, 254, 255, cv2.THRESH_BINARY)
    if debugMode:
        cv2.imshow('img_thresh', img_thresh)

    # Find contours
    contours, _ = cv2.findContours(img_thresh, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)

    # Set the accepted minimum & maximum radius of a detected object
    min_radius_thresh = 3
    max_radius_thresh = 30

    centers = []
    for c in contours:
        # ref: https://docs.opencv.org/trunk/dd/d49/tutorial_py_contour_features.html
        # BUG FIX: the original appended np.array([[x], [y]]) but never
        # defined x and y (NameError). Compute the minimum enclosing
        # circle of each contour and take only the valid circles.
        (x, y), radius = cv2.minEnclosingCircle(c)
        if min_radius_thresh < radius < max_radius_thresh:
            centers.append(np.array([[x], [y]]))

    cv2.imshow('contours', img_thresh)
    return centers

Step 4/4
objTracking.py

This is the main file for this project that runs to track objects. At the top of this file, import the detect() function from the Detector.py file and the KalmanFilter class from the KalmanFilter.py file.

import cv2
from Detector import detect
from KalmanFilter import KalmanFilter
def main():
    """Track one object in a video: detect, predict, update, draw."""
    HiSpeed = 100
    ControlSpeedVar = 30  # Lowest: 1 - Highest: 100
    debugMode = 1

    # Create KalmanFilter object KF
    # KalmanFilter(dt, u_x, u_y, std_acc, x_std_meas, y_std_meas)
    KF = KalmanFilter(0.1, 1, 1, 1, 0.1, 0.1)

    # Create opencv video capture object
    VideoCap = cv2.VideoCapture('video/OneBall.avi')

    while True:
        # BUG FIX: the original never read a frame from VideoCap, so
        # `frame` was undefined. Read the next frame and stop cleanly
        # at the end of the stream.
        ret, frame = VideoCap.read()
        if not ret:
            break

        # Detect object
        centers = detect(frame, debugMode)

        # If centroids are detected then track them
        if len(centers) > 0:
            cx, cy = int(centers[0][0]), int(centers[0][1])

            # Draw the detected circle
            cv2.circle(frame, (cx, cy), 10, (0, 191, 255), 2)

            # Predict; KF returns 1x1 matrices, so cast to plain ints
            # before handing the coordinates to the OpenCV draw calls.
            (x, y) = KF.predict()
            x, y = int(x), int(y)

            # Draw a rectangle as the predicted object position
            cv2.rectangle(frame, (x - 15, y - 15), (x + 15, y + 15), (255, 0, 0), 2)

            # Update
            (x1, y1) = KF.update(centers[0])
            x1, y1 = int(x1), int(y1)

            # Draw a rectangle as the estimated object position
            cv2.rectangle(frame, (x1 - 15, y1 - 15), (x1 + 15, y1 + 15), (0, 0, 255), 2)

            cv2.putText(frame, "Estimated Position", (x1 + 15, y1 + 10), 0, 0.5, (0, 0, 255), 2)
            cv2.putText(frame, "Predicted Position", (x + 15, y), 0, 0.5, (255, 0, 0), 2)
            cv2.putText(frame, "Measured Position", (cx + 15, cy - 15), 0, 0.5, (0, 191, 255), 2)

        cv2.imshow('image', frame)

        if cv2.waitKey(2) & 0xFF == ord('q'):
            VideoCap.release()
            cv2.destroyAllWindows()
            break

        # Slow the loop down so the tracking is visible.
        cv2.waitKey(HiSpeed - ControlSpeedVar + 1)


if __name__ == "__main__":
    # execute main
    main()

In step 1:
Initialization is done

In step 2:
Function update is provided.

In step 3:
Third file Detector.py is given.

In step 4:
Final objTracking.py file is provided.

Please find the code in the steps above.

Thank You!```