Track targets using OpenCV's tracking module

OpenCV tracking module algorithm introduction

OpenCV's tracking module is a library of object-tracking algorithms. It helps you locate an object, such as a face, an eye, or a vehicle, across consecutive video frames.

In the OpenCV tracking module, some of the main tracking algorithms include:

  • Sparse optical flow: for example, the Kanade-Lucas-Tomasi (KLT) feature tracker follows the positions of a set of feature points from frame to frame (a minimal sketch follows this list).
  • Kalman filtering: a very popular signal-processing algorithm that uses prior motion information to predict the location of a moving target; one of its early applications was missile guidance (a short sketch appears after this overview).
  • Meanshift and Camshift: algorithms for locating the maximum of a density function; they are also used for tracking.
  • Single object trackers: with this type of tracker, a rectangle is drawn in the first frame to mark the location of the object to be tracked, and the object is then followed through subsequent frames by the tracking algorithm. In most real-world applications, these trackers are used together with an object detector.
  • Multiple object tracking algorithms: here a fast object detector finds multiple objects in every frame, and a tracking (data-association) algorithm then works out which rectangle in one frame corresponds to which rectangle in the next.
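
To make the sparse optical flow idea concrete, here is a minimal, self-contained sketch (separate from the program below) that tracks Shi-Tomasi corners from frame to frame with the pyramidal KLT tracker. The camera index and the parameter values (corner count, quality level, minimum distance) are illustrative assumptions, not values from the original program.

#include <opencv2/opencv.hpp>

using namespace cv;
using namespace std;

int main()
{
    VideoCapture cap(0);                      // assumption: default camera as input
    Mat frame, gray, prevGray;
    vector<Point2f> prevPts, nextPts;

    cap >> frame;
    if (frame.empty()) return -1;
    cvtColor(frame, prevGray, COLOR_BGR2GRAY);
    goodFeaturesToTrack(prevGray, prevPts, 200, 0.01, 10);   // Shi-Tomasi corners to track

    while (cap.read(frame))
    {
        cvtColor(frame, gray, COLOR_BGR2GRAY);

        // Re-detect corners if all tracked points have been lost
        if (prevPts.empty())
            goodFeaturesToTrack(prevGray, prevPts, 200, 0.01, 10);
        if (prevPts.empty()) { prevGray = gray.clone(); continue; }

        vector<uchar> status;
        vector<float> err;
        // Pyramidal Lucas-Kanade: estimate where each corner moved in the new frame
        calcOpticalFlowPyrLK(prevGray, gray, prevPts, nextPts, status, err);

        // Keep only the points that were tracked successfully and draw them
        vector<Point2f> kept;
        for (size_t i = 0; i < nextPts.size(); i++)
            if (status[i])
            {
                circle(frame, nextPts[i], 3, Scalar(0, 255, 0), -1);
                kept.push_back(nextPts[i]);
            }

        imshow("KLT", frame);
        if (waitKey(1) == 27) break;

        prevGray = gray.clone();
        prevPts = kept;                       // tracked points become the reference for the next frame
    }
    return 0;
}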

Each of these algorithms has advantages and disadvantages, and the appropriate algorithm can be selected according to the actual application scenario.
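
As a concrete illustration of the Kalman filtering approach mentioned above, the following minimal sketch sets up cv::KalmanFilter with a constant-velocity state [x, y, vx, vy] and runs one predict/correct cycle. The noise covariances and the measurement value (320, 240) are assumptions chosen for demonstration only.

#include <opencv2/opencv.hpp>
#include <iostream>

using namespace cv;

int main()
{
    // State: [x, y, vx, vy], measurement: [x, y]
    KalmanFilter kf(4, 2, 0, CV_32F);

    // Constant-velocity transition model (time step folded into the velocity terms)
    kf.transitionMatrix = (Mat_<float>(4, 4) <<
        1, 0, 1, 0,
        0, 1, 0, 1,
        0, 0, 1, 0,
        0, 0, 0, 1);
    setIdentity(kf.measurementMatrix);                       // we observe x and y directly
    setIdentity(kf.processNoiseCov, Scalar::all(1e-4));      // assumed process noise
    setIdentity(kf.measurementNoiseCov, Scalar::all(1e-1));  // assumed measurement noise
    setIdentity(kf.errorCovPost, Scalar::all(1));

    // One predict/correct cycle with a hypothetical detection at (320, 240)
    Mat prediction  = kf.predict();                          // predicted [x, y, vx, vy]
    Mat measurement = (Mat_<float>(2, 1) << 320.f, 240.f);
    Mat corrected   = kf.correct(measurement);               // estimate fused with the measurement

    std::cout << "predicted: " << prediction.t() << std::endl;
    std::cout << "corrected: " << corrected.t() << std::endl;
    return 0;
}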

The specific calling steps are as follows:

  1. Read the first frame of the video
  2. Draw a box around each target, pressing Enter to confirm each selection
  3. Press Esc to exit box-selection mode
  4. The program runs the tracking algorithm and draws the predicted boxes

#include <opencv2/opencv.hpp>
#include <opencv2/tracking.hpp>
#include "timestamp.hpp"

using namespace cv;
using namespace std;

// Link the OpenCV library on Windows (MSVC)
#if defined(_WIN32) && defined(_DEBUG)
#pragma comment(lib, "opencv_world346d.lib")
#elif defined(_WIN32)
#pragma comment(lib, "opencv_world346.lib")
#endif

//Supported tracking algorithms
vector<string> trackerTypes = { "BOOSTING", "MIL", "KCF", "TLD", "MEDIANFLOW", "GOTURN", "MOSSE", "CSRT" };

//Create a tracker based on name
Ptr<Tracker> createTrackerByName(string trackerType)
{
    Ptr<Tracker> tracker;
    if (trackerType == trackerTypes[0])
        tracker = TrackerBoosting::create();
    else if (trackerType == trackerTypes[1])
        tracker = TrackerMIL::create();
    else if (trackerType == trackerTypes[2])
        tracker = TrackerKCF::create();
    else if (trackerType == trackerTypes[3])
        tracker = TrackerTLD::create();
    else if (trackerType == trackerTypes[4])
        tracker = TrackerMedianFlow::create();
    else if (trackerType == trackerTypes[5])
        tracker = TrackerGOTURN::create();
    else if (trackerType == trackerTypes[6])
        tracker = TrackerMOSSE::create();
    else if (trackerType == trackerTypes[7])
        tracker = TrackerCSRT::create();
    else {
        cout << "Incorrect tracker name" << endl;
        cout << "Available trackers are: " << endl;
        for (vector<string>::iterator it = trackerTypes.begin(); it != trackerTypes.end(); ++it)
            std::cout << " " << *it << endl;
    }
    return tracker;
}

// Fill the vector with random colors
void getRandomColors(vector<Scalar> &colors, int numColors)
{
    RNG rng(0);
    for (int i = 0; i < numColors; i++)
        colors.push_back(Scalar(rng.uniform(0, 255), rng.uniform(0, 255), rng.uniform(0, 255)));
}

int help(char* argv[])
{
    std::cout << "please input arguments:" << argv[0] << "tracktype video.mp4 videoiotype"<< std::endl;

    return -1;
}

int main(int argc, char * argv[])
{
    if(argc < 4){
        return help(argv);
    }

    cout << "The default algorithm is CSRT" << endl;
    cout << "Supported algorithms include:" << endl;
    for (vector<string>::iterator it = trackerTypes.begin(); it != trackerTypes.end(); ++it)
        std::cout << " " << *it << endl;

    //Set the tracker type. Change this setting to try a different tracker.
    string trackerType;
    if(atoi(argv[1]) == 0)
        trackerType = "MOSSE";
    else if(atoi(argv[1]) == 1)
        trackerType = "KCF";
    else
        trackerType = "CSRT";

    // Path of the video file (second command-line argument)
    string videoPath = argv[2];

    // Bounding boxes of the objects selected for tracking
    vector<Rect> bboxes;

    //Create a video capture object to read the video
    cv::VideoCapture cap;
    if(atoi(argv[3]) == 0)
        cap.open(0);
    else{
        cap.open(videoPath);
    }

    Mat frame;

    // Exit if the video source could not be opened
    if (!cap.isOpened())
    {
        cout << "Error opening video file " << videoPath << endl;
        return -1;
    }

    // read first frame
    cap >> frame;

    // Draw a bounding box on the object
    //The default behavior of selectROI is to draw the box starting from the center
    // When fromCenter is set to false, the box can be drawn starting from the upper left corner
    bool showCrosshair = true;
    bool fromCenter = false;
    cout << "\
============================================ ==============\
";
    cout << "OpenCV means pressing c to cancel the object selection process" << endl;
    cout << "This won't work. Press Esc to exit the selection process" << endl;
    cout << "\
============================================ ==============\
";
    cv::selectROIs("MultiTracker", frame, bboxes, showCrosshair, fromCenter);

    // quit if there are no objects to track
    if (bboxes.size() < 1)
        return 0;

    vector<Scalar> colors;
    getRandomColors(colors, bboxes.size());

    //Create multiple trackers
    Ptr<MultiTracker> multiTracker = cv::MultiTracker::create();

    //Initialize Multitracker
    for (int i = 0; i < bboxes.size(); i++)
        multiTracker->add(createTrackerByName(trackerType), frame, Rect2d(bboxes[i]));

    // Process the video and update the trackers frame by frame
    cout << "\n==========================================================\n";
    cout << "Start tracking, press ESC to exit." << endl;
    while (cap.isOpened())
    {
        // Get frames from video
        cap >> frame;

        // If the end of the video is reached, stop the program
        if (frame.empty()) break;
        {
            // Scoped block: timestamp (from timestamp.hpp) presumably times the update call
            timestamp ti("update");
            //Update tracking results with new frame
            multiTracker->update(frame);
        }

        // Draw tracking object
        for (unsigned i = 0; i < multiTracker->getObjects().size(); i++)
        {
            rectangle(frame, multiTracker->getObjects()[i], colors[i], 2, 1);
        }

        // display frame
        imshow("MultiTracker1", frame);

        // Exit when the Esc key (27) is pressed
        if (waitKey(1) == 27) break;

    }
    return 0;
}
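
For reference, and assuming the program above is built into an executable named multitracker (the name is arbitrary), the arguments map as parsed in main: argv[1] selects the tracker (0 = MOSSE, 1 = KCF, anything else = CSRT), argv[2] is the video path, and argv[3] selects the source (0 = default camera, anything else = the video file). A typical invocation might look like:

./multitracker 2 video.mp4 1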