
Example

Minimal TrackEngine example. The example uses the OpenCV library as a well-known and convenient means of capturing frames from a camera and drawing the results.


#include "tsdk/ITrackEngine.h"
#include <opencv/cv.hpp>
#include <iostream>
#include <map>

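// Latest frame per stream id and best shot crop per track id, filled by the Observer callbacks.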
std::map<int,cv::Mat> frameImages;
std::map<int,cv::Mat> bestShotImages;

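// Observer implements the three TrackEngine callback interfaces used by the stream:
// IVisualObserver (per-frame visualization), IBestShotObserver (receives accepted best shots)
// and IBestShotPredicate (decides whether a detection becomes a best shot).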
struct Observer :
        tsdk::IBestShotObserver,
        tsdk::IVisualObserver,
        tsdk::IBestShotPredicate {

    int m_streamId;
    std::map<int, int> m_bestAreas;
    Observer(int streamId) : m_streamId(streamId) {
    }

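    // Called with a detection that the predicate accepted as the track's new best shot.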
    void bestShot(const tsdk::DetectionDescr& detection, const tsdk::AdditionalFrameData* data) override {
        const cv::Mat cvFrame(detection.image.getHeight(), detection.image.getWidth(), CV_8UC3, const_cast<void*>(detection.image.getData()));
        // save best shot crop to map
        bestShotImages[detection.trackId] = cvFrame(cv::Rect(detection.detection.rect.x,
            detection.detection.rect.y,
            detection.detection.rect.width,
            detection.detection.rect.height)).clone();
    }

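    // Called for every processed frame together with the current tracks of the stream.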
    void visual(const tsdk::FrameId &frameId,
                const fsdk::Image &image,
                const tsdk::TrackInfo * trackInfo,
                const int nTrack, 
                const tsdk::AdditionalFrameData* data) override {
        // convert fsdk::Image to cv::Mat
        const cv::Mat cvFrame(image.getHeight(), image.getWidth(), CV_8UC3, const_cast<void*>(image.getData()));
        // save frame to the map
        frameImages[m_streamId] = cvFrame.clone();
        for (int i = 0; i < nTrack; i++) {
            // draw detection rectangle on frame
            cv::rectangle(frameImages[m_streamId],
                          cv::Rect(trackInfo[i].rect.x,
                                   trackInfo[i].rect.y,
                                   trackInfo[i].rect.width,
                                   trackInfo[i].rect.height),
                          trackInfo[i].isDetector ? cv::Scalar(150, 10, 10) : cv::Scalar(10, 10, 150));
        }
    }

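    // Called when the track with the given id has finished.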
    void trackEnd(const tsdk::TrackId& trackId) override {
        // nothing to do here
    }

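    // Predicate deciding whether a detection becomes the new best shot of its track.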
    bool checkBestShot(const tsdk::DetectionDescr& descr, const tsdk::AdditionalFrameData* data) override {
        // the bigger the better (example of best shot logic)
        if (descr.detection.rect.getArea() > m_bestAreas[descr.trackId]) {
            m_bestAreas[descr.trackId] = descr.detection.rect.getArea();
            return true;
        }
        return false;
    }
};

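// Overall flow: initialize FaceEngine, activate the license, create TrackEngine and a stream
// with the Observer callbacks attached, then push camera frames into the stream and display
// the frames and best shots collected by the observer.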
int main() {
    cv::Mat frame; //current frame
    cv::VideoCapture capture; //create the capture object
    int keyboard = 0;
    capture.open(0);
    Observer visualObserver(0); // single stream in this example, so stream id 0 is used

    const std::string fsdkDataPath = "path_to_faceEngine_data";
    const std::string licenseConfPath = fsdkDataPath + "/license.conf";
    // TrackEngine needs initialized faceEngine
    auto faceEngine = fsdk::acquire(fsdk::createFaceEngine(fsdkDataPath.c_str()));
    fsdk::ISettingsProviderPtr config;
    config.acquire(fsdk::createSettingsProvider((fsdkDataPath + "/faceengine.conf").c_str()));
    faceEngine->setSettingsProvider(config);

    // Activate the license.
    // Currently required only for mobile platforms.
    fsdk::ILicense* licensePtr = faceEngine->getLicense();
    if (!licensePtr) {
        std::cout << "Failed to get FaceEngine license." << std::endl;
        return -1;
    }

    if (!fsdk::activateLicense(licensePtr, licenseConfPath.c_str())) {
        std::cout << "Failed to activate FaceEngine license." << std::endl;
        return -1;
    }

    // get TrackEngine
    fsdk::Ref<tsdk::ITrackEngine> trackEngine =
            fsdk::acquire_as<tsdk::ITrackEngine>(tsdk::createTrackEngine(faceEngine,
                "path_to_trackEngine_data/trackengine.conf"));
    // create stream
    fsdk::Ref<tsdk::IStream> stream = fsdk::acquire(trackEngine->createStream());
    // set callbacks for stream
    stream->setVisualObserver(&visualObserver);
    stream->setBestShotPredicate(&visualObserver);
    stream->setBestShotObserver(&visualObserver);

    if (!capture.isOpened()) {
        //error in opening the video input
        std::cout << "video not opened"<< std::endl;
        exit(EXIT_FAILURE);
    }
    int counter = 0;

    while( (char)keyboard != 'q' && (char)keyboard != 27 ) {
        //read the current frame
        if(!capture.read(frame)) {
            std::cerr << "Unable to read next frame." << std::endl;
            exit(EXIT_FAILURE);
        }

        // convert opencv matrix to fsdk::Image
        fsdk::Image im = fsdk::Image(frame.cols, frame.rows, fsdk::Format::R8G8B8, frame.data);
        fsdk::Image newim = im.clone();

        // push frame to our stream
        if (!stream->pushFrame(newim, counter++, nullptr)) {
            std::cout << "pushFrame error " << std::endl;
            capture.release();
            return 0;
        }

        // draw windows for all streams
        for (auto& im : frameImages) {
            cv::imshow(cv::format("frame %d",im.first), im.second);
        }

        // draw windows for all best shots
        for (auto& im : bestShotImages) {
            cv::imshow(cv::format("bestShot %d",im.first), im.second);
        }

        keyboard = cv::waitKey( 30 );
    }
    //delete capture object
    capture.release();
}
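Note that cv::VideoCapture delivers frames in BGR channel order, while the example wraps the raw buffer as fsdk::Format::R8G8B8. Whether an explicit conversion is required depends on your pipeline; as an assumption, a minimal sketch of converting the frame to RGB before pushing it (replacing the wrapping lines inside the loop above, with rgbFrame as an illustrative local variable) could look like this:

        // convert the captured BGR frame (OpenCV default) to RGB before wrapping it
        cv::Mat rgbFrame;
        cv::cvtColor(frame, rgbFrame, cv::COLOR_BGR2RGB);
        // wrap the converted buffer and clone it, exactly as in the loop above
        fsdk::Image im = fsdk::Image(rgbFrame.cols, rgbFrame.rows, fsdk::Format::R8G8B8, rgbFrame.data);
        fsdk::Image newim = im.clone();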