diff --git a/c/detectNet.cpp b/c/detectNet.cpp
index b113a5982..41e36ed21 100644
--- a/c/detectNet.cpp
+++ b/c/detectNet.cpp
@@ -660,7 +660,7 @@ int detectNet::postProcessSSD_UFF( Detection* detections, uint32_t width, uint32
 		if( object_data[2] < mConfidenceThreshold )
 			continue;
 
-		detections[numDetections].Instance   = -1; //numDetections; //(uint32_t)object_data[0];
+		detections[numDetections].TrackID    = -1; //numDetections; //(uint32_t)object_data[0];
 		detections[numDetections].ClassID    = (uint32_t)object_data[1];
 		detections[numDetections].Confidence = object_data[2];
 		detections[numDetections].Left       = object_data[3] * width;
@@ -722,7 +722,7 @@ int detectNet::postProcessSSD_ONNX( Detection* detections, uint32_t width, uint3
 		// populate a new detection entry
 		const float* coord = bbox + n * numCoord;
 
-		detections[numDetections].Instance   = -1; //numDetections;
+		detections[numDetections].TrackID    = -1; //numDetections;
 		detections[numDetections].ClassID    = maxClass;
 		detections[numDetections].Confidence = maxScore;
 		detections[numDetections].Left       = coord[0] * width;
@@ -802,7 +802,7 @@ int detectNet::postProcessDetectNet( Detection* detections, uint32_t width, uint
 			// create new entry if the detection wasn't merged with another detection
 			if( !detectionMerged )
 			{
-				detections[numDetections].Instance   = -1; //numDetections;
+				detections[numDetections].TrackID    = -1; //numDetections;
 				detections[numDetections].ClassID    = z;
 				detections[numDetections].Confidence = coverage;
 
@@ -872,7 +872,7 @@ int detectNet::postProcessDetectNet_v2( Detection* detections, uint32_t width, u
 				LogDebug(LOG_TRT "rect x=%u y=%u  conf=%f  (%f, %f) (%f, %f) \n", x, y, confidence, x1, y1, x2, y2);
 			#endif
 
-				detections[numDetections].Instance   = -1; //numDetections;
+				detections[numDetections].TrackID    = -1; //numDetections;
 				detections[numDetections].ClassID    = c;
 				detections[numDetections].Confidence = confidence;
 				detections[numDetections].Left       = x1;
@@ -902,24 +902,30 @@ int detectNet::clusterDetections( Detection* detections, int n )
 		{
 			// if the intersecting detections have different classes, pick the one with highest confidence
 			// otherwise if they have the same object class, expand the detection bounding box
+		#ifdef CLUSTER_INTERCLASS
 			if( detections[n].ClassID != detections[m].ClassID )
 			{
 				if( detections[n].Confidence > detections[m].Confidence )
 				{
 					detections[m] = detections[n];
 
-					detections[m].Instance   = -1; //m;
+					detections[m].TrackID    = -1; //m;
 					detections[m].ClassID    = detections[n].ClassID;
-					detections[m].Confidence = detections[n].Confidence;
+					detections[m].Confidence = detections[n].Confidence;
 				}
+
+				return 0; // merged detection
 			}
 			else
+		#else
+			if( detections[n].ClassID == detections[m].ClassID )
+		#endif
 			{
 				detections[m].Expand(detections[n]);
 				detections[m].Confidence = fmaxf(detections[n].Confidence, detections[m].Confidence);
-			}
 
-			return 0; // merged detection
+				return 0; // merged detection
+			}
 		}
 	}
 
@@ -949,7 +955,7 @@ void detectNet::sortDetections( Detection* detections, int numDetections )
 
 	// renumber the instance ID's
 	//for( int i=0; i < numDetections; i++ )
-	//	detections[i].Instance = i;
+	//	detections[i].TrackID = i;
 }
 
 
@@ -1008,7 +1014,7 @@ bool detectNet::Overlay( void* input, void* output, uint32_t width, uint32_t hei
 	}
 
 	// class label overlay
-	if( (flags & OVERLAY_LABEL) || (flags & OVERLAY_CONFIDENCE) )
+	if( (flags & OVERLAY_LABEL) || (flags & OVERLAY_CONFIDENCE) || (flags & OVERLAY_TRACKING) )
 	{
 		static cudaFont* font = NULL;
 
@@ -1025,38 +1031,42 @@ bool detectNet::Overlay( void* input, void* output, uint32_t width, uint32_t hei
 		}
 
 		// draw each object's description
-		std::vector< std::pair< std::string, int2 > > labels;
-
+	#ifdef BATCH_TEXT
+		std::vector< std::pair< std::string, int2 > > labels;
+	#endif
 		for( uint32_t n=0; n < numDetections; n++ )
 		{
 			const char* className  = GetClassDesc(detections[n].ClassID);
 			const float confidence = detections[n].Confidence * 100.0f;
 			const int2  position   = make_int2(detections[n].Left+5, detections[n].Top+3);
 
+			char buffer[256];
+			char* str = buffer;
+
+			if( flags & OVERLAY_LABEL )
+				str += sprintf(str, "%s ", className);
+
+			if( flags & OVERLAY_TRACKING && detections[n].TrackID >= 0 )
+				str += sprintf(str, "%i ", detections[n].TrackID);
+
 			if( flags & OVERLAY_CONFIDENCE )
-			{
-				char str[256];
-
-				if( (flags & OVERLAY_LABEL) && (flags & OVERLAY_CONFIDENCE) )
-				{
-					if( detections[n].Instance >= 0 )
-						sprintf(str, "%s %i %.1f%%", className, detections[n].Instance, confidence);
-					else
-						sprintf(str, "%s %.1f%%", className, confidence);
-				}
-				else
-					sprintf(str, "%.1f%%", confidence);
+				str += sprintf(str, "%.1f%%", confidence);
 
-				labels.push_back(std::pair<std::string, int2>(str, position));
-			}
-			else
+		#ifdef BATCH_TEXT
+			labels.push_back(std::pair<std::string, int2>(buffer, position));
+		#else
+			if( detections[n].TrackID >= 0 )
 			{
-				// overlay label only
-				labels.push_back(std::pair<std::string, int2>(className, position));
+				float4 color = make_float4(255,255,255,255);
+				color.w *= 1.0f - (fminf(detections[n].TrackLost, 15.0f) / 15.0f);
+				font->OverlayText(output, format, width, height, buffer, position.x, position.y, color);
 			}
+		#endif
 		}
 
+	#ifdef BATCH_TEXT
 		font->OverlayText(output, format, width, height, labels, make_float4(255,255,255,255));
+	#endif
 	}
 
 	PROFILER_END(PROFILER_VISUALIZE);
@@ -1105,6 +1115,8 @@ uint32_t detectNet::OverlayFlagsFromStr( const char* str_user )
 			flags |= OVERLAY_LABEL;
 		else if( strcasecmp(token, "conf") == 0 || strcasecmp(token, "confidence") == 0 )
 			flags |= OVERLAY_CONFIDENCE;
+		else if( strcasecmp(token, "track") == 0 || strcasecmp(token, "tracking") == 0 )
+			flags |= OVERLAY_TRACKING;
 		else if( strcasecmp(token, "line") == 0 || strcasecmp(token, "lines") == 0 )
 			flags |= OVERLAY_LINES;
 		else if( strcasecmp(token, "default") == 0 )
diff --git a/c/detectNet.cu b/c/detectNet.cu
index 02256471f..3471c4d7a 100644
--- a/c/detectNet.cu
+++ b/c/detectNet.cu
@@ -106,7 +106,12 @@ cudaError_t launchDetectionOverlay( T* input, T* output, uint32_t width, uint32_
 		const dim3 blockDim(8, 8);
 		const dim3 gridDim(iDivUp(boxWidth,blockDim.x), iDivUp(boxHeight,blockDim.y));
 
-		gpuDetectionOverlayBox<T><<<gridDim, blockDim>>>(input, output, width, height, (int)detections[n].Left, (int)detections[n].Top, boxWidth, boxHeight, colors[detections[n].ClassID]);
+		float4 color = colors[detections[n].ClassID];
+
+		if( detections[n].TrackID >= 0 )
+			color.w *= 1.0f - (fminf(detections[n].TrackLost, 15.0f) / 15.0f);
+
+		gpuDetectionOverlayBox<T><<<gridDim, blockDim>>>(input, output, width, height, (int)detections[n].Left, (int)detections[n].Top, boxWidth, boxHeight, color);
 	}
 
 	return cudaGetLastError();
diff --git a/c/detectNet.h b/c/detectNet.h
index 8c269cb23..5ccc3b45a 100644
--- a/c/detectNet.h
+++ b/c/detectNet.h
@@ -104,7 +104,7 @@
 		  "  --alpha=ALPHA          overlay alpha blending value, range 0-255 (default: 120)\n" \
 		  "  --overlay=OVERLAY      detection overlay flags (e.g. --overlay=box,labels,conf)\n" \
 		  "                         valid combinations are:  'box', 'lines', 'labels', 'conf', 'none'\n" \
-		  "  --profile              enable layer profiling in TensorRT\n\n"
+		  "  --profile              enable layer profiling in TensorRT\n\n" \
 
 
 // forward declarations
@@ -128,8 +128,9 @@ class detectNet : public tensorNet
 		float Confidence;	/**< Confidence value of the detected object. */
 
 		// Tracking Info
-		int Instance;	    /**< Unique tracking ID (or -1 if untracked) */
-		int TrackFrames;	/**< The number of frames the object has been positively tracked for */
+		int TrackID;	    /**< Unique tracking ID (or -1 if untracked) */
+		int TrackStatus;	/**< -1 for dropped, 0 for initializing, 1 for active/valid */
+		int TrackFrames;	/**< The number of frames the object has been re-identified for */
 		int TrackLost;  	/**< The number of consecutive frames tracking has been lost for */
 
 		// Bounding Box Coordinates
@@ -193,7 +194,7 @@ class detectNet : public tensorNet
 		inline bool Expand( const Detection& det )       { if(!Overlaps(det)) return false; Left = fminf(det.Left, Left); Top = fminf(det.Top, Top); Right = fmaxf(det.Right, Right); Bottom = fmaxf(det.Bottom, Bottom); return true; }
 
 		/**< Reset all member variables to zero */
-		inline void Reset() { ClassID = 0; Confidence = 0; Instance = -1; TrackFrames = 0; TrackLost = 0; Left = 0; Right = 0; Top = 0; Bottom = 0; }
+		inline void Reset() { ClassID = 0; Confidence = 0; TrackID = -1; TrackStatus = -1; TrackFrames = 0; TrackLost = 0; Left = 0; Right = 0; Top = 0; Bottom = 0; }
 
 		/**< Default constructor */
 		inline Detection() { Reset(); }
@@ -208,7 +209,8 @@ class detectNet : public tensorNet
 		OVERLAY_BOX        = (1 << 0),	/**< Overlay the object bounding boxes (filled) */
 		OVERLAY_LABEL      = (1 << 1),	/**< Overlay the class description labels */
 		OVERLAY_CONFIDENCE = (1 << 2),	/**< Overlay the detection confidence values */
-		OVERLAY_LINES      = (1 << 3),	/**< Overlay the bounding box lines (unfilled) */
+		OVERLAY_TRACKING   = (1 << 3),	/**< Overlay tracking information (like track ID) */
+		OVERLAY_LINES      = (1 << 4),	/**< Overlay the bounding box lines (unfilled) */
 
 		OVERLAY_DEFAULT    = OVERLAY_BOX|OVERLAY_LABEL|OVERLAY_CONFIDENCE,  /**< The default choice of overlay */
 	};
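For reference, a minimal sketch of how application code might consume the new tracking fields on detectNet::Detection and the OVERLAY_TRACKING flag. It assumes the templated Detect() overload that fills a caller-visible Detection array and the uchar3 image type used elsewhere in jetson-inference; the helper name is hypothetical.

#include "detectNet.h"
#include <cstdio>

// print only tracks the tracker considers established (TrackStatus >= 1 per the
// Detection docs above); untracked detections are left with TrackID == -1
void printActiveTracks( detectNet* net, uchar3* image, uint32_t width, uint32_t height )
{
	detectNet::Detection* detections = NULL;

	// OVERLAY_TRACKING draws the track ID alongside the class label
	const uint32_t overlay = detectNet::OVERLAY_BOX | detectNet::OVERLAY_LABEL | detectNet::OVERLAY_TRACKING;
	const int numDetections = net->Detect(image, width, height, &detections, overlay);

	for( int n=0; n < numDetections; n++ )
	{
		const detectNet::Detection& det = detections[n];

		if( det.TrackID < 0 || det.TrackStatus < 1 )
			continue;   // untracked, still initializing, or dropped

		printf("track %i  class=%u  conf=%.2f  frames=%i  lost=%i\n",
			  det.TrackID, det.ClassID, det.Confidence, det.TrackFrames, det.TrackLost);
	}
}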
diff --git a/c/trackers/objectTracker.cpp b/c/trackers/objectTracker.cpp
index 3145031ca..09af1bc5d 100644
--- a/c/trackers/objectTracker.cpp
+++ b/c/trackers/objectTracker.cpp
@@ -19,9 +19,11 @@
  * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
  * DEALINGS IN THE SOFTWARE.
  */
- 
+
+#include "detectNet.h"
 #include "objectTracker.h"
+
 #include "objectTrackerIOU.h"
 #include "objectTrackerKLT.h"
 
 
@@ -29,29 +31,44 @@
 // Create
 objectTracker* objectTracker::Create( objectTracker::Type type )
 {
+	objectTracker* tracker = NULL;
+
 	if( type == KLT )
 	{
 	#if HAS_VPI
-		return objectTrackerKLT::Create();
+		tracker = objectTrackerKLT::Create();
 	#else
 		LogError(LOG_TRACKER "couldn't create KLT tracker (not built with VPI enabled)\n");
-		return NULL;
 	#endif
 	}
 	else if( type == IOU )
 	{
-		return objectTrackerIOU::Create();
+		tracker = objectTrackerIOU::Create();
 	}
 
-	return NULL;
+	if( !tracker )
+		return NULL;
+
+	if( !tracker->Init() )
+	{
+		delete tracker;
+		return NULL;
+	}
+
+	return tracker;
 }
 
 
// Create
 objectTracker* objectTracker::Create( const commandLine& cmdLine )
 {
-	const char* str = cmdLine.GetString("tracker", cmdLine.GetString("tracking"));
-	const Type type = TypeFromStr(str);
+	Type type = IOU;
+
+	const bool useDefault = cmdLine.GetFlag("tracking");
+	const char* typeStr   = cmdLine.GetString("tracker", cmdLine.GetString("tracking"));
+
+	if( !useDefault )
+		type = TypeFromStr(typeStr);
 
 	if( type == KLT )
 	{
@@ -68,8 +85,8 @@ objectTracker* objectTracker::Create( const commandLine& cmdLine )
 	}
 	else
 	{
-		if( str != NULL )
-			LogError(LOG_TRACKER "tried to create invalid object tracker type: %s\n", str);
+		if( typeStr != NULL )
+			LogError(LOG_TRACKER "tried to create invalid object tracker type: %s\n", typeStr);
 	}
 
 	return NULL;
diff --git a/c/trackers/objectTracker.h b/c/trackers/objectTracker.h
index c74903e78..9b10f7bc8 100644
--- a/c/trackers/objectTracker.h
+++ b/c/trackers/objectTracker.h
@@ -28,12 +28,23 @@
 /**
- * Tracker logging prefix
+ * Standard command-line options able to be passed to detectNet::Create()
+ * @ingroup objectTracker
+ */
+#define OBJECT_TRACKER_USAGE_STRING  "objectTracker arguments: \n" \
+		  "  --tracking               flag to enable default tracker (IOU)\n" \
+		  "  --tracker=TRACKER        enable tracking with 'IOU' or 'KLT'\n" \
+		  "  --tracker-min-frames=N   the number of re-identified frames for a track to be considered valid (default: 3)\n" \
+		  "  --tracker-drop-frames=N  number of consecutive lost frames before a track is removed (default: 15)\n" \
+		  "  --tracker-overlap=N      how much IOU overlap is required for a bounding box to be matched (default: 0.5)\n\n" \
+
+/**
+ * Object tracker logging prefix
  * @ingroup objectTracker
  */
 #define LOG_TRACKER "[tracker] "
- 
+
 
 /**
  * Object tracker interface
  * @ingroup objectTracker
  */
 class objectTracker
@@ -66,6 +77,11 @@ class objectTracker
 	 */
 	static objectTracker* Create( const commandLine& cmdLine );
 
+	/**
+	 * Init (optional)
+	 */
+	virtual bool Init()		{ return true; }
+
 	/**
 	 * Process
 	 */
@@ -81,6 +97,11 @@ class objectTracker
 	 */
 	virtual Type GetType() const = 0;
 
+	/**
+	 * Usage string for command line arguments to Create()
+	 */
+	static inline const char* Usage()	{ return OBJECT_TRACKER_USAGE_STRING; }
+
 	/**
 	 * Convert a Type enum to string.
 	 */
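The refactored factory above now separates allocation from setup: Create() instantiates the concrete tracker, then calls the new virtual Init() hook and deletes the object if it fails, so subclasses that need deferred setup (e.g. VPI resources for KLT) can bail out cleanly. A short sketch of the calling side, assuming the commandLine class from jetson-utils and the options documented in OBJECT_TRACKER_USAGE_STRING (the helper name is hypothetical):

#include "objectTracker.h"
#include "commandLine.h"

// parse the tracker options (e.g. "--tracker=IOU --tracker-min-frames=3 --tracker-overlap=0.5")
// and create the tracker; Create() returns NULL if the type is invalid or Init() fails
objectTracker* createTrackerFromArgs( int argc, char** argv )
{
	commandLine cmdLine(argc, argv);

	objectTracker* tracker = objectTracker::Create(cmdLine);

	if( !tracker )
		LogError(LOG_TRACKER "failed to create object tracker from the command line\n");

	return tracker;
}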
diff --git a/c/trackers/objectTrackerIOU.cpp b/c/trackers/objectTrackerIOU.cpp
index 67073a14f..b9a8cd5cb 100644
--- a/c/trackers/objectTrackerIOU.cpp
+++ b/c/trackers/objectTrackerIOU.cpp
@@ -24,10 +24,15 @@
 
 
 // constructor
-objectTrackerIOU::objectTrackerIOU()
+objectTrackerIOU::objectTrackerIOU( uint32_t minFrames, uint32_t dropFrames, float overlapThreshold )
 {
+	mIDCount    = 0;
 	mFrameCount = 0;
-	mInstanceCount = 0;
+
+	mMinFrames  = minFrames;
+	mDropFrames = dropFrames;
+
+	mOverlapThreshold = overlapThreshold;
 
 	mTracks.reserve(128);
 }
@@ -39,10 +44,11 @@ objectTrackerIOU::~objectTrackerIOU()
 }
 
 
+
 // Create
-objectTrackerIOU* objectTrackerIOU::Create()
+objectTrackerIOU* objectTrackerIOU::Create( uint32_t minFrames, uint32_t dropFrames, float overlapThreshold )
 {
-	objectTrackerIOU* tracker = new objectTrackerIOU();
+	objectTrackerIOU* tracker = new objectTrackerIOU(minFrames, dropFrames, overlapThreshold);
 
 	if( !tracker )
 		return NULL;
@@ -54,7 +60,9 @@ objectTrackerIOU* objectTrackerIOU::Create()
 // Create
 objectTrackerIOU* objectTrackerIOU::Create( const commandLine& cmdLine )
 {
-	return Create();
+	return Create(cmdLine.GetUnsignedInt("tracker-min-frames", OBJECT_TRACKER_DEFAULT_MIN_FRAMES),
+			    cmdLine.GetUnsignedInt("tracker-drop-frames", OBJECT_TRACKER_DEFAULT_DROP_FRAMES),
+			    cmdLine.GetFloat("tracker-overlap", OBJECT_TRACKER_DEFAULT_OVERLAP_THRESHOLD));
 }
 
 
@@ -73,7 +81,7 @@ int findBestIOU( const detectNet::Detection& track, detectNet::Detection* detect
 
 	for( int n=0; n < numDetections; n++ )
 	{
-		if( detections[n].Instance >= 0 )
+		if( detections[n].TrackID >= 0 )
 			continue;	// this bbox is already a match for another track
 
 		if( detections[n].ClassID != track.ClassID )
@@ -91,44 +99,49 @@ int findBestIOU( const detectNet::Detection& track, detectNet::Detection* detect
 	return maxDetection;
 }
 
-
+
 // Process
 int objectTrackerIOU::Process( void* input, uint32_t width, uint32_t height, imageFormat format, detectNet::Detection* detections, int numDetections )
 {
 	// update active tracks
 	for( int n=0; n < mTracks.size(); n++ )
 	{
-		const int bestMatch = findBestIOU(mTracks[n], detections, numDetections);
+		const int bestMatch = findBestIOU(mTracks[n], detections, numDetections, mOverlapThreshold);
 
 		if( bestMatch >= 0 )
 		{
-			detections[bestMatch].Instance = mTracks[n].Instance;
+			detections[bestMatch].TrackID = (mTracks[n].TrackFrames == mMinFrames) ? mIDCount++ : mTracks[n].TrackID;
 			detections[bestMatch].TrackFrames = mTracks[n].TrackFrames + 1;
+			detections[bestMatch].TrackStatus = (detections[bestMatch].TrackFrames >= mMinFrames) ? 1 : 0;
 			detections[bestMatch].TrackLost = 0;
 
 			mTracks[n] = detections[bestMatch];
 
-			LogVerbose(LOG_TRACKER "updated track -> instance=%i class=%u frames=%i\n", detections[n].Instance, detections[n].ClassID, detections[n].TrackFrames);
+			LogVerbose(LOG_TRACKER "updated track %i -> class=%u status=%i frames=%i\n", mTracks[n].TrackID, mTracks[n].ClassID, mTracks[n].TrackStatus, mTracks[n].TrackFrames);
 		}
 		else
 		{
 			mTracks[n].TrackLost++;
+
+			if( mTracks[n].TrackLost >= mDropFrames )
+				mTracks[n].TrackStatus = -1;
 		}
 	}
 
 	// add new tracks
 	for( int n=0; n < numDetections; n++ )
 	{
-		if( detections[n].Instance >= 0 )
+		if( detections[n].TrackID >= 0 )
 			continue;
 
-		detections[n].Instance = mInstanceCount++;
+		detections[n].TrackID = -1;
+		detections[n].TrackStatus = 0;
 		detections[n].TrackFrames = 0;
 		detections[n].TrackLost = 0;
 
 		mTracks.push_back(detections[n]);
 
-		LogVerbose(LOG_TRACKER "added track -> instance=%i class=%u\n", detections[n].Instance, detections[n].ClassID);
+		LogVerbose(LOG_TRACKER "added track %i -> class=%u\n", detections[n].TrackID, detections[n].ClassID);
 	}
 
 	// add valid tracks to the output array
@@ -136,16 +149,16 @@ int objectTrackerIOU::Process( void* input, uint32_t width, uint32_t height, ima
 
 	for( int n=0; n < mTracks.size(); n++ )
 	{
-		if( mTracks[n].TrackFrames >= 3 )
+		if( mTracks[n].TrackFrames >= mMinFrames )
 			detections[numDetections++] = mTracks[n];
 	}
 
 	// remove dropped tracks
 	for( auto iter = mTracks.begin(); iter != mTracks.end(); )
 	{
-		if( iter->TrackLost > 15 )
+		if( iter->TrackStatus < 0 )
 		{
-			LogVerbose(LOG_TRACKER "dropped track -> instance=%i class=%u frames=%i\n", iter->Instance, iter->ClassID, iter->TrackFrames);
+			LogVerbose(LOG_TRACKER "dropped track %i -> class=%u frames=%i\n", iter->TrackID, iter->ClassID, iter->TrackFrames);
 			iter = mTracks.erase(iter);
 		}
 		else
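The matching step above keys entirely off bounding-box overlap: each stored track is compared against the not-yet-claimed detections of the same class, and the best candidate is accepted only when its IOU clears mOverlapThreshold. A self-contained sketch of that scoring, written against plain (left, top, right, bottom) coordinates — illustrative only, not the library's findBestIOU():

#include <algorithm>

// intersection-over-union of two axis-aligned boxes given as (left, top, right, bottom)
static float boxIOU( float aL, float aT, float aR, float aB,
                     float bL, float bT, float bR, float bB )
{
	const float interW = std::min(aR, bR) - std::max(aL, bL);
	const float interH = std::min(aB, bB) - std::max(aT, bT);

	if( interW <= 0.0f || interH <= 0.0f )
		return 0.0f;   // the boxes don't overlap at all

	const float interArea = interW * interH;
	const float unionArea = (aR - aL) * (aB - aT) + (bR - bL) * (bB - bT) - interArea;

	return interArea / unionArea;
}

A detection only counts as a match for a track when this score reaches the --tracker-overlap threshold (0.5 by default), which is why IOU tracking depends on a reasonably high frame rate: boxes must still overlap between consecutive frames.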
diff --git a/c/trackers/objectTrackerIOU.h b/c/trackers/objectTrackerIOU.h
index 702278159..8ab9404ad 100644
--- a/c/trackers/objectTrackerIOU.h
+++ b/c/trackers/objectTrackerIOU.h
@@ -27,10 +27,33 @@
 #include "objectTracker.h"
 
 
+/**
+ * The number of re-identified frames before establishing a track
+ * @ingroup objectTracker
+ */
+#define OBJECT_TRACKER_DEFAULT_MIN_FRAMES 3
+
+/**
+ * The number of consecutive lost frames after which a track is removed
+ * @ingroup objectTracker
+ */
+#define OBJECT_TRACKER_DEFAULT_DROP_FRAMES 15
+
+/**
+ * How much IOU overlap is required for a bounding box to be matched
+ */
+#define OBJECT_TRACKER_DEFAULT_OVERLAP_THRESHOLD 0.5
+
+
 /**
  * Object tracker using Intersection-Over-Union (IOU)
+ *
  * "High-Speed Tracking-by-Detection Without Using Image Information"
  * http://elvera.nue.tu-berlin.de/files/1517Bochinski2017.pdf
+ *
+ * This tracker essentially performs temporal clustering of bounding boxes
+ * without using visual information, hence it is very fast but less accurate.
+ *
  * @ingroup objectTracker
 */
 class objectTrackerIOU : public objectTracker
@@ -38,8 +61,12 @@ class objectTrackerIOU : public objectTracker
 public:
	/**
 	 * Create a new object tracker.
+	 * @param minFrames the number of re-identified frames before establishing a track
+	 * @param dropFrames the number of consecutive lost frames after which a track is removed
 	 */
-	static objectTrackerIOU* Create();
+	static objectTrackerIOU* Create( uint32_t minFrames=OBJECT_TRACKER_DEFAULT_MIN_FRAMES,
+							   uint32_t dropFrames=OBJECT_TRACKER_DEFAULT_DROP_FRAMES,
+							   float overlapThreshold=OBJECT_TRACKER_DEFAULT_OVERLAP_THRESHOLD );
 
 	/**
 	 * Create a new object tracker by parsing the command line.
@@ -57,21 +84,56 @@ class objectTrackerIOU : public objectTracker
 	~objectTrackerIOU();
 
 	/**
-	 * GetType
+	 * The number of re-identified frames before establishing a track
+	 */
+	inline uint32_t GetMinFrames() const					{ return mMinFrames; }
+
+	/**
+	 * Set the number of re-identified frames before establishing a track
 	 */
-	inline virtual Type GetType() const	{ return IOU; }
+	inline void SetMinFrames( uint32_t frames )				{ mMinFrames = frames; }
 
 	/**
-	 * Process
+	 * The number of consecutive lost frames after which a track is removed
+	 */
+	inline uint32_t GetDropFrames() const					{ return mDropFrames; }
+
+	/**
+	 * Set the number of consecutive lost frames after which a track is removed
+	 */
+	inline void SetDropFrames( uint32_t frames )				{ mDropFrames = frames; }
+
+	/**
+	 * How much IOU overlap is required for a bounding box to be matched
+	 */
+	inline float GetOverlapThreshold() const				{ return mOverlapThreshold; }
+
+	/**
+	 * Set how much IOU overlap is required for a bounding box to be matched
+	 */
+	inline void SetOverlapThreshold( float threshold )			{ mOverlapThreshold = threshold; }
+
+	/**
+	 * @see objectTracker::GetType
+	 */
+	inline virtual Type GetType() const	{ return IOU; }
+
+	/**
+	 * @see objectTracker::Process
 	 */
 	virtual int Process( void* image, uint32_t width, uint32_t height, imageFormat format, detectNet::Detection* detections, int numDetections );
 
 protected:
-	objectTrackerIOU();
+	objectTrackerIOU( uint32_t minFrames, uint32_t dropFrames, float overlapThreshold );
 
+	uint32_t mIDCount;
 	uint64_t mFrameCount;
-	uint32_t mInstanceCount;
 
+	uint32_t mMinFrames;
+	uint32_t mDropFrames;
+
+	float mOverlapThreshold;
+
 	std::vector<detectNet::Detection> mTracks;
 };
diff --git a/examples/detectnet/detectnet.cpp b/examples/detectnet/detectnet.cpp
index 8f08ce15e..df48be1ce 100644
--- a/examples/detectnet/detectnet.cpp
+++ b/examples/detectnet/detectnet.cpp
@@ -24,6 +24,8 @@
 #include "videoOutput.h"
 
 #include "detectNet.h"
+#include "objectTracker.h"
+
 #include <signal.h>
 
 
@@ -49,14 +51,15 @@ void sig_handler(int signo)
 int usage()
 {
 	printf("usage: detectnet [--help] [--network=NETWORK] [--threshold=THRESHOLD] ...\n");
-	printf("                 input_URI [output_URI]\n\n");
+	printf("                 input [output]\n\n");
 	printf("Locate objects in a video/image stream using an object detection DNN.\n");
 	printf("See below for additional arguments that may not be shown above.\n\n");
 	printf("positional arguments:\n");
-	printf("    input_URI       resource URI of input stream  (see videoSource below)\n");
-	printf("    output_URI      resource URI of output stream (see videoOutput below)\n\n");
+	printf("    input           resource URI of input stream  (see videoSource below)\n");
+	printf("    output          resource URI of output stream (see videoOutput below)\n\n");
 	printf("%s", detectNet::Usage());
+	printf("%s", objectTracker::Usage());
 	printf("%s", videoSource::Usage());
 	printf("%s", videoOutput::Usage());
 	printf("%s", Log::Usage());
@@ -150,8 +153,8 @@ int main( int argc, char** argv )
 				LogVerbose("\ndetected obj %i  class #%u (%s)  confidence=%f\n", n, detections[n].ClassID, net->GetClassDesc(detections[n].ClassID), detections[n].Confidence);
 				LogVerbose("bounding box %i  (%.2f, %.2f)  (%.2f, %.2f)  w=%.2f  h=%.2f\n", n, detections[n].Left, detections[n].Top, detections[n].Right, detections[n].Bottom, detections[n].Width(), detections[n].Height());
 
-				if( detections[n].Instance >= 0 ) // is this a tracked object?
-					LogVerbose("tracking  instance %i  frames=%i  lost=%i\n", detections[n].Instance, detections[n].TrackFrames, detections[n].TrackLost);
+				if( detections[n].TrackID >= 0 ) // is this a tracked object?
+					LogVerbose("tracking  ID %i  status=%i  frames=%i  lost=%i\n", detections[n].TrackID, detections[n].TrackStatus, detections[n].TrackFrames, detections[n].TrackLost);
 			}
 		}
diff --git a/examples/object-tracker/object-tracker.cpp b/examples/object-tracker/object-tracker.cpp
index 8ab98883e..53c607968 100644
--- a/examples/object-tracker/object-tracker.cpp
+++ b/examples/object-tracker/object-tracker.cpp
@@ -51,6 +51,7 @@ int usage()
 	printf("    output_URI      resource URI of output stream (see videoOutput below)\n\n");
 
 	printf("%s", detectNet::Usage());
+	printf("%s", objectTracker::Usage());
 	printf("%s", videoSource::Usage());
 	printf("%s", videoOutput::Usage());
 	printf("%s", Log::Usage());
@@ -167,7 +168,7 @@ int main( int argc, char** argv )
 
 			for( int n=0; n < numTracks; n++ )
 			{
-				LogVerbose("tracked obj %i  class #%u (%s)  confidence=%f  instance=%i  frames=%i  lost=%i\n", n, detections[n].ClassID, net->GetClassDesc(detections[n].ClassID), detections[n].Confidence, detections[n].Instance, detections[n].TrackFrames, detections[n].TrackLost);
+				LogVerbose("tracked obj %i  class #%u (%s)  confidence=%f  trackID=%i  frames=%i  lost=%i\n", n, detections[n].ClassID, net->GetClassDesc(detections[n].ClassID), detections[n].Confidence, detections[n].TrackID, detections[n].TrackFrames, detections[n].TrackLost);
 				LogVerbose("bounding box %i  (%f, %f)  (%f, %f)  w=%f  h=%f\n", n, detections[n].Left, detections[n].Top, detections[n].Right, detections[n].Bottom, detections[n].Width(), detections[n].Height());
 			}
 		}
diff --git a/python/bindings/PyDetectNet.cpp b/python/bindings/PyDetectNet.cpp
index 981f78776..65d295f7f 100644
--- a/python/bindings/PyDetectNet.cpp
+++ b/python/bindings/PyDetectNet.cpp
@@ -46,12 +46,12 @@ typedef struct {
 "     Center (x,y) coordinate of bounding box\n\n" \
 "ClassID\n" \
 "     Class index of the detected object\n\n" \
+"TrackID\n" \
+"     Unique tracking ID (or -1 if untracked)\n\n" \
 "Confidence\n" \
 "     Confidence value of the detected object\n\n" \
 "Height\n" \
 "     Height of bounding box\n\n" \
-"Instance\n" \
-"     Instance index of the detected object\n\n" \
 "Left\n" \
 "     Left bounding box coordinate\n\n" \
 "Right\n" \
@@ -136,15 +136,16 @@ static PyObject* PyDetection_ToString( PyDetection_Object* self )
 	// format string
 	char str[4096];
 
-	if( self->det.Instance >= 0 )
+	if( self->det.TrackID >= 0 )
 	{
 		sprintf(str, 
 			   "<detectNet.Detection object>\n"
-			   "   -- ClassID: %i\n"
-			   "   -- Confidence: %g\n"
-			   "   -- Instance: %i\n"
-			   "   -- Track Frames: %i\n"
-			   "   -- Track Lost: %i\n"
+			   "   -- Confidence:  %g\n"
+			   "   -- ClassID:     %i\n"
+			   "   -- TrackID:     %i\n"
+			   "   -- TrackStatus: %i\n"
+			   "   -- TrackFrames: %i\n"
+			   "   -- TrackLost:   %i\n"
 			   "   -- Left:    %g\n"
 			   "   -- Top:     %g\n"
 			   "   -- Right:   %g\n"
@@ -153,8 +154,8 @@ static PyObject* PyDetection_ToString( PyDetection_Object* self )
 			   "   -- Height:  %g\n"
 			   "   -- Area:    %g\n"
 			   "   -- Center:  (%g, %g)",
-			   self->det.ClassID, self->det.Confidence, 
-			   self->det.Instance, self->det.TrackFrames, self->det.TrackLost,
+			   self->det.Confidence, self->det.ClassID, 
+			   self->det.TrackID, self->det.TrackStatus, self->det.TrackFrames, self->det.TrackLost,
 			   self->det.Left, self->det.Top, self->det.Right, self->det.Bottom, 
 			   self->det.Width(), self->det.Height(), self->det.Area(), cx, cy);
 	}
@@ -162,8 +163,8 @@ static PyObject* PyDetection_ToString( PyDetection_Object* self )
 	{
 		sprintf(str, 
 			   "<detectNet.Detection object>\n"
-			   "   -- ClassID: %i\n"
 			   "   -- Confidence: %g\n"
+			   "   -- ClassID: %i\n"
 			   "   -- Left:    %g\n"
 			   "   -- Top:     %g\n"
 			   "   -- Right:   %g\n"
@@ -172,7 +173,7 @@ static PyObject* PyDetection_ToString( PyDetection_Object* self )
 			   "   -- Height:  %g\n"
 			   "   -- Area:    %g\n"
 			   "   -- Center:  (%g, %g)",
-			   self->det.ClassID, self->det.Confidence, 
+			   self->det.Confidence, self->det.ClassID, 
 			   self->det.Left, self->det.Top, self->det.Right, self->det.Bottom, 
 			   self->det.Width(), self->det.Height(), self->det.Area(), cx, cy);
 	}
@@ -203,18 +204,18 @@ static PyObject* PyDetection_Contains( PyDetection_Object* self, PyObject *args,
 }
 
 
-// GetInstance
-static PyObject* PyDetection_GetInstance( PyDetection_Object* self, void* closure )
+// GetTrackID
+static PyObject* PyDetection_GetTrackID( PyDetection_Object* self, void* closure )
 {
-	return PYLONG_FROM_LONG(self->det.Instance);
+	return PYLONG_FROM_LONG(self->det.TrackID);
 }
 
-// SetInstance
-static int PyDetection_SetInstance( PyDetection_Object* self, PyObject* value, void* closure )
+// SetTrackID
+static int PyDetection_SetTrackID( PyDetection_Object* self, PyObject* value, void* closure )
 {
 	if( !value )
 	{
-		PyErr_SetString(PyExc_TypeError, LOG_PY_INFERENCE "Not permitted to delete detectNet.Detection.Instance attribute");
+		PyErr_SetString(PyExc_TypeError, LOG_PY_INFERENCE "Not permitted to delete detectNet.Detection.TrackID attribute");
 		return -1;
 	}
 
@@ -223,7 +224,7 @@ static int PyDetection_SetInstance( PyDetection_Object* self, PyObject* value, v
 	if( PyErr_Occurred() != NULL )
 		return -1;
 
-	self->det.Instance = arg;
+	self->det.TrackID = arg;
 	return 0;
 }
 
@@ -441,8 +442,9 @@ static PyObject* PyDetection_GetROI( PyDetection_Object* self, void* closure )
 
 static PyGetSetDef pyDetection_GetSet[] = 
 {
-	{ "Instance", (getter)PyDetection_GetInstance, (setter)PyDetection_SetInstance, "Instance index of the detected object", NULL},
 	{ "ClassID", (getter)PyDetection_GetClassID, (setter)PyDetection_SetClassID, "Class index of the detected object", NULL},
+	{ "TrackID", (getter)PyDetection_GetTrackID, (setter)PyDetection_SetTrackID, "Unique tracking ID (-1 if untracked)", NULL},
+	{ "Instance", (getter)PyDetection_GetTrackID, (setter)PyDetection_SetTrackID, "Unique tracking ID (-1 if untracked)", NULL},	// legacy
 	{ "Confidence", (getter)PyDetection_GetConfidence, (setter)PyDetection_SetConfidence, "Confidence value of the detected object", NULL},
 	{ "Left", (getter)PyDetection_GetLeft, (setter)PyDetection_SetLeft, "Left bounding box coordinate", NULL},
 	{ "Right", (getter)PyDetection_GetRight, (setter)PyDetection_SetRight, "Right bounding box coordinate", NULL},
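Taken together, the per-frame flow the examples above imply looks roughly like the following. This is a sketch only: stream/network setup, error handling, and output rendering are omitted, IMAGE_RGB8 is assumed as the capture format, and the function name is hypothetical; it mirrors the object-tracker example rather than reproducing it.

#include "videoSource.h"
#include "detectNet.h"
#include "objectTracker.h"

// each frame: detect first, then let the tracker re-identify the detections against
// its stored tracks; Process() returns the number of valid tracks it wrote back
// into the detections array (TrackID/TrackStatus/TrackFrames filled in)
void trackingLoop( videoSource* input, detectNet* net, objectTracker* tracker )
{
	uchar3* image = NULL;

	while( input->Capture(&image, 1000) )
	{
		detectNet::Detection* detections = NULL;
		const int numDetections = net->Detect(image, input->GetWidth(), input->GetHeight(), &detections);

		const int numTracks = tracker->Process(image, input->GetWidth(), input->GetHeight(),
									    IMAGE_RGB8, detections, numDetections);

		for( int n=0; n < numTracks; n++ )
			LogVerbose("track %i  class=%u  frames=%i  lost=%i\n",
					 detections[n].TrackID, detections[n].ClassID,
					 detections[n].TrackFrames, detections[n].TrackLost);
	}
}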