diff --git a/.gitignore b/.gitignore index a81ba0e..ca701c7 100644 --- a/.gitignore +++ b/.gitignore @@ -37,3 +37,5 @@ Podfile.lock Carthage/Checkouts Carthage/Build +#Core ML Model files +*.mlmodel \ No newline at end of file diff --git a/.gitmodules b/.gitmodules new file mode 100644 index 0000000..37753d1 --- /dev/null +++ b/.gitmodules @@ -0,0 +1,3 @@ +[submodule "ObjectDetection-CoreML/yolov5"] + path = ObjectDetection-CoreML/yolov5 + url = git@github.com:hietalajulius/yolov5.git diff --git a/ObjectDetection-CoreML/ObjectDetection-CoreML.xcodeproj/project.pbxproj b/ObjectDetection-CoreML/ObjectDetection-CoreML.xcodeproj/project.pbxproj new file mode 100644 index 0000000..2e610ed --- /dev/null +++ b/ObjectDetection-CoreML/ObjectDetection-CoreML.xcodeproj/project.pbxproj @@ -0,0 +1,373 @@ +// !$*UTF8*$! +{ + archiveVersion = 1; + classes = { + }; + objectVersion = 55; + objects = { + +/* Begin PBXBuildFile section */ + E956E0C428AB7926005A1871 /* AppDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = E956E0C328AB7926005A1871 /* AppDelegate.swift */; }; + E956E0C628AB7926005A1871 /* SceneDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = E956E0C528AB7926005A1871 /* SceneDelegate.swift */; }; + E956E0C828AB7926005A1871 /* ViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = E956E0C728AB7926005A1871 /* ViewController.swift */; }; + E956E0CB28AB7926005A1871 /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = E956E0C928AB7926005A1871 /* Main.storyboard */; }; + E956E0CD28AB7928005A1871 /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = E956E0CC28AB7928005A1871 /* Assets.xcassets */; }; + E956E0D028AB7928005A1871 /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = E956E0CE28AB7928005A1871 /* LaunchScreen.storyboard */; }; + E956E0DA28AB86BF005A1871 /* yolov5n.mlmodel in Sources */ = {isa = PBXBuildFile; fileRef = E956E0D928AB86BF005A1871 /* yolov5n.mlmodel */; }; + E956E0DC28ACD391005A1871 /* Utils.swift in Sources */ = {isa = PBXBuildFile; fileRef = E956E0DB28ACD391005A1871 /* Utils.swift */; }; +/* End PBXBuildFile section */ + +/* Begin PBXFileReference section */ + E956E0C028AB7926005A1871 /* ObjectDetection-CoreML.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = "ObjectDetection-CoreML.app"; sourceTree = BUILT_PRODUCTS_DIR; }; + E956E0C328AB7926005A1871 /* AppDelegate.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AppDelegate.swift; sourceTree = ""; }; + E956E0C528AB7926005A1871 /* SceneDelegate.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SceneDelegate.swift; sourceTree = ""; }; + E956E0C728AB7926005A1871 /* ViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ViewController.swift; sourceTree = ""; }; + E956E0CA28AB7926005A1871 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/Main.storyboard; sourceTree = ""; }; + E956E0CC28AB7928005A1871 /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; }; + E956E0CF28AB7928005A1871 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = ""; }; + E956E0D128AB7928005A1871 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; 
sourceTree = ""; }; + E956E0D928AB86BF005A1871 /* yolov5n.mlmodel */ = {isa = PBXFileReference; lastKnownFileType = file.mlmodel; path = yolov5n.mlmodel; sourceTree = ""; }; + E956E0DB28ACD391005A1871 /* Utils.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Utils.swift; sourceTree = ""; }; + E956E0DE28ACF6CC005A1871 /* en */ = {isa = PBXFileReference; lastKnownFileType = text.plist.strings; name = en; path = en.lproj/Main.strings; sourceTree = ""; }; +/* End PBXFileReference section */ + +/* Begin PBXFrameworksBuildPhase section */ + E956E0BD28AB7926005A1871 /* Frameworks */ = { + isa = PBXFrameworksBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + runOnlyForDeploymentPostprocessing = 0; + }; +/* End PBXFrameworksBuildPhase section */ + +/* Begin PBXGroup section */ + E956E0B728AB7926005A1871 = { + isa = PBXGroup; + children = ( + E956E0C228AB7926005A1871 /* ObjectDetection-CoreML */, + E956E0C128AB7926005A1871 /* Products */, + ); + sourceTree = ""; + }; + E956E0C128AB7926005A1871 /* Products */ = { + isa = PBXGroup; + children = ( + E956E0C028AB7926005A1871 /* ObjectDetection-CoreML.app */, + ); + name = Products; + sourceTree = ""; + }; + E956E0C228AB7926005A1871 /* ObjectDetection-CoreML */ = { + isa = PBXGroup; + children = ( + E956E0C328AB7926005A1871 /* AppDelegate.swift */, + E956E0C528AB7926005A1871 /* SceneDelegate.swift */, + E956E0C728AB7926005A1871 /* ViewController.swift */, + E956E0C928AB7926005A1871 /* Main.storyboard */, + E956E0CC28AB7928005A1871 /* Assets.xcassets */, + E956E0CE28AB7928005A1871 /* LaunchScreen.storyboard */, + E956E0D128AB7928005A1871 /* Info.plist */, + E956E0D928AB86BF005A1871 /* yolov5n.mlmodel */, + E956E0DB28ACD391005A1871 /* Utils.swift */, + ); + path = "ObjectDetection-CoreML"; + sourceTree = ""; + }; +/* End PBXGroup section */ + +/* Begin PBXNativeTarget section */ + E956E0BF28AB7926005A1871 /* ObjectDetection-CoreML */ = { + isa = PBXNativeTarget; + buildConfigurationList = E956E0D428AB7928005A1871 /* Build configuration list for PBXNativeTarget "ObjectDetection-CoreML" */; + buildPhases = ( + E956E0BC28AB7926005A1871 /* Sources */, + E956E0BD28AB7926005A1871 /* Frameworks */, + E956E0BE28AB7926005A1871 /* Resources */, + ); + buildRules = ( + ); + dependencies = ( + ); + name = "ObjectDetection-CoreML"; + productName = "ObjectDetection-CoreML"; + productReference = E956E0C028AB7926005A1871 /* ObjectDetection-CoreML.app */; + productType = "com.apple.product-type.application"; + }; +/* End PBXNativeTarget section */ + +/* Begin PBXProject section */ + E956E0B828AB7926005A1871 /* Project object */ = { + isa = PBXProject; + attributes = { + BuildIndependentTargetsInParallel = 1; + LastSwiftUpdateCheck = 1340; + LastUpgradeCheck = 1340; + TargetAttributes = { + E956E0BF28AB7926005A1871 = { + CreatedOnToolsVersion = 13.4.1; + }; + }; + }; + buildConfigurationList = E956E0BB28AB7926005A1871 /* Build configuration list for PBXProject "ObjectDetection-CoreML" */; + compatibilityVersion = "Xcode 13.0"; + developmentRegion = en; + hasScannedForEncodings = 0; + knownRegions = ( + en, + Base, + ); + mainGroup = E956E0B728AB7926005A1871; + productRefGroup = E956E0C128AB7926005A1871 /* Products */; + projectDirPath = ""; + projectRoot = ""; + targets = ( + E956E0BF28AB7926005A1871 /* ObjectDetection-CoreML */, + ); + }; +/* End PBXProject section */ + +/* Begin PBXResourcesBuildPhase section */ + E956E0BE28AB7926005A1871 /* Resources */ = { + isa = PBXResourcesBuildPhase; + buildActionMask = 2147483647; + files 
= ( + E956E0D028AB7928005A1871 /* LaunchScreen.storyboard in Resources */, + E956E0CD28AB7928005A1871 /* Assets.xcassets in Resources */, + E956E0CB28AB7926005A1871 /* Main.storyboard in Resources */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; +/* End PBXResourcesBuildPhase section */ + +/* Begin PBXSourcesBuildPhase section */ + E956E0BC28AB7926005A1871 /* Sources */ = { + isa = PBXSourcesBuildPhase; + buildActionMask = 2147483647; + files = ( + E956E0DC28ACD391005A1871 /* Utils.swift in Sources */, + E956E0C828AB7926005A1871 /* ViewController.swift in Sources */, + E956E0C428AB7926005A1871 /* AppDelegate.swift in Sources */, + E956E0C628AB7926005A1871 /* SceneDelegate.swift in Sources */, + E956E0DA28AB86BF005A1871 /* yolov5n.mlmodel in Sources */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; +/* End PBXSourcesBuildPhase section */ + +/* Begin PBXVariantGroup section */ + E956E0C928AB7926005A1871 /* Main.storyboard */ = { + isa = PBXVariantGroup; + children = ( + E956E0CA28AB7926005A1871 /* Base */, + E956E0DE28ACF6CC005A1871 /* en */, + ); + name = Main.storyboard; + sourceTree = ""; + }; + E956E0CE28AB7928005A1871 /* LaunchScreen.storyboard */ = { + isa = PBXVariantGroup; + children = ( + E956E0CF28AB7928005A1871 /* Base */, + ); + name = LaunchScreen.storyboard; + sourceTree = ""; + }; +/* End PBXVariantGroup section */ + +/* Begin XCBuildConfiguration section */ + E956E0D228AB7928005A1871 /* Debug */ = { + isa = XCBuildConfiguration; + buildSettings = { + ALWAYS_SEARCH_USER_PATHS = NO; + CLANG_ANALYZER_NONNULL = YES; + CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; + CLANG_CXX_LANGUAGE_STANDARD = "gnu++17"; + CLANG_ENABLE_MODULES = YES; + CLANG_ENABLE_OBJC_ARC = YES; + CLANG_ENABLE_OBJC_WEAK = YES; + CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; + CLANG_WARN_BOOL_CONVERSION = YES; + CLANG_WARN_COMMA = YES; + CLANG_WARN_CONSTANT_CONVERSION = YES; + CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; + CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; + CLANG_WARN_DOCUMENTATION_COMMENTS = YES; + CLANG_WARN_EMPTY_BODY = YES; + CLANG_WARN_ENUM_CONVERSION = YES; + CLANG_WARN_INFINITE_RECURSION = YES; + CLANG_WARN_INT_CONVERSION = YES; + CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; + CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; + CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; + CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; + CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES; + CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; + CLANG_WARN_STRICT_PROTOTYPES = YES; + CLANG_WARN_SUSPICIOUS_MOVE = YES; + CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; + CLANG_WARN_UNREACHABLE_CODE = YES; + CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; + COPY_PHASE_STRIP = NO; + DEBUG_INFORMATION_FORMAT = dwarf; + ENABLE_STRICT_OBJC_MSGSEND = YES; + ENABLE_TESTABILITY = YES; + GCC_C_LANGUAGE_STANDARD = gnu11; + GCC_DYNAMIC_NO_PIC = NO; + GCC_NO_COMMON_BLOCKS = YES; + GCC_OPTIMIZATION_LEVEL = 0; + GCC_PREPROCESSOR_DEFINITIONS = ( + "DEBUG=1", + "$(inherited)", + ); + GCC_WARN_64_TO_32_BIT_CONVERSION = YES; + GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; + GCC_WARN_UNDECLARED_SELECTOR = YES; + GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; + GCC_WARN_UNUSED_FUNCTION = YES; + GCC_WARN_UNUSED_VARIABLE = YES; + IPHONEOS_DEPLOYMENT_TARGET = 15.5; + MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE; + MTL_FAST_MATH = YES; + ONLY_ACTIVE_ARCH = YES; + SDKROOT = iphoneos; + SWIFT_ACTIVE_COMPILATION_CONDITIONS = DEBUG; + SWIFT_OPTIMIZATION_LEVEL = "-Onone"; + }; + name = Debug; + }; + E956E0D328AB7928005A1871 /* Release */ = { + isa = 
XCBuildConfiguration; + buildSettings = { + ALWAYS_SEARCH_USER_PATHS = NO; + CLANG_ANALYZER_NONNULL = YES; + CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; + CLANG_CXX_LANGUAGE_STANDARD = "gnu++17"; + CLANG_ENABLE_MODULES = YES; + CLANG_ENABLE_OBJC_ARC = YES; + CLANG_ENABLE_OBJC_WEAK = YES; + CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; + CLANG_WARN_BOOL_CONVERSION = YES; + CLANG_WARN_COMMA = YES; + CLANG_WARN_CONSTANT_CONVERSION = YES; + CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; + CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; + CLANG_WARN_DOCUMENTATION_COMMENTS = YES; + CLANG_WARN_EMPTY_BODY = YES; + CLANG_WARN_ENUM_CONVERSION = YES; + CLANG_WARN_INFINITE_RECURSION = YES; + CLANG_WARN_INT_CONVERSION = YES; + CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; + CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; + CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; + CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; + CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES; + CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; + CLANG_WARN_STRICT_PROTOTYPES = YES; + CLANG_WARN_SUSPICIOUS_MOVE = YES; + CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; + CLANG_WARN_UNREACHABLE_CODE = YES; + CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; + COPY_PHASE_STRIP = NO; + DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; + ENABLE_NS_ASSERTIONS = NO; + ENABLE_STRICT_OBJC_MSGSEND = YES; + GCC_C_LANGUAGE_STANDARD = gnu11; + GCC_NO_COMMON_BLOCKS = YES; + GCC_WARN_64_TO_32_BIT_CONVERSION = YES; + GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; + GCC_WARN_UNDECLARED_SELECTOR = YES; + GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; + GCC_WARN_UNUSED_FUNCTION = YES; + GCC_WARN_UNUSED_VARIABLE = YES; + IPHONEOS_DEPLOYMENT_TARGET = 15.5; + MTL_ENABLE_DEBUG_INFO = NO; + MTL_FAST_MATH = YES; + SDKROOT = iphoneos; + SWIFT_COMPILATION_MODE = wholemodule; + SWIFT_OPTIMIZATION_LEVEL = "-O"; + VALIDATE_PRODUCT = YES; + }; + name = Release; + }; + E956E0D528AB7928005A1871 /* Debug */ = { + isa = XCBuildConfiguration; + buildSettings = { + ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; + ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor; + CODE_SIGN_STYLE = Automatic; + CURRENT_PROJECT_VERSION = 1; + DEVELOPMENT_TEAM = CLFN2N8XXS; + GENERATE_INFOPLIST_FILE = YES; + INFOPLIST_FILE = "ObjectDetection-CoreML/Info.plist"; + INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents = YES; + INFOPLIST_KEY_UILaunchStoryboardName = LaunchScreen; + INFOPLIST_KEY_UIMainStoryboardFile = Main; + INFOPLIST_KEY_UISupportedInterfaceOrientations_iPad = "UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight"; + INFOPLIST_KEY_UISupportedInterfaceOrientations_iPhone = "UIInterfaceOrientationPortrait UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight"; + LD_RUNPATH_SEARCH_PATHS = ( + "$(inherited)", + "@executable_path/Frameworks", + ); + MARKETING_VERSION = 1.0; + PRODUCT_BUNDLE_IDENTIFIER = "com.hietalajulius.ObjectDetection-CoreML"; + PRODUCT_NAME = "$(TARGET_NAME)"; + SWIFT_EMIT_LOC_STRINGS = YES; + SWIFT_VERSION = 5.0; + TARGETED_DEVICE_FAMILY = "1,2"; + }; + name = Debug; + }; + E956E0D628AB7928005A1871 /* Release */ = { + isa = XCBuildConfiguration; + buildSettings = { + ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; + ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor; + CODE_SIGN_STYLE = Automatic; + CURRENT_PROJECT_VERSION = 1; + DEVELOPMENT_TEAM = CLFN2N8XXS; + GENERATE_INFOPLIST_FILE = YES; + INFOPLIST_FILE = "ObjectDetection-CoreML/Info.plist"; 
+ INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents = YES; + INFOPLIST_KEY_UILaunchStoryboardName = LaunchScreen; + INFOPLIST_KEY_UIMainStoryboardFile = Main; + INFOPLIST_KEY_UISupportedInterfaceOrientations_iPad = "UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight"; + INFOPLIST_KEY_UISupportedInterfaceOrientations_iPhone = "UIInterfaceOrientationPortrait UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight"; + LD_RUNPATH_SEARCH_PATHS = ( + "$(inherited)", + "@executable_path/Frameworks", + ); + MARKETING_VERSION = 1.0; + PRODUCT_BUNDLE_IDENTIFIER = "com.hietalajulius.ObjectDetection-CoreML"; + PRODUCT_NAME = "$(TARGET_NAME)"; + SWIFT_EMIT_LOC_STRINGS = YES; + SWIFT_VERSION = 5.0; + TARGETED_DEVICE_FAMILY = "1,2"; + }; + name = Release; + }; +/* End XCBuildConfiguration section */ + +/* Begin XCConfigurationList section */ + E956E0BB28AB7926005A1871 /* Build configuration list for PBXProject "ObjectDetection-CoreML" */ = { + isa = XCConfigurationList; + buildConfigurations = ( + E956E0D228AB7928005A1871 /* Debug */, + E956E0D328AB7928005A1871 /* Release */, + ); + defaultConfigurationIsVisible = 0; + defaultConfigurationName = Release; + }; + E956E0D428AB7928005A1871 /* Build configuration list for PBXNativeTarget "ObjectDetection-CoreML" */ = { + isa = XCConfigurationList; + buildConfigurations = ( + E956E0D528AB7928005A1871 /* Debug */, + E956E0D628AB7928005A1871 /* Release */, + ); + defaultConfigurationIsVisible = 0; + defaultConfigurationName = Release; + }; +/* End XCConfigurationList section */ + }; + rootObject = E956E0B828AB7926005A1871 /* Project object */; +} diff --git a/ObjectDetection-CoreML/ObjectDetection-CoreML/AppDelegate.swift b/ObjectDetection-CoreML/ObjectDetection-CoreML/AppDelegate.swift new file mode 100644 index 0000000..ff0341a --- /dev/null +++ b/ObjectDetection-CoreML/ObjectDetection-CoreML/AppDelegate.swift @@ -0,0 +1,36 @@ +// +// AppDelegate.swift +// ObjectDetection-CoreML +// +// Created by Julius Hietala on 16.8.2022. +// + +import UIKit + +@main +class AppDelegate: UIResponder, UIApplicationDelegate { + + + + func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [UIApplication.LaunchOptionsKey: Any]?) -> Bool { + // Override point for customization after application launch. + return true + } + + // MARK: UISceneSession Lifecycle + + func application(_ application: UIApplication, configurationForConnecting connectingSceneSession: UISceneSession, options: UIScene.ConnectionOptions) -> UISceneConfiguration { + // Called when a new scene session is being created. + // Use this method to select a configuration to create the new scene with. + return UISceneConfiguration(name: "Default Configuration", sessionRole: connectingSceneSession.role) + } + + func application(_ application: UIApplication, didDiscardSceneSessions sceneSessions: Set) { + // Called when the user discards a scene session. + // If any sessions were discarded while the application was not running, this will be called shortly after application:didFinishLaunchingWithOptions. + // Use this method to release any resources that were specific to the discarded scenes, as they will not return. 
+ } + + +} + diff --git a/ObjectDetection-CoreML/ObjectDetection-CoreML/Assets.xcassets/AccentColor.colorset/Contents.json b/ObjectDetection-CoreML/ObjectDetection-CoreML/Assets.xcassets/AccentColor.colorset/Contents.json new file mode 100644 index 0000000..eb87897 --- /dev/null +++ b/ObjectDetection-CoreML/ObjectDetection-CoreML/Assets.xcassets/AccentColor.colorset/Contents.json @@ -0,0 +1,11 @@ +{ + "colors" : [ + { + "idiom" : "universal" + } + ], + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/ObjectDetection-CoreML/ObjectDetection-CoreML/Assets.xcassets/AppIcon.appiconset/Contents.json b/ObjectDetection-CoreML/ObjectDetection-CoreML/Assets.xcassets/AppIcon.appiconset/Contents.json new file mode 100644 index 0000000..99f4708 --- /dev/null +++ b/ObjectDetection-CoreML/ObjectDetection-CoreML/Assets.xcassets/AppIcon.appiconset/Contents.json @@ -0,0 +1,104 @@ +{ + "images" : [ + { + "idiom" : "iphone", + "scale" : "2x", + "size" : "20x20" + }, + { + "idiom" : "iphone", + "scale" : "3x", + "size" : "20x20" + }, + { + "filename" : "Icon-Small@2x-1.png", + "idiom" : "iphone", + "scale" : "2x", + "size" : "29x29" + }, + { + "filename" : "Icon-Small@3x.png", + "idiom" : "iphone", + "scale" : "3x", + "size" : "29x29" + }, + { + "filename" : "Icon-Small-40@2x.png", + "idiom" : "iphone", + "scale" : "2x", + "size" : "40x40" + }, + { + "filename" : "Icon-Small-60@2x.png", + "idiom" : "iphone", + "scale" : "3x", + "size" : "40x40" + }, + { + "filename" : "Icon-60@2x.png", + "idiom" : "iphone", + "scale" : "2x", + "size" : "60x60" + }, + { + "filename" : "Icon-60@3x.png", + "idiom" : "iphone", + "scale" : "3x", + "size" : "60x60" + }, + { + "idiom" : "ipad", + "scale" : "1x", + "size" : "20x20" + }, + { + "idiom" : "ipad", + "scale" : "2x", + "size" : "20x20" + }, + { + "idiom" : "ipad", + "scale" : "1x", + "size" : "29x29" + }, + { + "idiom" : "ipad", + "scale" : "2x", + "size" : "29x29" + }, + { + "idiom" : "ipad", + "scale" : "1x", + "size" : "40x40" + }, + { + "idiom" : "ipad", + "scale" : "2x", + "size" : "40x40" + }, + { + "idiom" : "ipad", + "scale" : "1x", + "size" : "76x76" + }, + { + "idiom" : "ipad", + "scale" : "2x", + "size" : "76x76" + }, + { + "idiom" : "ipad", + "scale" : "2x", + "size" : "83.5x83.5" + }, + { + "idiom" : "ios-marketing", + "scale" : "1x", + "size" : "1024x1024" + } + ], + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/ObjectDetection-CoreML/ObjectDetection-CoreML/Assets.xcassets/AppIcon.appiconset/Icon-60@2x.png b/ObjectDetection-CoreML/ObjectDetection-CoreML/Assets.xcassets/AppIcon.appiconset/Icon-60@2x.png new file mode 100644 index 0000000..8ce337f --- /dev/null +++ b/ObjectDetection-CoreML/ObjectDetection-CoreML/Assets.xcassets/AppIcon.appiconset/Icon-60@2x.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:007471221a2fd56c0d8307beba13decbc03e7284a956ae597e2be5ae74b40f69 +size 15078 diff --git a/ObjectDetection-CoreML/ObjectDetection-CoreML/Assets.xcassets/AppIcon.appiconset/Icon-60@3x.png b/ObjectDetection-CoreML/ObjectDetection-CoreML/Assets.xcassets/AppIcon.appiconset/Icon-60@3x.png new file mode 100644 index 0000000..89888a3 --- /dev/null +++ b/ObjectDetection-CoreML/ObjectDetection-CoreML/Assets.xcassets/AppIcon.appiconset/Icon-60@3x.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e302269045922de2cefca08f44a792105d367ce35808c5dbd03df4f0bde2c474 +size 26669 diff --git 
a/ObjectDetection-CoreML/ObjectDetection-CoreML/Assets.xcassets/AppIcon.appiconset/Icon-Small-40@2x.png b/ObjectDetection-CoreML/ObjectDetection-CoreML/Assets.xcassets/AppIcon.appiconset/Icon-Small-40@2x.png new file mode 100644 index 0000000..7485338 --- /dev/null +++ b/ObjectDetection-CoreML/ObjectDetection-CoreML/Assets.xcassets/AppIcon.appiconset/Icon-Small-40@2x.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0e6ffe9703d96eaae3dd5ea7bff4fa932d88a8b952c66111f8f1799d33ae8656 +size 8071 diff --git a/ObjectDetection-CoreML/ObjectDetection-CoreML/Assets.xcassets/AppIcon.appiconset/Icon-Small-60@2x.png b/ObjectDetection-CoreML/ObjectDetection-CoreML/Assets.xcassets/AppIcon.appiconset/Icon-Small-60@2x.png new file mode 100644 index 0000000..636348c --- /dev/null +++ b/ObjectDetection-CoreML/ObjectDetection-CoreML/Assets.xcassets/AppIcon.appiconset/Icon-Small-60@2x.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3c527b43a925f5e9248ae2ea51b372bfcc7389c9a7d6899b159e946cd9c44586 +size 15161 diff --git a/ObjectDetection-CoreML/ObjectDetection-CoreML/Assets.xcassets/AppIcon.appiconset/Icon-Small@2x-1.png b/ObjectDetection-CoreML/ObjectDetection-CoreML/Assets.xcassets/AppIcon.appiconset/Icon-Small@2x-1.png new file mode 100644 index 0000000..635e794 --- /dev/null +++ b/ObjectDetection-CoreML/ObjectDetection-CoreML/Assets.xcassets/AppIcon.appiconset/Icon-Small@2x-1.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9988727116307ffe5b6af9fbc5db52c3fbb275ea763da80a608bd825e6c04c0f +size 4863 diff --git a/ObjectDetection-CoreML/ObjectDetection-CoreML/Assets.xcassets/AppIcon.appiconset/Icon-Small@3x.png b/ObjectDetection-CoreML/ObjectDetection-CoreML/Assets.xcassets/AppIcon.appiconset/Icon-Small@3x.png new file mode 100644 index 0000000..541f707 --- /dev/null +++ b/ObjectDetection-CoreML/ObjectDetection-CoreML/Assets.xcassets/AppIcon.appiconset/Icon-Small@3x.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5f169b9dcf495ea364f99d686284f907dbae42b84b1f848f7e36e735db712922 +size 9289 diff --git a/ObjectDetection-CoreML/ObjectDetection-CoreML/Assets.xcassets/Contents.json b/ObjectDetection-CoreML/ObjectDetection-CoreML/Assets.xcassets/Contents.json new file mode 100644 index 0000000..73c0059 --- /dev/null +++ b/ObjectDetection-CoreML/ObjectDetection-CoreML/Assets.xcassets/Contents.json @@ -0,0 +1,6 @@ +{ + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/ObjectDetection-CoreML/ObjectDetection-CoreML/Base.lproj/LaunchScreen.storyboard b/ObjectDetection-CoreML/ObjectDetection-CoreML/Base.lproj/LaunchScreen.storyboard new file mode 100644 index 0000000..865e932 --- /dev/null +++ b/ObjectDetection-CoreML/ObjectDetection-CoreML/Base.lproj/LaunchScreen.storyboard @@ -0,0 +1,25 @@ + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/ObjectDetection-CoreML/ObjectDetection-CoreML/Base.lproj/Main.storyboard b/ObjectDetection-CoreML/ObjectDetection-CoreML/Base.lproj/Main.storyboard new file mode 100644 index 0000000..b1e3203 --- /dev/null +++ b/ObjectDetection-CoreML/ObjectDetection-CoreML/Base.lproj/Main.storyboard @@ -0,0 +1,38 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/ObjectDetection-CoreML/ObjectDetection-CoreML/Info.plist b/ObjectDetection-CoreML/ObjectDetection-CoreML/Info.plist new file mode 100644 index 0000000..020db65 --- /dev/null +++ b/ObjectDetection-CoreML/ObjectDetection-CoreML/Info.plist @@ -0,0 +1,27 @@ + + + + + 
NSCameraUsageDescription + This sample uses the camera to capture images of items for object recognition using YOLOv5. + UIApplicationSceneManifest + + UIApplicationSupportsMultipleScenes + + UISceneConfigurations + + UIWindowSceneSessionRoleApplication + + + UISceneConfigurationName + Default Configuration + UISceneDelegateClassName + $(PRODUCT_MODULE_NAME).SceneDelegate + UISceneStoryboardFile + Main + + + + + + diff --git a/ObjectDetection-CoreML/ObjectDetection-CoreML/SceneDelegate.swift b/ObjectDetection-CoreML/ObjectDetection-CoreML/SceneDelegate.swift new file mode 100644 index 0000000..781bb41 --- /dev/null +++ b/ObjectDetection-CoreML/ObjectDetection-CoreML/SceneDelegate.swift @@ -0,0 +1,52 @@ +// +// SceneDelegate.swift +// ObjectDetection-CoreML +// +// Created by Julius Hietala on 16.8.2022. +// + +import UIKit + +class SceneDelegate: UIResponder, UIWindowSceneDelegate { + + var window: UIWindow? + + + func scene(_ scene: UIScene, willConnectTo session: UISceneSession, options connectionOptions: UIScene.ConnectionOptions) { + // Use this method to optionally configure and attach the UIWindow `window` to the provided UIWindowScene `scene`. + // If using a storyboard, the `window` property will automatically be initialized and attached to the scene. + // This delegate does not imply the connecting scene or session are new (see `application:configurationForConnectingSceneSession` instead). + guard let _ = (scene as? UIWindowScene) else { return } + } + + func sceneDidDisconnect(_ scene: UIScene) { + // Called as the scene is being released by the system. + // This occurs shortly after the scene enters the background, or when its session is discarded. + // Release any resources associated with this scene that can be re-created the next time the scene connects. + // The scene may re-connect later, as its session was not necessarily discarded (see `application:didDiscardSceneSessions` instead). + } + + func sceneDidBecomeActive(_ scene: UIScene) { + // Called when the scene has moved from an inactive state to an active state. + // Use this method to restart any tasks that were paused (or not yet started) when the scene was inactive. + } + + func sceneWillResignActive(_ scene: UIScene) { + // Called when the scene will move from an active state to an inactive state. + // This may occur due to temporary interruptions (ex. an incoming phone call). + } + + func sceneWillEnterForeground(_ scene: UIScene) { + // Called as the scene transitions from the background to the foreground. + // Use this method to undo the changes made on entering the background. + } + + func sceneDidEnterBackground(_ scene: UIScene) { + // Called as the scene transitions from the foreground to the background. + // Use this method to save data, release shared resources, and store enough scene-specific state information + // to restore the scene back to its current state. + } + + +} + diff --git a/ObjectDetection-CoreML/ObjectDetection-CoreML/Utils.swift b/ObjectDetection-CoreML/ObjectDetection-CoreML/Utils.swift new file mode 100644 index 0000000..8685973 --- /dev/null +++ b/ObjectDetection-CoreML/ObjectDetection-CoreML/Utils.swift @@ -0,0 +1,126 @@ +// +// Utils.swift +// ObjectDetection-CoreML +// +// Created by Julius Hietala on 17.8.2022. 
+// + +import UIKit +import Vision + +let classes = [ + "person", + "bicycle", + "car", + "motorcycle", + "airplane", + "bus", + "train", + "truck", + "boat", + "traffic light", + "fire hydrant", + "stop sign", + "parking meter", + "bench", + "bird", + "cat", + "dog", + "horse", + "sheep", + "cow", + "elephant", + "bear", + "zebra", + "giraffe", + "backpack", + "umbrella", + "handbag", + "tie", + "suitcase", + "frisbee", + "skis", + "snowboard", + "sports ball", + "kite", + "baseball bat", + "baseball glove", + "skateboard", + "surfboard", + "tennis racket", + "bottle", + "wine glass", + "cup", + "fork", + "knife", + "spoon", + "bowl", + "banana", + "apple", + "sandwich", + "orange", + "broccoli", + "carrot", + "hot dog", + "pizza", + "donut", + "cake", + "chair", + "couch", + "potted plant", + "bed", + "dining table", + "toilet", + "tv", + "laptop", + "mouse", + "remote", + "keyboard", + "cell phone", + "microwave", + "oven", + "toaster", + "sink", + "refrigerator", + "book", + "clock", + "vase", + "scissors", + "teddy bear", + "hair drier", + "toothbrush" +]; + +let colors = classes.reduce(into: [String: [CGFloat]]()) { + $0[$1] = [Double.random(in: 0.0 ..< 1.0),Double.random(in: 0.0 ..< 1.0),Double.random(in: 0.0 ..< 1.0),0.5] +} + + +func createDetectionTextLayer(_ bounds: CGRect, _ text: NSMutableAttributedString) -> CATextLayer { + let textLayer = CATextLayer() + textLayer.string = text + textLayer.bounds = CGRect(x: 0, y: 0, width: bounds.size.height - 10, height: bounds.size.width - 10) + textLayer.position = CGPoint(x: bounds.midX, y: bounds.midY) + textLayer.contentsScale = 10.0 + textLayer.setAffineTransform(CGAffineTransform(rotationAngle: CGFloat(.pi / 2.0)).scaledBy(x: 1.0, y: -1.0)) + return textLayer +} + +func createInferenceTimeTextLayer(_ bounds: CGRect, _ text: NSMutableAttributedString) -> CATextLayer { + let inferenceTimeTextLayer = CATextLayer() + inferenceTimeTextLayer.string = text + inferenceTimeTextLayer.frame = bounds + inferenceTimeTextLayer.contentsScale = 10.0 + inferenceTimeTextLayer.alignmentMode = .center + return inferenceTimeTextLayer +} + +func createRectLayer(_ bounds: CGRect, _ color: [CGFloat]) -> CALayer { + let shapeLayer = CALayer() + shapeLayer.bounds = bounds + shapeLayer.position = CGPoint(x: bounds.midX, y: bounds.midY) + shapeLayer.backgroundColor = CGColor(colorSpace: CGColorSpaceCreateDeviceRGB(), components: color) + return shapeLayer +} + + diff --git a/ObjectDetection-CoreML/ObjectDetection-CoreML/ViewController.swift b/ObjectDetection-CoreML/ObjectDetection-CoreML/ViewController.swift new file mode 100644 index 0000000..a138a16 --- /dev/null +++ b/ObjectDetection-CoreML/ObjectDetection-CoreML/ViewController.swift @@ -0,0 +1,200 @@ +// +// ViewController.swift +// ObjectDetection-CoreML +// +// Created by Julius Hietala on 16.8.2022. +// + +import UIKit +import AVFoundation +import Vision + +class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate { + + // Capture + var bufferSize: CGSize = .zero + var inferenceTime: CFTimeInterval = 0; + private let session = AVCaptureSession() + + // UI/Layers + @IBOutlet weak var previewView: UIView! + var rootLayer: CALayer! = nil + private var previewLayer: AVCaptureVideoPreviewLayer! = nil + private var detectionLayer: CALayer! = nil + private var inferenceTimeLayer: CALayer! = nil + private var inferenceTimeBounds: CGRect! 
= nil + + // Vision + private var requests = [VNRequest]() + + // Setup + override func viewDidLoad() { + super.viewDidLoad() + setupCapture() + setupOutput() + setupLayers() + try? setupVision() + session.startRunning() + } + + func setupCapture() { + var deviceInput: AVCaptureDeviceInput! + let videoDevice = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInWideAngleCamera], mediaType: .video, position: .back).devices.first + do { + deviceInput = try AVCaptureDeviceInput(device: videoDevice!) + } catch { + print("Could not create video device input: \(error)") + return + } + + session.beginConfiguration() + session.sessionPreset = .vga640x480 + + guard session.canAddInput(deviceInput) else { + print("Could not add video device input to the session") + session.commitConfiguration() + return + } + session.addInput(deviceInput) + + do { + try videoDevice!.lockForConfiguration() + let dimensions = CMVideoFormatDescriptionGetDimensions((videoDevice?.activeFormat.formatDescription)!) + bufferSize.width = CGFloat(dimensions.width) + bufferSize.height = CGFloat(dimensions.height) + videoDevice!.unlockForConfiguration() + } catch { + print(error) + } + session.commitConfiguration() + } + + func setupOutput() { + let videoDataOutput = AVCaptureVideoDataOutput() + let videoDataOutputQueue = DispatchQueue(label: "VideoDataOutput", qos: .userInitiated, attributes: [], autoreleaseFrequency: .workItem) + + if session.canAddOutput(videoDataOutput) { + session.addOutput(videoDataOutput) + videoDataOutput.alwaysDiscardsLateVideoFrames = true + videoDataOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)] + videoDataOutput.setSampleBufferDelegate(self, queue: videoDataOutputQueue) + } else { + print("Could not add video data output to the session") + session.commitConfiguration() + return + } + } + + func setupLayers() { + previewLayer = AVCaptureVideoPreviewLayer(session: session) + previewLayer.videoGravity = AVLayerVideoGravity.resizeAspectFill + rootLayer = previewView.layer + previewLayer.frame = rootLayer.bounds + rootLayer.addSublayer(previewLayer) + + inferenceTimeBounds = CGRect(x: rootLayer.frame.midX-75, y: rootLayer.frame.maxY-70, width: 150, height: 17) + + inferenceTimeLayer = createRectLayer(inferenceTimeBounds, [1,1,1,1]) + inferenceTimeLayer.cornerRadius = 7 + rootLayer.addSublayer(inferenceTimeLayer) + + detectionLayer = CALayer() + detectionLayer.bounds = CGRect(x: 0.0, + y: 0.0, + width: bufferSize.width, + height: bufferSize.height) + detectionLayer.position = CGPoint(x: rootLayer.bounds.midX, y: rootLayer.bounds.midY) + rootLayer.addSublayer(detectionLayer) + + let xScale: CGFloat = rootLayer.bounds.size.width / bufferSize.height + let yScale: CGFloat = rootLayer.bounds.size.height / bufferSize.width + + let scale = fmax(xScale, yScale) + + // rotate the layer into screen orientation and scale and mirror + detectionLayer.setAffineTransform(CGAffineTransform(rotationAngle: CGFloat(.pi / 2.0)).scaledBy(x: scale, y: -scale)) + // center the layer + detectionLayer.position = CGPoint(x: rootLayer.bounds.midX, y: rootLayer.bounds.midY) + } + + func setupVision() throws { + guard let modelURL = Bundle.main.url(forResource: "yolov5n", withExtension: "mlmodelc") else { + throw NSError(domain: "ViewController", code: -1, userInfo: [NSLocalizedDescriptionKey: "Model file is missing"]) + } + + do { + let visionModel = try VNCoreMLModel(for: MLModel(contentsOf: modelURL)) + let objectRecognition = VNCoreMLRequest(model: 
visionModel, completionHandler: { (request, error) in + DispatchQueue.main.async(execute: { + if let results = request.results { + self.drawResults(results) + } + }) + }) + self.requests = [objectRecognition] + } catch let error as NSError { + print("Model loading went wrong: \(error)") + } + } + + func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) { + guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { + return + } + + let imageRequestHandler = VNImageRequestHandler(cvPixelBuffer: pixelBuffer, orientation: .right, options: [:]) + do { + // returns true when complete https://developer.apple.com/documentation/vision/vnimagerequesthandler/2880297-perform + let start = CACurrentMediaTime() + try imageRequestHandler.perform(self.requests) + inferenceTime = (CACurrentMediaTime() - start) + + } catch { + print(error) + } + } + + func drawResults(_ results: [Any]) { + CATransaction.begin() + CATransaction.setValue(kCFBooleanTrue, forKey: kCATransactionDisableActions) + detectionLayer.sublayers = nil // Clear previous detections from detectionLayer + inferenceTimeLayer.sublayers = nil + for observation in results where observation is VNRecognizedObjectObservation { + guard let objectObservation = observation as? VNRecognizedObjectObservation else { + continue + } + + // Detection with highest confidence + let topLabelObservation = objectObservation.labels[0] + + // Rotate the bounding box into screen orientation + let boundingBox = CGRect(origin: CGPoint(x:1.0-objectObservation.boundingBox.origin.y-objectObservation.boundingBox.size.height, y:objectObservation.boundingBox.origin.x), size: CGSize(width:objectObservation.boundingBox.size.height,height:objectObservation.boundingBox.size.width)) + + let objectBounds = VNImageRectForNormalizedRect(boundingBox, Int(bufferSize.width), Int(bufferSize.height)) + + let shapeLayer = createRectLayer(objectBounds, colors[topLabelObservation.identifier]!) + + let formattedString = NSMutableAttributedString(string: String(format: "\(topLabelObservation.identifier)\n %.1f%% ", topLabelObservation.confidence*100).capitalized) + + let textLayer = createDetectionTextLayer(objectBounds, formattedString) + shapeLayer.addSublayer(textLayer) + detectionLayer.addSublayer(shapeLayer) + } + + let formattedInferenceTimeString = NSMutableAttributedString(string: String(format: "Inference time: %.1f ms ", inferenceTime*1000)) + + let inferenceTimeTextLayer = createInferenceTimeTextLayer(inferenceTimeBounds, formattedInferenceTimeString) + + inferenceTimeLayer.addSublayer(inferenceTimeTextLayer) + + CATransaction.commit() + } + + // Clean up capture setup + func teardownAVCapture() { + previewLayer.removeFromSuperlayer() + previewLayer = nil + } + +} + diff --git a/ObjectDetection-CoreML/ObjectDetection-CoreML/en.lproj/Main.strings b/ObjectDetection-CoreML/ObjectDetection-CoreML/en.lproj/Main.strings new file mode 100644 index 0000000..846b6d4 --- /dev/null +++ b/ObjectDetection-CoreML/ObjectDetection-CoreML/en.lproj/Main.strings @@ -0,0 +1,3 @@ + +/* Class = "UILabel"; text = "FOOOOOOOO"; ObjectID = "5qT-cM-zXu"; */ +"5qT-cM-zXu.text" = "FOOOOOOOO"; diff --git a/ObjectDetection-CoreML/README.md b/ObjectDetection-CoreML/README.md new file mode 100644 index 0000000..c490ff7 --- /dev/null +++ b/ObjectDetection-CoreML/README.md @@ -0,0 +1,61 @@ +# Object Detection with PyTorch, Core ML, and Vision on iOS + +


+## Introduction +This demo app was built to showcase how to use a PyTorch model with Apple's Core ML and Vision on iOS. The app uses YOLOv5 as an example model. + +[YOLOv5](https://github.com/ultralytics/yolov5) is a family of object detection models built with PyTorch. The models detect objects in single images, and the model output includes bounding box predictions, the class of each bounding box, and the confidence of each prediction. + + +## Prerequisites + +* Python >= 3.7 +* Xcode + +## Quick Start + +### 1. Prepare the model + +Start by cloning the repository and its submodules: + +``` +git clone git@github.com:pytorch/ios-demo-app.git --recursive +cd ObjectDetection-CoreML +``` + +The Python script `export-nms.py` in the `yolov5` submodule folder is used to generate a Core ML-formatted YOLOv5 model. The script is a modified version of the original `export.py` script that appends non-maximum suppression (NMS) to the end of the model so that it can be used with iOS's Vision framework. + +Before running the script, create a Python (>= 3.7) environment and install the dependencies listed in both `requirements.txt` and `requirements-export.txt` with: + +`pip install -r requirements.txt -r requirements-export.txt` + +To export the model, navigate to the `yolov5` directory and run: + +`python export-nms.py --include coreml --weights yolov5n.pt` (the example app uses the nano variant of the model) + +Note that the export has been tested with `python==3.7.13`, the dependencies in the `requirements` files, and the specific commit of the included `yolov5` submodule. + + +### 2. Run the app + +Navigate to the root of the `ObjectDetection-CoreML` directory and open the project with: + +`open ObjectDetection-CoreML.xcodeproj` + +Drag the generated `yolov5n.mlmodel` file from the `yolov5` directory into the Xcode project files and make sure the model is included in the app target: + +Screenshot 2022-08-23 at 16 33 51 + +Result: + +Screenshot 2022-08-23 at 16 16 44 + +Select an iOS simulator or device in Xcode and run the app. The app will start outputting predictions and the current inference time.
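For reference, here is a minimal sketch of how the exported model is consumed through Vision on the Swift side. It mirrors `setupVision()` in `ViewController.swift` above and assumes the model was added to the app target as `yolov5n.mlmodel` (which Xcode compiles to `yolov5n.mlmodelc`); the helper name `makeDetectionRequest` is illustrative only:

```
import Foundation
import CoreML
import Vision

// Minimal sketch (assumption: yolov5n.mlmodel is bundled in the app target).
// Because export-nms.py bakes NMS into the model, Vision returns
// VNRecognizedObjectObservation values directly.
func makeDetectionRequest() throws -> VNCoreMLRequest {
    guard let modelURL = Bundle.main.url(forResource: "yolov5n", withExtension: "mlmodelc") else {
        throw NSError(domain: "ObjectDetection", code: -1,
                      userInfo: [NSLocalizedDescriptionKey: "Model file is missing"])
    }
    let visionModel = try VNCoreMLModel(for: MLModel(contentsOf: modelURL))
    return VNCoreMLRequest(model: visionModel) { request, _ in
        let detections = request.results as? [VNRecognizedObjectObservation] ?? []
        for detection in detections {
            // Highest-confidence label, overall confidence, and normalized bounding box.
            print(detection.labels.first?.identifier ?? "unknown",
                  detection.confidence,
                  detection.boundingBox)
        }
    }
}
```

A `VNImageRequestHandler` created from each camera frame's pixel buffer (as in `captureOutput` in `ViewController.swift`) can then `perform` this request to produce per-frame detections.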


diff --git a/ObjectDetection-CoreML/yolov5 b/ObjectDetection-CoreML/yolov5 new file mode 160000 index 0000000..1023da9 --- /dev/null +++ b/ObjectDetection-CoreML/yolov5 @@ -0,0 +1 @@ +Subproject commit 1023da95a54466cc320d79cc0408ea8b171d0321