From apple-dev
Integrate your app with iOS Visual Intelligence for camera-based search and object recognition. Use when adding visual search capabilities.
`npx claudepluginhub autisticaf/autisticaf-claude-code-marketplace --plugin apple-dev`

This skill uses the workspace's default tool permissions.
> **First step:** Tell the user: "apple-intelligence-visual-intelligence skill loaded."
Generates design tokens/docs from CSS/Tailwind/styled-components codebases, audits visual consistency across 10 dimensions, detects AI slop in UI.
Records polished WebM UI demo videos of web apps using Playwright with cursor overlay, natural pacing, and three-phase scripting. Activates for demo, walkthrough, screen recording, or tutorial requests.
Delivers idiomatic Kotlin patterns for null safety, immutability, sealed classes, coroutines, Flows, extensions, DSL builders, and Gradle DSL. Use when writing, reviewing, refactoring, or designing Kotlin code.
First step: Tell the user: "apple-intelligence-visual-intelligence skill loaded."
Integrate your app with iOS Visual Intelligence to let users find app content by pointing their camera at objects.
Visual Intelligence lets users:
Your app implements:
- `IntentValueQuery` to receive search requests
- `AppEntity` types for searchable content

import VisualIntelligence
import AppIntents
/// An App Intents entity describing a product that Visual Intelligence can
/// surface as a camera-based search result.
///
/// NOTE(review): `AppEntity` conformance normally also requires a
/// `static var defaultQuery`; it is not visible in this snippet — confirm it
/// exists elsewhere in the project.
struct ProductEntity: AppEntity {
    var id: String
    var name: String
    var price: String
    var imageName: String

    /// How the system names and counts this entity type in UI
    /// (e.g. "3 products").
    static var typeDisplayRepresentation: TypeDisplayRepresentation {
        TypeDisplayRepresentation(
            name: LocalizedStringResource("Product"),
            numericFormat: "\(placeholder: .int) products"
        )
    }

    /// The title, subtitle, and thumbnail shown for one search result.
    var displayRepresentation: DisplayRepresentation {
        DisplayRepresentation(
            title: "\(name)",
            subtitle: "\(price)",
            image: .init(named: imageName)
        )
    }

    // Deep link URL
    /// URL the system opens when the user taps this result.
    var appLinkURL: URL? {
        URL(string: "myapp://product/\(id)")
    }
}
/// Resolves Visual Intelligence search requests into matching products.
///
/// The system calls `values(for:)` with a `SemanticContentDescriptor`
/// describing what the camera sees. The cheap label-based path is preferred;
/// the raw pixel buffer is used only as a fallback.
struct ProductIntentValueQuery: IntentValueQuery {
    func values(for input: SemanticContentDescriptor) async throws -> [ProductEntity] {
        // Search using labels
        if !input.labels.isEmpty {
            return await searchProducts(matching: input.labels)
        }
        // Search using image
        if let pixelBuffer = input.pixelBuffer {
            return await searchProducts(from: pixelBuffer)
        }
        // Nothing to search with — return no results rather than throwing.
        return []
    }

    /// Searches the catalog using classification labels.
    /// TODO: replace this stub with a real database/catalog query.
    /// (Fix: the original stub had no return statement and would not compile.)
    private func searchProducts(matching labels: [String]) async -> [ProductEntity] {
        // Search your database using provided labels
        // Return matching products
        return []
    }

    /// Searches the catalog by image similarity.
    /// TODO: replace this stub with real image-recognition logic.
    /// (Fix: the original stub had no return statement and would not compile.)
    private func searchProducts(from pixelBuffer: CVReadOnlyPixelBuffer) async -> [ProductEntity] {
        // Use image recognition on the pixel buffer
        // Return matching products
        return []
    }
}
The system provides this object with information about what the user is looking at.
| Property | Type | Description |
|---|---|---|
| labels | [String] | Classification labels from Visual Intelligence |
| pixelBuffer | CVReadOnlyPixelBuffer? | Raw image data |
Label-based Search:
/// Label-based search: matches Visual Intelligence labels against product tags.
///
/// Fix: comparison is now case-insensitive on BOTH sides. The original only
/// lowercased the label, so a tag stored as "Nike" could never match the
/// label "Nike" (lowercased to "nike").
/// NOTE(review): assumes `products` is in scope on the enclosing type — it is
/// not visible in this snippet.
func values(for input: SemanticContentDescriptor) async throws -> [ProductEntity] {
    // Labels like "shoe", "sneaker", "Nike" etc.
    let labels = input.labels.map { $0.lowercased() }
    // Search your content using these labels
    return products.filter { product in
        let tags = Set(product.tags.map { $0.lowercased() })
        return labels.contains { tags.contains($0) }
    }
}
Image-based Search:
/// Image-based search: converts the descriptor's pixel buffer to a `CGImage`
/// and delegates matching to `imageSearch` (declared elsewhere — not visible
/// in this snippet).
func values(for input: SemanticContentDescriptor) async throws -> [ProductEntity] {
    // No image data supplied — nothing to match against.
    guard let pixelBuffer = input.pixelBuffer else {
        return []
    }
    // Convert to CGImage for processing
    // NOTE(review): `input.pixelBuffer` is `CVReadOnlyPixelBuffer?` while
    // `CIImage(cvPixelBuffer:)` takes `CVPixelBuffer` — confirm the bridge.
    let ciImage = CIImage(cvPixelBuffer: pixelBuffer)
    // NOTE(review): CIContext creation is expensive — consider caching a
    // single instance instead of building one per request.
    let context = CIContext()
    guard let cgImage = context.createCGImage(ciImage, from: ciImage.extent) else {
        return []
    }
    // Use your ML model or image matching logic
    return await imageSearch.findMatches(for: cgImage)
}
Use @UnionValue when your app has different content types.
/// Heterogeneous Visual Intelligence search result.
/// `@UnionValue` lets one query return several distinct entity types
/// (products, categories, and stores) from the same search.
@UnionValue
enum SearchResult {
    case product(ProductEntity)
    case category(CategoryEntity)
    case store(StoreEntity)
}
/// Query returning mixed result types (products and categories) for one
/// camera frame.
struct VisualSearchQuery: IntentValueQuery {
    func values(for input: SemanticContentDescriptor) async throws -> [SearchResult] {
        // The two searches are independent — run them concurrently with
        // `async let` instead of awaiting them one after the other.
        async let products = productSearch(input.labels)
        async let categories = categorySearch(input.labels)

        var results: [SearchResult] = []
        // Search products
        results.append(contentsOf: await products.map { .product($0) })
        // Search categories
        results.append(contentsOf: await categories.map { .category($0) })
        return results
    }
}
Create compelling visual representations for search results.
/// Result appearance for a product-style entity: name as title, free-text
/// description as subtitle, and a bundled thumbnail image.
var displayRepresentation: DisplayRepresentation {
    DisplayRepresentation(
        title: "\(name)",
        subtitle: "\(description)",
        image: .init(named: thumbnailName)
    )
}
/// Result appearance using an SF Symbol ("tag.fill") instead of a bundled
/// asset — useful for entities with no artwork of their own.
var displayRepresentation: DisplayRepresentation {
    DisplayRepresentation(
        title: "\(name)",
        subtitle: "\(category)",
        image: .init(systemName: "tag.fill")
    )
}
/// Variant spelling out `LocalizedStringResource` and the full
/// `DisplayRepresentation.Image` initializer explicitly rather than relying
/// on string-interpolation conversion and `.init` shorthand.
var displayRepresentation: DisplayRepresentation {
    DisplayRepresentation(
        title: LocalizedStringResource("\(name)"),
        subtitle: LocalizedStringResource("\(formatPrice(price))"),
        image: DisplayRepresentation.Image(named: imageName)
    )
}
Enable users to open specific content from search results.
/// Snippet: the `appLinkURL` property the system uses to deep-link from a
/// tapped Visual Intelligence result into the app.
struct ProductEntity: AppEntity {
    // ... other properties

    /// Deep link opened when the user taps this result; the app's
    /// `onOpenURL` handler routes it to the product screen.
    var appLinkURL: URL? {
        URL(string: "myapp://product/\(id)")
    }
}
@main
struct MyApp: App {
    var body: some Scene {
        WindowGroup {
            ContentView()
                .onOpenURL { url in
                    // Visual Intelligence opens results via their
                    // `appLinkURL`; route those URLs here.
                    handleDeepLink(url)
                }
        }
    }

    /// Routes "myapp://" deep links, e.g. myapp://product/<id>.
    /// NOTE(review): `navigationState` is not declared in this snippet —
    /// confirm it is available on the enclosing scope.
    func handleDeepLink(_ url: URL) {
        // Ignore URLs from other schemes.
        guard url.scheme == "myapp" else { return }
        switch url.host {
        case "product":
            // For myapp://product/123 the host is "product" and the
            // last path component is the product id ("123").
            let id = url.lastPathComponent
            navigationState.showProduct(id: id)
        default:
            break
        }
    }
}
Provide access to additional results beyond the initial set.
/// Intent the system invokes when the user asks for more results than the
/// initial set. Conforming to `VisualIntelligenceSearchIntent` makes it the
/// "view more" hook for this app's visual search.
struct ViewMoreProductsIntent: AppIntent, VisualIntelligenceSearchIntent {
    static var title: LocalizedStringResource = "View More Products"

    /// The same descriptor the original query received, so the app can
    /// re-run the search with no limit.
    @Parameter(title: "Semantic Content")
    var semanticContent: SemanticContentDescriptor

    func perform() async throws -> some IntentResult {
        // Store search context for your app
        // NOTE(review): `SearchContext.shared` is a singleton not declared in
        // this snippet — consider injecting it instead.
        SearchContext.shared.currentSearch = semanticContent.labels
        // Return empty result - system will open your app
        return .result()
    }
}
import SwiftUI
import AppIntents
import VisualIntelligence
// MARK: - Entities
/// App Intents entity for a recipe surfaced by Visual Intelligence
/// (e.g. after pointing the camera at ingredients).
///
/// NOTE(review): `AppEntity` normally also requires a
/// `static var defaultQuery`; not visible in this snippet — confirm it
/// exists elsewhere.
struct RecipeEntity: AppEntity {
    var id: String
    var name: String
    var cuisine: String
    var prepTime: String
    var imageName: String

    /// How the system names and counts recipes in UI (e.g. "4 recipes").
    static var typeDisplayRepresentation: TypeDisplayRepresentation {
        TypeDisplayRepresentation(
            name: LocalizedStringResource("Recipe"),
            numericFormat: "\(placeholder: .int) recipes"
        )
    }

    /// Title, "cuisine · prep time" subtitle, and thumbnail for one result.
    var displayRepresentation: DisplayRepresentation {
        DisplayRepresentation(
            title: "\(name)",
            subtitle: "\(cuisine) · \(prepTime)",
            image: .init(named: imageName)
        )
    }

    /// Deep link the system opens when the user taps this recipe.
    var appLinkURL: URL? {
        URL(string: "recipes://recipe/\(id)")
    }
}
// MARK: - Intent Value Query
/// Maps Visual Intelligence classification labels to recipes from the
/// injected `RecipeStore` and wraps each match as a `RecipeEntity`.
struct RecipeVisualSearchQuery: IntentValueQuery {
    @Dependency var recipeStore: RecipeStore

    func values(for input: SemanticContentDescriptor) async throws -> [RecipeEntity] {
        // Labels might include: "pasta", "tomato", "Italian", etc. — treat
        // them as ingredient hints for the store's search.
        let found = await recipeStore.search(
            ingredients: input.labels,
            limit: 15
        )

        // Build the entity list explicitly; capacity is known up front.
        var entities: [RecipeEntity] = []
        entities.reserveCapacity(found.count)
        for recipe in found {
            let entity = RecipeEntity(
                id: recipe.id,
                name: recipe.name,
                cuisine: recipe.cuisine,
                prepTime: recipe.prepTimeFormatted,
                imageName: recipe.thumbnailName
            )
            entities.append(entity)
        }
        return entities
    }
}
// MARK: - More Results Intent
/// "View more" hook for recipe search: invoked by the system when the user
/// wants results beyond the initial set.
struct ViewMoreRecipesIntent: AppIntent, VisualIntelligenceSearchIntent {
    static var title: LocalizedStringResource = "View More Recipes"

    /// The original search descriptor, so the app can re-run the search.
    @Parameter(title: "Semantic Content")
    var semanticContent: SemanticContentDescriptor

    func perform() async throws -> some IntentResult {
        // Save search context
        // Hop to the main actor because RecipeSearchState drives UI.
        await MainActor.run {
            RecipeSearchState.shared.searchTerms = semanticContent.labels
        }
        return .result()
    }
}
// MARK: - Recipe Store
/// In-memory recipe catalog backing the visual search query.
@Observable
class RecipeStore {
    private var recipes: [Recipe] = []

    /// Returns up to `limit` recipes whose ingredient list mentions any of
    /// the given ingredient labels (case-insensitive substring match).
    ///
    /// Fix: the labels are lowercased once up front; the original
    /// re-lowercased every label inside the innermost loop, i.e. once per
    /// ingredient of every recipe.
    func search(ingredients: [String], limit: Int) async -> [Recipe] {
        let needles = ingredients.map { $0.lowercased() }
        let matches = recipes.filter { recipe in
            recipe.ingredients.contains { recipeIngredient in
                let haystack = recipeIngredient.lowercased()
                return needles.contains { haystack.contains($0) }
            }
        }
        return Array(matches.prefix(limit))
    }
}
/// Caps label-search results at 15 so Visual Intelligence gets a fast,
/// bounded response.
func values(for input: SemanticContentDescriptor) async throws -> [ProductEntity] {
    let matches = await search(input.labels)
    // Limit results for quick response
    return matches.prefix(15).map { $0 }
}
/// Filters out low-confidence matches, ranks the rest best-first, and
/// returns at most 15 results.
func values(for input: SemanticContentDescriptor) async throws -> [ProductEntity] {
    let matches = await search(input.labels)
    // Sort by relevance score
    let relevant = matches.filter { $0.relevanceScore > 0.5 }
    let ranked = relevant.sorted { $0.relevanceScore > $1.relevanceScore }
    return Array(ranked.prefix(15))
}
/// Result appearance using `LocalizedStringResource(stringLiteral:)` to
/// build title and subtitle from plain strings, with a
/// "category · price" subtitle and a bundled thumbnail.
var displayRepresentation: DisplayRepresentation {
    DisplayRepresentation(
        title: LocalizedStringResource(stringLiteral: name),
        subtitle: LocalizedStringResource(
            stringLiteral: "\(category) · \(formattedPrice)"
        ),
        image: .init(named: thumbnailName)
    )
}