CoreML tvOS xcode16.0 b1


# CoreML.framework https://github.com/xamarin/xamarin-macios/pull/21145

diff -ruN /Applications/Xcode_15.4.0.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/CoreML.framework/Headers/CoreML.h /Applications/Xcode_16.0.0-beta.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/CoreML.framework/Headers/CoreML.h
--- /Applications/Xcode_15.4.0.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/CoreML.framework/Headers/CoreML.h	2024-04-13 14:33:49
+++ /Applications/Xcode_16.0.0-beta.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/CoreML.framework/Headers/CoreML.h	2024-05-30 04:15:33
@@ -17,11 +17,13 @@
 #import <CoreML/MLArrayBatchProvider.h>
 #import <CoreML/MLMultiArray.h>
 #import <CoreML/MLSequence.h>
+#import <CoreML/MLState.h>
 
 #import <CoreML/MLMultiArrayConstraint.h>
 #import <CoreML/MLImageConstraint.h>
 #import <CoreML/MLDictionaryConstraint.h>
 #import <CoreML/MLSequenceConstraint.h>
+#import <CoreML/MLStateConstraint.h>
 
 #import <CoreML/MLImageSize.h>
 #import <CoreML/MLImageSizeConstraint.h>
@@ -38,6 +40,7 @@
 
 #import <CoreML/MLModel+MLModelCompilation.h>
 #import <CoreML/MLModel+MLComputeDevice.h>
+#import <CoreML/MLModel+MLState.h>
 
 #import <CoreML/MLModelError.h>
 
diff -ruN /Applications/Xcode_15.4.0.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/CoreML.framework/Headers/MLFeatureDescription.h /Applications/Xcode_16.0.0-beta.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/CoreML.framework/Headers/MLFeatureDescription.h
--- /Applications/Xcode_15.4.0.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/CoreML.framework/Headers/MLFeatureDescription.h	2024-04-13 14:33:51
+++ /Applications/Xcode_16.0.0-beta.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/CoreML.framework/Headers/MLFeatureDescription.h	2024-05-30 04:15:35
@@ -12,6 +12,7 @@
 #import <CoreML/MLMultiArrayConstraint.h>
 #import <CoreML/MLImageConstraint.h>
 #import <CoreML/MLSequenceConstraint.h>
+#import <CoreML/MLStateConstraint.h>
 #import <CoreML/MLExport.h>
 
 NS_ASSUME_NONNULL_BEGIN
@@ -54,6 +55,11 @@
 
 /// Constraint when type == MLFeatureTypeSequence, nil otherwise
 @property (readonly, nullable, nonatomic) MLSequenceConstraint *sequenceConstraint API_AVAILABLE(macos(10.14), ios(12.0), watchos(5.0), tvos(12.0));
+
+/// The state feature value constraint.
+///
+/// The property has a value when `.type == MLFeatureTypeState`.
+@property (readonly, nullable, nonatomic) MLStateConstraint *stateConstraint API_AVAILABLE(macos(15.0), ios(18.0), watchos(11.0), tvos(18.0));
 
 @end
 
diff -ruN /Applications/Xcode_15.4.0.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/CoreML.framework/Headers/MLFeatureType.h /Applications/Xcode_16.0.0-beta.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/CoreML.framework/Headers/MLFeatureType.h
--- /Applications/Xcode_15.4.0.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/CoreML.framework/Headers/MLFeatureType.h	2024-04-13 14:33:51
+++ /Applications/Xcode_16.0.0-beta.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/CoreML.framework/Headers/MLFeatureType.h	2024-05-30 04:15:35
@@ -34,6 +34,9 @@
 
     /// MLSequence. Ordered collection of feature values with the same type
     MLFeatureTypeSequence API_AVAILABLE(macos(10.14), ios(12.0), watchos(5.0), tvos(12.0)) = 7,
+    
+    /// MLState. Represents a model state that may be updated in each inference.
+    MLFeatureTypeState API_AVAILABLE(macos(15.0), ios(18.0), watchos(11.0), tvos(18.0)) = 8,
 
 } API_AVAILABLE(macos(10.13), ios(11.0), watchos(4.0), tvos(11.0));
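
For context, a minimal Swift sketch of code that handles the new `MLFeatureTypeState` case alongside the existing feature types. The helper is hypothetical (not from the PR), and the `.state` case name assumes the usual Swift import of the Objective-C enum:

```swift
import CoreML

// Hypothetical helper: classify a feature description, including the new state type.
func kindDescription(of feature: MLFeatureDescription) -> String {
    switch feature.type {
    case .multiArray:
        return "multi-array"
    case .image:
        return "image"
    case .sequence:
        return "sequence"
    case .state:
        // New in the Xcode 16 SDKs; such features also carry a `stateConstraint`.
        return "state"
    default:
        return "other (raw value \(feature.type.rawValue))"
    }
}
```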
 
diff -ruN /Applications/Xcode_15.4.0.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/CoreML.framework/Headers/MLFeatureValue.h /Applications/Xcode_16.0.0-beta.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/CoreML.framework/Headers/MLFeatureValue.h
--- /Applications/Xcode_15.4.0.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/CoreML.framework/Headers/MLFeatureValue.h	2024-04-13 14:33:52
+++ /Applications/Xcode_16.0.0-beta.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/CoreML.framework/Headers/MLFeatureValue.h	2024-05-30 04:15:36
@@ -5,13 +5,13 @@
 //  Copyright © 2017 Apple Inc. All rights reserved.
 //
 
-#import <Foundation/Foundation.h>
 #import <CoreML/MLFeatureType.h>
 #import <CoreML/MLMultiArray.h>
 #import <CoreML/MLSequence.h>
-#import <CoreVideo/CVPixelBuffer.h>
+#import <CoreML/MLState.h>
 #import <CoreML/MLExport.h>
-
+#import <Foundation/Foundation.h>
+#import <CoreVideo/CVPixelBuffer.h>
 
 NS_ASSUME_NONNULL_BEGIN
 
diff -ruN /Applications/Xcode_15.4.0.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/CoreML.framework/Headers/MLModel+MLState.h /Applications/Xcode_16.0.0-beta.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/CoreML.framework/Headers/MLModel+MLState.h
--- /Applications/Xcode_15.4.0.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/CoreML.framework/Headers/MLModel+MLState.h	1970-01-01 01:00:00
+++ /Applications/Xcode_16.0.0-beta.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/CoreML.framework/Headers/MLModel+MLState.h	2024-05-30 05:06:53
@@ -0,0 +1,106 @@
+//
+//  MLModel+MLState.h
+//  CoreML_framework
+//
+//  Copyright © 2024 Apple Inc. All rights reserved.
+//
+
+#import <CoreML/MLModel.h>
+#import <CoreML/MLState.h>
+
+NS_ASSUME_NONNULL_BEGIN
+
+API_AVAILABLE(macos(15.0), ios(18.0), watchos(11.0), tvos(18.0))
+ML_EXPORT
+@interface MLModel (MLState)
+
+/// Creates a new state object.
+///
+/// Core ML framework will allocate the state buffers declared in the model.
+///
+/// The allocated state buffers are initialized to zeros. To initialize with different values, use `.withMultiArray(for:)` to get the mutable `MLMultiArray`-view to the state buffer.
+///
+/// It returns an empty state when the model is stateless. One can use the empty state with stateful prediction functions such as `prediction(from:using:)` and those predictions will be stateless. This simplifies the call site which may or may not use a stateful model.
+///
+/// ```swift
+/// // Create state that contains two state buffers: s1 and s2.
+/// // Then, initialize s1 to 1.0 and s2 to 2.0.
+/// let state = model.newState()
+/// state.withMultiArray(for: "s1") { stateMultiArray in
+///     stateMultiArray[0] = 1.0
+/// }
+/// state.withMultiArray(for: "s2") { stateMultiArray in
+///     stateMultiArray[0] = 2.0
+/// }
+/// ```
+- (MLState *)newState
+    NS_REFINED_FOR_SWIFT
+    API_AVAILABLE(macos(15.0), ios(18.0), watchos(11.0), tvos(18.0));
+
+/// Run a stateful prediction synchronously.
+///
+/// Use this method to run predictions on a stateful model.
+///
+/// ```swift
+/// let state = model.newState()
+/// let prediction = try model.prediction(from: inputFeatures, using: state)
+/// ```
+///
+/// - Parameters:
+///  - inputFeatures: The input features as declared in the model description.
+///  - state: The state object created by `newState()` method.
+///  - error: The output parameter to receive an error information on failure.
+- (nullable id<MLFeatureProvider>)predictionFromFeatures:(id<MLFeatureProvider>)inputFeatures
+                                              usingState:(MLState *)state
+                                                   error:(NSError * __autoreleasing *)error
+    NS_SWIFT_NAME(prediction(from:using:))
+    API_AVAILABLE(macos(15.0), ios(18.0), watchos(11.0), tvos(18.0));
+
+/// Run a stateful prediction synchronously with options.
+///
+/// Use this method to run predictions on a stateful model.
+///
+/// ```swift
+/// let state = model.newState()
+/// let prediction = try model.prediction(from: inputFeatures, using: state, options: predictionOptions)
+/// ```
+///
+/// - Parameters:
+///  - inputFeatures: The input features as declared in the model description.
+///  - state: The state object created by `newState()` method.
+///  - options: The prediction options.
+///  - error: The output parameter to receive an error information on failure.
+- (nullable id<MLFeatureProvider>)predictionFromFeatures:(id<MLFeatureProvider>)inputFeatures
+                                              usingState:(MLState *)state
+                                                 options:(MLPredictionOptions *)options
+                                                   error:(NSError * __autoreleasing *)error
+    NS_SWIFT_NAME(prediction(from:using:options:))
+    API_AVAILABLE(macos(15.0), ios(18.0), watchos(11.0), tvos(18.0));
+
+/// Run a stateful prediction asynchronously.
+///
+/// Use this method to run predictions on a stateful model.
+/// 
+/// Do not request a prediction while another prediction that shares the same state is in-flight, otherwise the behavior is undefined.
+///
+/// ```swift
+/// let state = model.newState()
+/// let prediction = try await model.prediction(from: inputFeatures, using: state)
+/// ```
+///
+/// - Parameters
+///  - input: The input features to make a prediction from.
+///  - state: The state object created by `newState()` method.
+///  - options: Prediction options to modify how the prediction is run.
+///  - completionHandler: A block that will be invoked once the prediction has completed successfully or unsuccessfully. On success, it is invoked with a valid model output. On failure, it is invoked with a nil output and NSError
+- (void)predictionFromFeatures:(id<MLFeatureProvider>)inputFeatures
+                    usingState:(MLState *)state
+                       options:(MLPredictionOptions *)options
+             completionHandler:(void (^)(_Nullable id<MLFeatureProvider> output, NSError * _Nullable error))completionHandler
+                 API_AVAILABLE(macos(16.0), ios(18.0), watchos(11.0), tvos(18.0))
+                 NS_REFINED_FOR_SWIFT NS_SWIFT_DISABLE_ASYNC;
+
+@end
+
+NS_ASSUME_NONNULL_END
+
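A hedged Swift sketch of how the stateful prediction methods above might be called, following the header's own doc snippets. `model` and `inputFeatures` are placeholders, and since `newState` is marked `NS_REFINED_FOR_SWIFT`, the Swift overlay may expose it under a slightly different spelling:

```swift
import CoreML

// Sketch only: run several predictions that share one state object.
func runStatefulPredictions(on model: MLModel, inputFeatures: MLFeatureProvider) throws {
    // The state carries information (for example an accumulator or a KV cache)
    // from one prediction to the next.
    let state = model.newState()
    let options = MLPredictionOptions()

    for _ in 0..<10 {
        // Predictions that share the same state must not run concurrently.
        let output = try model.prediction(from: inputFeatures, using: state, options: options)
        _ = output
    }
}
```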
diff -ruN /Applications/Xcode_15.4.0.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/CoreML.framework/Headers/MLModel.h /Applications/Xcode_16.0.0-beta.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/CoreML.framework/Headers/MLModel.h
--- /Applications/Xcode_15.4.0.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/CoreML.framework/Headers/MLModel.h	2024-03-22 20:19:40
+++ /Applications/Xcode_16.0.0-beta.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/CoreML.framework/Headers/MLModel.h	2024-05-30 05:38:48
@@ -55,35 +55,46 @@
             configuration:(MLModelConfiguration *)configuration
         completionHandler:(void (^)(MLModel * _Nullable model, NSError * _Nullable error))handler API_AVAILABLE(macos(11.0), ios(14.0), watchos(7.0), tvos(14.0)) NS_REFINED_FOR_SWIFT NS_SWIFT_DISABLE_ASYNC;
 
-/// Convenience method to run a prediction synchronously with default prediction options.
+/// Run a prediction on a model synchronously.
+///
+/// This is a convenience overload method of `prediction(from:options:)` that uses the default prediction options.
+///
+/// - Parameters
+///   - input: The input features to make a prediction from.
+///   - error: The output parameter to be filled with error information on failure.
+/// - Returns: The output features from the prediction.
 - (nullable id<MLFeatureProvider>)predictionFromFeatures:(id<MLFeatureProvider>)input
-                                                   error:(NSError **)error;
+                                                   error:(NSError * __autoreleasing *)error;
 
-/*!
- Run a prediction on a model synchronously
-
- @param input The input features to make a prediction from
- @param options Prediction options to modify how the prediction is run
- @param error The out parameter for error when nil is returned. On success, it is nil
- @returns The output features from the prediction
-*/
+/// Run a prediction on a model synchronously
+///
+/// - Parameters
+///   - input: The input features to make a prediction from.
+///   - options: Prediction options to modify how the prediction is run.
+///   - error: The output parameter to be filled with error information on failure.
+/// - Returns: The output features from the prediction.
 - (nullable id<MLFeatureProvider>)predictionFromFeatures:(id<MLFeatureProvider>)input
                                                  options:(MLPredictionOptions *)options
                                                    error:(NSError **)error;
 
-/// Convenience method to run a prediction asynchronously with default prediction options.
+/// Run a prediction on a model asynchronously.
+///
+/// This is a convenience overload method of `prediction(from:options:) async` that uses the default prediction options.
+///
+/// - Parameters
+///   - input: The input features to make a prediction from.
+///   - completionHandler: A block that will be invoked once the prediction has completed successfully or unsuccessfully. On success, it is invoked with a valid model output. On failure, it is invoked with a nil output and NSError
 - (void)predictionFromFeatures:(id<MLFeatureProvider>)input
              completionHandler:(void (^)(_Nullable id<MLFeatureProvider> output, NSError * _Nullable error))completionHandler
                  API_AVAILABLE(macos(14.0), ios(17.0), watchos(10.0), tvos(17.0))
                  NS_REFINED_FOR_SWIFT NS_SWIFT_DISABLE_ASYNC;
 
-/*!
- Run a prediction on a model asynchronously.
-
- @param input The input features to make a prediction from
- @param options Prediction options to modify how the prediction is run
- @param completionHandler A block that will be invoked once the prediction has completed successfully or unsuccessfully. On success, it is invoked with a valid model output. On failure, it is invoked with a nil output and NSError
-*/
+/// Run a prediction on a model asynchronously.
+///
+/// - Parameters
+///   - input: The input features to make a prediction from.
+///   - options: Prediction options to modify how the prediction is run.
+///   - completionHandler: A block that will be invoked once the prediction has completed successfully or unsuccessfully. On success, it is invoked with a valid model output. On failure, it is invoked with a nil output and NSError
 - (void)predictionFromFeatures:(id<MLFeatureProvider>)input
                        options:(MLPredictionOptions *)options
              completionHandler:(void (^)(_Nullable id<MLFeatureProvider> output, NSError * _Nullable error))completionHandler
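
The reworked doc comments above describe the existing synchronous and asynchronous prediction calls; as a quick reference, the synchronous form surfaces in Swift as a throwing method rather than an `NSError` out-parameter (placeholder `model` and `inputFeatures`):

```swift
import CoreML

// Minimal sketch of the synchronous prediction call documented above.
func predict(with model: MLModel, inputFeatures: MLFeatureProvider) throws -> MLFeatureProvider {
    let options = MLPredictionOptions()
    // Throws on failure; the Objective-C NSError out-parameter becomes a Swift error.
    return try model.prediction(from: inputFeatures, options: options)
}
```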
diff -ruN /Applications/Xcode_15.4.0.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/CoreML.framework/Headers/MLModelAsset.h /Applications/Xcode_16.0.0-beta.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/CoreML.framework/Headers/MLModelAsset.h
--- /Applications/Xcode_15.4.0.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/CoreML.framework/Headers/MLModelAsset.h	2024-04-13 14:33:51
+++ /Applications/Xcode_16.0.0-beta.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/CoreML.framework/Headers/MLModelAsset.h	2024-05-30 02:24:05
@@ -4,24 +4,96 @@
 //
 //  Copyright © 2022 Apple Inc. All rights reserved.
 //
+#import <CoreML/MLModelDescription.h>
 
-#import <Foundation/Foundation.h>
-
 NS_ASSUME_NONNULL_BEGIN
 
+/// A compiled model asset.
+///
+/// `MLModelAsset` is an abstraction of a compiled model, which can be:
+///
+///  - `.mlmodelc` bundle on the file system
+///  - In-memory model specification
+///
+/// It provides the unified interface to query the model description and to instantiate `MLModel`.
+///
+/// ```swift
+/// // Creates an object.
+/// let modelAsset = MLModelAsset(url: modelURL)
+///
+/// // Query the model description
+/// let description = try await modelAsset.modelDescription
+///
+/// // Query the list of functions in the model asset.
+/// let functionNames = try await modelAsset.functionNames
+///
+/// // Query the model description of a specific function.
+/// let descriptionOfMyFunction = try await modelAsset.modelDescription(of: "MyFunction")
+///
+/// // Instantiate `MLModel` for "MyFunction".
+/// let modelConfiguration = MLModelConfiguration()
+/// modelConfiguration.functionName = "MyFunction"
+/// let model = try await MLModel.load(asset: modelAsset, configuration: modelConfiguration)
+/// ```
 ML_EXPORT
 API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0), watchos(9.0))
 @interface MLModelAsset : NSObject
 
-/*!
- * Construct an optimized model asset from the contents of specification data.
- *
- * @param specificationData Contents of .mlmodel as a data blob.
- * @param error When the model asset creation fails error is populated with the reason for failure.
- *
- */
+/// Construct an optimized model asset from the contents of specification data.
+///
+/// - Parameters:
+///   - specificationData: Contents of .mlmodel as a data blob.
+///   - error: When the model asset creation fails error is populated with the reason for failure.
 + (nullable instancetype)modelAssetWithSpecificationData:(NSData *)specificationData
                                                    error:(NSError * _Nullable __autoreleasing *)error API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0)) API_UNAVAILABLE(watchos) NS_SWIFT_NAME(init(specification:));
+
+/// Constructs a ModelAsset from a compiled model URL.
+///
+/// - Parameters:
+///   - compiledModelURL: Location on the disk where the model asset is present.
+///   - error: Errors if the model asset is not loadable.
+///
+/// - Returns: a model asset or nil if there is an error.
++ (nullable instancetype)modelAssetWithURL:(NSURL *)compiledModelURL
+                                     error:(NSError **)error API_AVAILABLE(macos(15.0), ios(18.0), watchos(11.0), tvos(18.0));
+
+/// The default model descripton.
+///
+/// Use this method to get the description of the model such as the feature descriptions, the model author, and other metadata.
+///
+/// For the multi-function model asset, this method vends the description for the default function. Use `modelDescription(for:)` to get the model description of other functions.
+///
+/// ```swift
+/// let modelAsset = try MLModelAsset(url: modelURL)
+/// let modelDescription = try await modelAsset.modelDescription()
+/// print(modelDescription)
+/// ```
+- (void)modelDescriptionWithCompletionHandler:(void (^)(MLModelDescription * _Nullable modelDescription, NSError * _Nullable error))handler API_AVAILABLE(macos(15.0), ios(18.0), watchos(11.0), tvos(18.0)) NS_SWIFT_ASYNC_NAME(getter:modelDescription());
+
+/// The model descripton for a specified function.
+///
+/// Use this method to get the description of the model such as the feature descriptions, the model author, and other metadata.
+///
+/// ```swift
+/// let modelAsset = try MLModelAsset(url: modelURL)
+/// let modelDescription = try await modelAsset.modelDescription(of: "my_function")
+/// print(modelDescription)
+/// ```
+- (void)modelDescriptionOfFunctionNamed:(NSString *)functionName
+                      completionHandler:(void (^)(MLModelDescription * _Nullable modelDescription, NSError * _Nullable error))handler API_AVAILABLE(macos(15.0), ios(18.0), watchos(11.0), tvos(18.0)) NS_SWIFT_ASYNC_NAME(modelDescription(of:));
+
+/// The list of function names in the model asset.
+///
+/// Some model types (e.g. ML Program) supports multiple functions. Use this method to query the function names.
+///
+/// The method vends the empty array when the model doesn't use the multi-function configuration.
+///
+/// ```swift
+/// let modelAsset = try MLModelAsset(url: modelURL)
+/// let functionNames = try await modelAsset.functionNames
+/// print(functionNames) // For example, ["my_function1", "my_function2"];
+/// ```
+- (void)functionNamesWithCompletionHandler:(void (^)(NSArray<NSString *> * _Nullable functionNames, NSError * _Nullable error))handler API_UNAVAILABLE(macos, ios, watchos, tvos) NS_SWIFT_ASYNC_NAME(getter:functionNames());
 
 // cannot construct MLModelAsset without model specification data.
 - (instancetype)init NS_UNAVAILABLE;
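
A hedged Swift sketch of the new `MLModelAsset` conveniences, modelled on the examples in the header's doc comments. `modelURL` is a placeholder for a compiled `.mlmodelc` bundle; the `functionNames` accessor is left out because this beta marks it `API_UNAVAILABLE`:

```swift
import CoreML

// Sketch only: inspect a compiled model asset and then load it.
func inspectAndLoad(modelURL: URL) async throws -> MLModel {
    let asset = try MLModelAsset(url: modelURL)

    // New in this SDK: query the default function's description without
    // instantiating the model first.
    let description = try await asset.modelDescription
    print(Array(description.inputDescriptionsByName.keys))

    return try await MLModel.load(asset: asset, configuration: MLModelConfiguration())
}
```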
diff -ruN /Applications/Xcode_15.4.0.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/CoreML.framework/Headers/MLModelCollection.h /Applications/Xcode_16.0.0-beta.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/CoreML.framework/Headers/MLModelCollection.h
--- /Applications/Xcode_15.4.0.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/CoreML.framework/Headers/MLModelCollection.h	2024-04-13 14:33:52
+++ /Applications/Xcode_16.0.0-beta.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/CoreML.framework/Headers/MLModelCollection.h	2024-05-30 04:15:36
@@ -16,18 +16,18 @@
  *
  * A collection of models managed as part of Core ML Model Deployment.
  */
-API_DEPRECATED("Use Background Assets or NSURLSession instead.", macos(11.0, 14.4), ios(14.0, 17.4))API_UNAVAILABLE(tvos, watchos)
+MODELCOLLECTION_SUNSET("Use Background Assets or NSURLSession instead.", macos(11.0, 14.4), ios(14.0, 17.4))API_UNAVAILABLE(tvos, watchos)
 ML_EXPORT
 @interface MLModelCollection : NSObject
 
 /// The identifier of the model collection you want to access, as configured in the Core ML Model Deployment dashboard.
-@property (readonly, nonatomic, copy) NSString *identifier  API_DEPRECATED("Use Background Assets or NSURLSession instead.", macos(11.0, 14.4), ios(14.0, 17.4));
+@property (readonly, nonatomic, copy) NSString *identifier  MODELCOLLECTION_SUNSET("Use Background Assets or NSURLSession instead.", macos(11.0, 14.4), ios(14.0, 17.4));
 
 /// Information about the models downloaded in the collection, or an empty dictionary if the collection has not been downloaded.
-@property (readonly, nonatomic, copy) NSDictionary<NSString *, MLModelCollectionEntry *> *entries API_DEPRECATED("Use Background Assets or NSURLSession instead.", macos(11.0, 14.4), ios(14.0, 17.4));
+@property (readonly, nonatomic, copy) NSDictionary<NSString *, MLModelCollectionEntry *> *entries MODELCOLLECTION_SUNSET("Use Background Assets or NSURLSession instead.", macos(11.0, 14.4), ios(14.0, 17.4));
 
 /// The identifier for the currently downloaded deployment, corresponding to a recent deployment on the Core ML Model Deployment dashboard.
-@property (readonly, nonatomic, copy) NSString *deploymentID API_DEPRECATED("Use Background Assets or NSURLSession instead.", macos(11.0, 14.4), ios(14.0, 17.4));
+@property (readonly, nonatomic, copy) NSString *deploymentID MODELCOLLECTION_SUNSET("Use Background Assets or NSURLSession instead.", macos(11.0, 14.4), ios(14.0, 17.4));
 
 /*!
   Request access to a model collection. If the collection is not downloaded on the device, it is requested
@@ -42,7 +42,7 @@
 */
 + (NSProgress *)beginAccessingModelCollectionWithIdentifier:(NSString *)identifier
                                           completionHandler:(void (^)(MLModelCollection *_Nullable modelCollection, NSError *_Nullable error))completionHandler NS_REFINED_FOR_SWIFT
-                                          API_DEPRECATED("Use Background Assets or NSURLSession instead.", macos(11.0, 14.4), ios(14.0, 17.4));
+                                          MODELCOLLECTION_SUNSET("Use Background Assets or NSURLSession instead.", macos(11.0, 14.4), ios(14.0, 17.4));
 
 /*!
   End access to a model collection. This informs the system you have finished accessing the models within the collection.
@@ -54,7 +54,7 @@
 */
 + (void)endAccessingModelCollectionWithIdentifier:(NSString *)identifier
                                 completionHandler:(void (^)(BOOL success, NSError *_Nullable error))completionHandler NS_REFINED_FOR_SWIFT NS_SWIFT_ASYNC_NAME(endAccessing(identifier:))
-                                API_DEPRECATED("Use Background Assets or NSURLSession instead.", macos(11.0, 14.4), ios(14.0, 17.4));
+                                MODELCOLLECTION_SUNSET("Use Background Assets or NSURLSession instead.", macos(11.0, 14.4), ios(14.0, 17.4));
 
 - (instancetype)init NS_UNAVAILABLE;
 
@@ -63,9 +63,8 @@
 @end
 
 /// Notification posted when the model collection has changed.
-API_DEPRECATED("Use Background Assets or NSURLSession instead.", macos(11.0, 14.4), ios(14.0, 17.4))API_UNAVAILABLE(tvos, watchos)
-API_UNAVAILABLE(tvos, watchos)
+MODELCOLLECTION_SUNSET("Use Background Assets or NSURLSession instead.", macos(11.0, 14.4), ios(14.0, 17.4))API_UNAVAILABLE(tvos, watchos)
 ML_EXPORT
-NSNotificationName const MLModelCollectionDidChangeNotification API_DEPRECATED("Use Background Assets or NSURLSession instead.", macos(11.0, 14.4), ios(14.0, 17.4));
+NSNotificationName const MLModelCollectionDidChangeNotification MODELCOLLECTION_SUNSET("Use Background Assets or NSURLSession instead.", macos(11.0, 14.4), ios(14.0, 17.4));
 
 NS_ASSUME_NONNULL_END
diff -ruN /Applications/Xcode_15.4.0.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/CoreML.framework/Headers/MLModelCollectionEntry.h /Applications/Xcode_16.0.0-beta.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/CoreML.framework/Headers/MLModelCollectionEntry.h
--- /Applications/Xcode_15.4.0.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/CoreML.framework/Headers/MLModelCollectionEntry.h	2024-04-13 14:33:52
+++ /Applications/Xcode_16.0.0-beta.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/CoreML.framework/Headers/MLModelCollectionEntry.h	2024-05-30 04:15:37
@@ -8,22 +8,60 @@
 #import <Foundation/Foundation.h>
 #import <CoreML/MLExport.h>
 
+#import <Availability.h>
+#import <AvailabilityMacros.h>
+#import <AvailabilityVersions.h>
+
+#if TARGET_OS_OSX && defined(__MAC_OS_X_VERSION_MIN_REQUIRED)
+    #if (__MAC_OS_X_VERSION_MIN_REQUIRED >= 150000 /* __MAC_15_0 */)
+        #define MODELCOLLECTION_SUNSET(...) API_UNAVAILABLE(macos)
+    #else
+        #define MODELCOLLECTION_SUNSET(...) API_DEPRECATED(__VA_ARGS__)
+    #endif
+#elif TARGET_OS_IOS && defined(__IPHONE_OS_VERSION_MIN_REQUIRED)
+    #if (__IPHONE_OS_VERSION_MIN_REQUIRED >= 180000)
+        #define MODELCOLLECTION_SUNSET(...) API_UNAVAILABLE(ios)
+    #else
+        #define MODELCOLLECTION_SUNSET(...) API_DEPRECATED(__VA_ARGS__)
+    #endif
+#elif TARGET_OS_WATCH && defined(__WATCH_OS_VERSION_MIN_REQUIRED)
+    #if (__WATCH_OS_VERSION_MIN_REQUIRED >= 110000 /* __WATCHOS_11_0 */)
+        #define MODELCOLLECTION_SUNSET(...) API_UNAVAILABLE(watchos)
+    #else
+        #define MODELCOLLECTION_SUNSET(...) API_DEPRECATED(__VA_ARGS__)
+    #endif
+#elif TARGET_OS_TV && defined(__TV_OS_VERSION_MIN_REQUIRED)
+    #if (__TV_OS_VERSION_MIN_REQUIRED >= 180000 /* __TVOS_18_0 */)
+        #define MODELCOLLECTION_SUNSET(...) API_UNAVAILABLE(tvos)
+    #else
+        #define MODELCOLLECTION_SUNSET(...) API_DEPRECATED(__VA_ARGS__)
+    #endif
+#elif TARGET_OS_VISION
+    #if (__VISION_OS_VERSION_MIN_REQUIRED >= 20000 /* __VISIONOS_2_0 */)
+        #define MODELCOLLECTION_SUNSET(...) API_UNAVAILABLE(visionos)
+    #else
+        #define MODELCOLLECTION_SUNSET(...) API_DEPRECATED(__VA_ARGS__)
+    #endif
+#else
+    #define MODELCOLLECTION_SUNSET(...) API_UNAVAILABLE(__VA_ARGS__)
+#endif
+
 NS_ASSUME_NONNULL_BEGIN
 
 /*!
  * MLModelCollectionEntry
  * Information about a model in a model collection.
  */
-API_DEPRECATED("Use Background Assets or NSURLSession instead.", macos(11.0, 14.4), ios(14.0, 17.4))
+MODELCOLLECTION_SUNSET("Use Background Assets or NSURLSession instead.", macos(11.0, 14.4), ios(14.0, 17.4))
 API_UNAVAILABLE(tvos, watchos)
 ML_EXPORT
 @interface MLModelCollectionEntry : NSObject
 
-@property (readonly, nonatomic) NSString *modelIdentifier API_DEPRECATED("Use Background Assets or NSURLSession instead.", macos(11.0, 14.4), ios(14.0, 17.4));
+@property (readonly, nonatomic) NSString *modelIdentifier MODELCOLLECTION_SUNSET("Use Background Assets or NSURLSession instead.", macos(11.0, 14.4), ios(14.0, 17.4));
 
-@property (readonly, nonatomic) NSURL *modelURL API_DEPRECATED("Use Background Assets or NSURLSession instead.", macos(11.0, 14.4), ios(14.0, 17.4));
+@property (readonly, nonatomic) NSURL *modelURL MODELCOLLECTION_SUNSET("Use Background Assets or NSURLSession instead.", macos(11.0, 14.4), ios(14.0, 17.4));
 
-- (BOOL)isEqualToModelCollectionEntry:(MLModelCollectionEntry *)entry API_DEPRECATED("Use Background Assets or NSURLSession instead.", macos(11.0, 14.4), ios(14.0, 17.4));
+- (BOOL)isEqualToModelCollectionEntry:(MLModelCollectionEntry *)entry MODELCOLLECTION_SUNSET("Use Background Assets or NSURLSession instead.", macos(11.0, 14.4), ios(14.0, 17.4));
 
 - (instancetype)init NS_UNAVAILABLE;
 
diff -ruN /Applications/Xcode_15.4.0.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/CoreML.framework/Headers/MLModelConfiguration.h /Applications/Xcode_16.0.0-beta.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/CoreML.framework/Headers/MLModelConfiguration.h
--- /Applications/Xcode_15.4.0.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/CoreML.framework/Headers/MLModelConfiguration.h	2024-04-13 14:33:52
+++ /Applications/Xcode_16.0.0-beta.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/CoreML.framework/Headers/MLModelConfiguration.h	2024-05-30 04:15:36
@@ -73,4 +73,22 @@
 
 @end
 
+API_AVAILABLE(macos(15.0), ios(18.0), watchos(11.0), tvos(18.0))
+ML_EXPORT
+@interface MLModelConfiguration (MultiFunctions)
+
+/// Function name that `MLModel` will use.
+///
+/// Some model types (e.g. ML Program) supports multiple functions in a model asset, where each `MLModel` instance is associated with a particular function.
+///
+/// Use `MLModelAsset` to get the list of available functions. Use `nil` to use a default function.
+///
+/// ```swift
+/// let configuration = MLModelConfiguration()
+/// configuration.functionName = "my_function"
+/// ```
+@property (readwrite, nullable, copy, nonatomic) NSString *functionName API_AVAILABLE(macos(15.0), ios(18.0), watchos(11.0), tvos(18.0));
+
+@end
+
 NS_ASSUME_NONNULL_END
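
A short Swift sketch of the new multi-function configuration, following the header's example. The function name `"my_function"` is the placeholder used in the doc comment; real names come from the model asset:

```swift
import CoreML

// Sketch only: bind an MLModel instance to one function of a multi-function asset.
func loadFunction(from modelURL: URL) async throws -> MLModel {
    let configuration = MLModelConfiguration()
    configuration.functionName = "my_function"  // nil selects the default function

    let asset = try MLModelAsset(url: modelURL)
    return try await MLModel.load(asset: asset, configuration: configuration)
}
```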
diff -ruN /Applications/Xcode_15.4.0.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/CoreML.framework/Headers/MLModelDescription.h /Applications/Xcode_16.0.0-beta.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/CoreML.framework/Headers/MLModelDescription.h
--- /Applications/Xcode_15.4.0.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/CoreML.framework/Headers/MLModelDescription.h	2024-04-13 14:33:52
+++ /Applications/Xcode_16.0.0-beta.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/CoreML.framework/Headers/MLModelDescription.h	2024-05-30 04:15:36
@@ -15,10 +15,8 @@
 @class MLParameterKey;
 @class MLParameterDescription;
 
-/*!
- * A description of a model containing input and output feature descriptions, optionally outputted features
- * with special meaning and metadata.
- */
+/// A description of a model containing input, output, and state feature descriptions, optionally outputted features
+/// with special meaning and metadata.
 API_AVAILABLE(macos(10.13), ios(11.0), watchos(4.0), tvos(11.0))
 ML_EXPORT
 @interface MLModelDescription : NSObject <NSSecureCoding>
@@ -28,6 +26,9 @@
 
 /// Description of the outputs from the model
 @property (readonly, nonatomic) NSDictionary<NSString *, MLFeatureDescription *> *outputDescriptionsByName;
+
+/// Description of the state features.
+@property (readonly, nonatomic) NSDictionary<NSString *, MLFeatureDescription *> *stateDescriptionsByName API_AVAILABLE(macos(15.0), ios(18.0), watchos(11.0), tvos(18.0));
 
 /// Name of the primary target / predicted output feature in the output descriptions
 @property (readonly, nullable, nonatomic, copy) NSString *predictedFeatureName;
diff -ruN /Applications/Xcode_15.4.0.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/CoreML.framework/Headers/MLMultiArray.h /Applications/Xcode_16.0.0-beta.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/CoreML.framework/Headers/MLMultiArray.h
--- /Applications/Xcode_15.4.0.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/CoreML.framework/Headers/MLMultiArray.h	2024-03-22 20:19:40
+++ /Applications/Xcode_16.0.0-beta.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/CoreML.framework/Headers/MLMultiArray.h	2024-05-30 05:06:53
@@ -76,7 +76,7 @@
 /*!
  * Create by wrapping a pixel buffer.
  *
- * Use this initializer to create IOSurface backed MLMultiArray, which can reduce the inference latency by avoiding the buffer copy.
+ * Use this initializer to create an IOSurface backed MLMultiArray, which can reduce the inference latency by avoiding the buffer copy.
  *
  * The instance will own the pixel buffer and release it on the deallocation.
  *
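
For reference, a minimal Swift sketch of the IOSurface-backed `MLMultiArray` described in the comment above. The dimensions are arbitrary placeholders; the wrapped pixel buffer is assumed to need IOSurface backing and the `OneComponent16Half` format, with the shape's last dimension matching the buffer width and the remaining dimensions matching the height:

```swift
import CoreML
import CoreVideo

// Sketch only: create an IOSurface-backed pixel buffer and wrap it without copying.
func makeFloat16MultiArray(width: Int, height: Int) -> MLMultiArray? {
    let attributes: [CFString: Any] = [kCVPixelBufferIOSurfacePropertiesKey: [:] as [CFString: Any]]
    var pixelBuffer: CVPixelBuffer?
    let status = CVPixelBufferCreate(kCFAllocatorDefault,
                                     width,
                                     height,
                                     kCVPixelFormatType_OneComponent16Half,
                                     attributes as CFDictionary,
                                     &pixelBuffer)
    guard status == kCVReturnSuccess, let pixelBuffer else { return nil }

    // The multi-array retains the pixel buffer and shares its storage.
    return MLMultiArray(pixelBuffer: pixelBuffer, shape: [NSNumber(value: height), NSNumber(value: width)])
}
```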
diff -ruN /Applications/Xcode_15.4.0.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/CoreML.framework/Headers/MLState.h /Applications/Xcode_16.0.0-beta.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/CoreML.framework/Headers/MLState.h
--- /Applications/Xcode_15.4.0.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/CoreML.framework/Headers/MLState.h	1970-01-01 01:00:00
+++ /Applications/Xcode_16.0.0-beta.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/CoreML.framework/Headers/MLState.h	2024-05-30 04:15:35
@@ -0,0 +1,64 @@
+//
+//  MLState.h
+//  CoreML
+//
+//  Copyright © 2023 Apple Inc. All rights reserved.
+//
+
+#import <CoreML/MLExport.h>
+#import <Foundation/Foundation.h>
+
+@class MLMultiArray;
+
+NS_ASSUME_NONNULL_BEGIN
+
+/// Handle to the state buffers.
+///
+/// A stateful model maintains a state from one prediction to another by storing the information in the state buffers. To use such a model, the client must request the model to create state buffers and get `MLState` object, which is the handle to those buffers. Then, at the prediction time, pass the `MLState` object in one of the stateful prediction functions.
+///
+/// ```swift
+/// // Load a stateful model
+/// let modelAsset = try MLModelAsset(url: modelURL)
+/// let model = try await MLModel.load(asset: modelAsset, configuration: MLModelConfiguration())
+///
+/// // Request a state
+/// let state = model.newState()
+///
+/// // Run predictions
+/// for _ in 0 ..< 42 {
+///   _ = try await model.prediction(from: inputFeatures, using: state)
+/// }
+///
+/// // Access the state buffer.
+/// state.withMultiArray(for: "accumulator") { stateMultiArray in
+///   ...
+/// }
+/// ```
+///
+/// The object is a handle to the state buffers. The client shall not read or write the buffers while a prediction is in-flight.
+///
+/// Each stateful prediction that uses the same `MLState` must be serialized. Otherwise, if two such predictions run concurrently, the behavior is undefined.
+///
+API_AVAILABLE(macos(15.0), ios(18.0), watchos(11.0), tvos(18.0))
+ML_EXPORT NS_SWIFT_SENDABLE
+__attribute__((objc_subclassing_restricted))
+@interface MLState : NSObject
+
+/// Gets a mutable view into a state buffer.
+///
+/// The underlying state buffer's address can differ for each call; one shall not access the state buffer outside of the closure.
+///
+/// - Parameters:
+///   - handler: Block to access the state buffer through `MLMultiArray`.
+- (void)getMultiArrayForStateNamed:(NSString *)stateName
+                           handler:(void (NS_NOESCAPE ^)(MLMultiArray *buffer))handler
+    NS_REFINED_FOR_SWIFT
+    API_AVAILABLE(macos(15.0), ios(18.0), watchos(11.0), tvos(18.0));
+
+// This type is currently unavailable
+- (instancetype)init NS_UNAVAILABLE;
++ (instancetype)new NS_UNAVAILABLE;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff -ruN /Applications/Xcode_15.4.0.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/CoreML.framework/Headers/MLStateConstraint.h /Applications/Xcode_16.0.0-beta.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/CoreML.framework/Headers/MLStateConstraint.h
--- /Applications/Xcode_15.4.0.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/CoreML.framework/Headers/MLStateConstraint.h	1970-01-01 01:00:00
+++ /Applications/Xcode_16.0.0-beta.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/CoreML.framework/Headers/MLStateConstraint.h	2024-05-30 04:15:35
@@ -0,0 +1,30 @@
+//
+//  MLStateConstraint.h
+//  CoreML
+//
+//  Copyright © 2024 Apple Inc. All rights reserved.
+//
+
+#import <Foundation/Foundation.h>
+#import <CoreML/MLExport.h>
+#import <CoreML/MLMultiArray.h>
+
+NS_ASSUME_NONNULL_BEGIN
+
+/// Constraint of a state feature value.
+API_AVAILABLE(macos(15.0), ios(18.0), watchos(11.0), tvos(18.0))
+ML_EXPORT
+@interface MLStateConstraint : NSObject <NSSecureCoding>
+
+/// The shape of the state buffer.
+@property (readonly, nonatomic) NSArray<NSNumber *> *bufferShape
+    NS_REFINED_FOR_SWIFT
+    API_AVAILABLE(macos(15.0), ios(18.0), watchos(11.0), tvos(18.0));
+
+/// The data type of scalars in the state buffer.
+@property (readonly, nonatomic) MLMultiArrayDataType dataType
+    API_AVAILABLE(macos(15.0), ios(18.0), watchos(11.0), tvos(18.0));
+
+@end
+
+NS_ASSUME_NONNULL_END
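
Tying the additions together, a hedged Swift sketch that enumerates a loaded model's state features via the new `stateDescriptionsByName` property and reads each `MLStateConstraint`. `model` is a placeholder, and the constraint's buffer-shape property is `NS_REFINED_FOR_SWIFT`, so only `dataType` is read here:

```swift
import CoreML

// Sketch only: list the state features a stateful model declares.
func printStateFeatures(of model: MLModel) {
    for (name, feature) in model.modelDescription.stateDescriptionsByName {
        guard let constraint = feature.stateConstraint else { continue }
        print("state '\(name)': data type raw value \(constraint.dataType.rawValue)")
    }
}
```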