@lap v0.3
# Machine-readable API spec. Each @endpoint block is one API call.
# Every operation is POST "/" against the Amazon ML JSON-RPC endpoint; the
# target operation is selected by the X-Amz-Target request header (prefix
# "AmazonML_20141212."), not by the URL path. Blocks below are annotated
# with the operation name they correspond to.
@api Amazon Machine Learning
@version 2014-12-12
@auth AWS SigV4
@endpoints 28
@hint download_for_search
@toc root(28)

# Operation: AddTags
@endpoint POST /
@required {Tags: [Tag], ResourceId: str, ResourceType: str}
@returns(200) {ResourceId: str?, ResourceType: str?}

# Operation: CreateBatchPrediction
@endpoint POST /
@required {BatchPredictionId: str, MLModelId: str, BatchPredictionDataSourceId: str, OutputUri: str}
@optional {BatchPredictionName: str}
@returns(200) {BatchPredictionId: str?}

# Operation: CreateDataSourceFromRDS
@endpoint POST /
@required {DataSourceId: str, RDSData: RDSDataSpec, RoleARN: str}
@optional {DataSourceName: str, ComputeStatistics: bool}
@returns(200) {DataSourceId: str?}

# Operation: CreateDataSourceFromRedshift
@endpoint POST /
@required {DataSourceId: str, DataSpec: RedshiftDataSpec, RoleARN: str}
@optional {DataSourceName: str, ComputeStatistics: bool}
@returns(200) {DataSourceId: str?}

# Operation: CreateDataSourceFromS3
@endpoint POST /
@required {DataSourceId: str, DataSpec: S3DataSpec}
@optional {DataSourceName: str, ComputeStatistics: bool}
@returns(200) {DataSourceId: str?}

# Operation: CreateEvaluation
@endpoint POST /
@required {EvaluationId: str, MLModelId: str, EvaluationDataSourceId: str}
@optional {EvaluationName: str}
@returns(200) {EvaluationId: str?}

# Operation: CreateMLModel
@endpoint POST /
@required {MLModelId: str, MLModelType: str, TrainingDataSourceId: str}
@optional {MLModelName: str, Parameters: map<str,str>, Recipe: str, RecipeUri: str}
@returns(200) {MLModelId: str?}

# Operation: CreateRealtimeEndpoint (same request/response shape as
# DeleteRealtimeEndpoint below; identified by position in the alphabetical
# operation listing — verify against X-Amz-Target if regenerating)
@endpoint POST /
@required {MLModelId: str}
@returns(200) {MLModelId: str?, RealtimeEndpointInfo: RealtimeEndpointInfo?{PeakRequestsPerSecond: int?, CreatedAt: str(timestamp)?, EndpointUrl: str?, EndpointStatus: str?}}

# Operation: DeleteBatchPrediction
@endpoint POST /
@required {BatchPredictionId: str}
@returns(200) {BatchPredictionId: str?}

# Operation: DeleteDataSource
@endpoint POST /
@required {DataSourceId: str}
@returns(200) {DataSourceId: str?}

# Operation: DeleteEvaluation
@endpoint POST /
@required {EvaluationId: str}
@returns(200) {EvaluationId: str?}

# Operation: DeleteMLModel
@endpoint POST /
@required {MLModelId: str}
@returns(200) {MLModelId: str?}

# Operation: DeleteRealtimeEndpoint (same request/response shape as
# CreateRealtimeEndpoint above; identified by position in the alphabetical
# operation listing — verify against X-Amz-Target if regenerating)
@endpoint POST /
@required {MLModelId: str}
@returns(200) {MLModelId: str?, RealtimeEndpointInfo: RealtimeEndpointInfo?{PeakRequestsPerSecond: int?, CreatedAt: str(timestamp)?, EndpointUrl: str?, EndpointStatus: str?}}

# Operation: DeleteTags
@endpoint POST /
@required {TagKeys: [str], ResourceId: str, ResourceType: str}
@returns(200) {ResourceId: str?, ResourceType: str?}

# Operation: DescribeBatchPredictions
@endpoint POST /
@optional {FilterVariable: str, EQ: str, GT: str, LT: str, GE: str, LE: str, NE: str, Prefix: str, SortOrder: str, NextToken: str, Limit: int}
@returns(200) {Results: [BatchPrediction]?, NextToken: str?}

# Operation: DescribeDataSources
@endpoint POST /
@optional {FilterVariable: str, EQ: str, GT: str, LT: str, GE: str, LE: str, NE: str, Prefix: str, SortOrder: str, NextToken: str, Limit: int}
@returns(200) {Results: [DataSource]?, NextToken: str?}

# Operation: DescribeEvaluations
@endpoint POST /
@optional {FilterVariable: str, EQ: str, GT: str, LT: str, GE: str, LE: str, NE: str, Prefix: str, SortOrder: str, NextToken: str, Limit: int}
@returns(200) {Results: [Evaluation]?, NextToken: str?}

# Operation: DescribeMLModels
@endpoint POST /
@optional {FilterVariable: str, EQ: str, GT: str, LT: str, GE: str, LE: str, NE: str, Prefix: str, SortOrder: str, NextToken: str, Limit: int}
@returns(200) {Results: [MLModel]?, NextToken: str?}

# Operation: DescribeTags
@endpoint POST /
@required {ResourceId: str, ResourceType: str}
@returns(200) {ResourceId: str?, ResourceType: str?, Tags: [Tag]?}

# Operation: GetBatchPrediction
@endpoint POST /
@required {BatchPredictionId: str}
@returns(200) {BatchPredictionId: str?, MLModelId: str?, BatchPredictionDataSourceId: str?, InputDataLocationS3: str?, CreatedByIamUser: str?, CreatedAt: str(timestamp)?, LastUpdatedAt: str(timestamp)?, Name: str?, Status: str?, OutputUri: str?, LogUri: str?, Message: str?, ComputeTime: int(i64)?, FinishedAt: str(timestamp)?, StartedAt: str(timestamp)?, TotalRecordCount: int(i64)?, InvalidRecordCount: int(i64)?}

# Operation: GetDataSource
@endpoint POST /
@required {DataSourceId: str}
@optional {Verbose: bool}
@returns(200) {DataSourceId: str?, DataLocationS3: str?, DataRearrangement: str?, CreatedByIamUser: str?, CreatedAt: str(timestamp)?, LastUpdatedAt: str(timestamp)?, DataSizeInBytes: int(i64)?, NumberOfFiles: int(i64)?, Name: str?, Status: str?, LogUri: str?, Message: str?, RedshiftMetadata: RedshiftMetadata?{RedshiftDatabase: RedshiftDatabase?{DatabaseName: str, ClusterIdentifier: str}, DatabaseUserName: str?, SelectSqlQuery: str?}, RDSMetadata: RDSMetadata?{Database: RDSDatabase?{InstanceIdentifier: str, DatabaseName: str}, DatabaseUserName: str?, SelectSqlQuery: str?, ResourceRole: str?, ServiceRole: str?, DataPipelineId: str?}, RoleARN: str?, ComputeStatistics: bool?, ComputeTime: int(i64)?, FinishedAt: str(timestamp)?, StartedAt: str(timestamp)?, DataSourceSchema: str?}

# Operation: GetEvaluation
@endpoint POST /
@required {EvaluationId: str}
@returns(200) {EvaluationId: str?, MLModelId: str?, EvaluationDataSourceId: str?, InputDataLocationS3: str?, CreatedByIamUser: str?, CreatedAt: str(timestamp)?, LastUpdatedAt: str(timestamp)?, Name: str?, Status: str?, PerformanceMetrics: PerformanceMetrics?{Properties: map<str,str>?}, LogUri: str?, Message: str?, ComputeTime: int(i64)?, FinishedAt: str(timestamp)?, StartedAt: str(timestamp)?}

# Operation: GetMLModel
@endpoint POST /
@required {MLModelId: str}
@optional {Verbose: bool}
@returns(200) {MLModelId: str?, TrainingDataSourceId: str?, CreatedByIamUser: str?, CreatedAt: str(timestamp)?, LastUpdatedAt: str(timestamp)?, Name: str?, Status: str?, SizeInBytes: int(i64)?, EndpointInfo: RealtimeEndpointInfo?{PeakRequestsPerSecond: int?, CreatedAt: str(timestamp)?, EndpointUrl: str?, EndpointStatus: str?}, TrainingParameters: map<str,str>?, InputDataLocationS3: str?, MLModelType: str?, ScoreThreshold: num(f32)?, ScoreThresholdLastUpdatedAt: str(timestamp)?, LogUri: str?, Message: str?, ComputeTime: int(i64)?, FinishedAt: str(timestamp)?, StartedAt: str(timestamp)?, Recipe: str?, Schema: str?}

# Operation: Predict
@endpoint POST /
@required {MLModelId: str, Record: map<str,str>, PredictEndpoint: str}
@returns(200) {Prediction: Prediction?{predictedLabel: str?, predictedValue: num(f32)?, predictedScores: map<str,num(f32)>?, details: map<str,str>?}}

# Operation: UpdateBatchPrediction
@endpoint POST /
@required {BatchPredictionId: str, BatchPredictionName: str}
@returns(200) {BatchPredictionId: str?}

# Operation: UpdateDataSource
@endpoint POST /
@required {DataSourceId: str, DataSourceName: str}
@returns(200) {DataSourceId: str?}

# Operation: UpdateEvaluation
@endpoint POST /
@required {EvaluationId: str, EvaluationName: str}
@returns(200) {EvaluationId: str?}

# Operation: UpdateMLModel
@endpoint POST /
@required {MLModelId: str}
@optional {MLModelName: str, ScoreThreshold: num(f32)}
@returns(200) {MLModelId: str?}

@end
