1---
2id: post-batch-predict
3title: "Batch ML Predictions"
4description: "Perform a batch of ML predictions, using multiple models, in one request. This is useful for ensembling or A/B testing different models."
5sidebar_label: "Batch ML Predictions"
6hide_title: true
7hide_table_of_contents: true
8api: eJzNVttu2zgQ/RVinnYBxpe0Bbp6S4o+BEixQZN9MgyXpkYWW4pUeXFqGPr3xVDSWrLcbIvuwxqI4fAyc2bmzOEcIYidh2wFN3ew5pCjl07VQVkDGTygK6yrmGBbEWTJbME+3LPaYa4kHfGcRa/MjlVRB1VrZJXNUXvOlGHWIHP4NaIPM/ZUKs+UZ9FjETUrrGNoPFZbTdetYzfzWxbQB/o3V0WBDk3ozM2Ag63RCfJ5l0MGtfVhkzBtOjTAoXN2a/MDZMezUG5TACfoPTQmrQlCGfIrmFY+UJTTcz5h9jVKVSjZAQMOdB1NIIeirrWSCeT8syevR/CyxErQr3CoETKw28+Y0NaOQgoKPe0Ocjo4LJwTB+CgAlb+khECpxzmVMAEaWNEhVTIsfnB3smKD06ZHTQNh6CCpqWHFsbHNmZozgnxVOJLOeIMhSy7LB0opaHsOMGCpeKnLNLi6fYIQCrTFAV+E1WtcZKp1Tg0yJ3a48YHEfxGQMNf2N5Cs27I9SmHwUVMC762xreZu14s/p1MnklL+ALmzEcp0fsian34FX4MSzuMmUMe207YVH5a6eHmyaoyAXfogEOljKpiBdmi4f8F6yiZkVANEv2LEKeUC6pCFsQXNESjPtdnPGK/KcMqpbXyKK3J/e/n4aJz1m0q9F7sBo2w6juBg4laT2WQIKS7rLvLVJGc9xpSCKUxJ6K+1GiXzNJJaqZTn0SP+cUu6Y3v0XnVsucn8He3pr5gxIOh1Z4HvdFzPphYbVPB6I0QATIotBXfUY2RWvioA2fhUCsptD4wYVhyRuiSDU9GOmpNuo/MtXt9NAP8fJJ0NFT/FTy2jQkcbkXeawuHOxPQGaHfU4EpzDMtfGxRXFTJVieg+b6C9SdGEjZqgLdLPtG0PvIB5hdkbkIMWM4WMK7qajF7/YYvZunvzXrcodnra5LKn/G6/WGvr8jjkr5eXXBLEtxweHNJZfvKMI9uj65rQStldA5zlkcqcDeZjBgw0NyA38K81kJdVtv+CRyUB24Miwa/1ShJzs+cPpdKE98sJeiSe2IKRVRhKG0/qlBaRCghg/l+OT9NLG1gbc2j05BBGUKdzefaSqFL60P2dvHHYqIc97TNctyjtnVFg1JracbeWVOoXXTInlUo2aerK7L4aQbNmtzJ6FQ4JH+iVpsvSL/XtEcP0sfTAPX+Bx/cPoHrhoMyhU257TrBRUPCfVH3fK0k5ux0RCtJAyHd70zf1EKWeHWdWKVyNEEVCt14p+EwJOByOVtcReOD2OrUdZT9SpiB2fbt/nDPHsaP6hDi8USh/9MM3JF2wOmGt7w5dvxawX556kFSMyIRrR+PW+HxL6ebhpa/RnSp9hz2wqmULmIChxJFTlleHSHRA961qbh6Iu90XMc0JZxPMyQh7Y0bKbEOL55dDzrk4c/HJ+Cw7eZ2ipfYI55p1BDPkEGa//8ZU9LaEbQwu5gecmht0udvS4J9SQ==
9sidebar_class_name: "post api-method"
10info_path: docs/api/HTTP/runtime
11custom_edit_url: null
proxy: http://localhost:8090 # NOTE(review): dev-only proxy target — confirm this is overridden/removed for published builds
13---
14
15import MethodEndpoint from "@theme/ApiExplorer/MethodEndpoint";
16import ParamsDetails from "@theme/ParamsDetails";
17import RequestSchema from "@theme/RequestSchema";
18import StatusCodes from "@theme/StatusCodes";
19import OperationTabs from "@theme/OperationTabs";
20import TabItem from "@theme/TabItem";
21import Heading from "@theme/Heading";
22
23<Heading
24 as={"h1"}
25 className={"openapi__heading"}
26 children={"Batch ML Predictions"}
27>
28</Heading>
29
30<MethodEndpoint
31 method={"post"}
32 path={"/v1/predict"}
33 context={"endpoint"}
34>
35
36</MethodEndpoint>
37
38
39
40Perform a batch of ML predictions, using multiple models, in one request. This is useful for ensembling or A/B testing different models.
41
42<Heading
43 id={"request"}
44 as={"h2"}
45 className={"openapi-tabs__heading"}
46 children={"Request"}
47>
48</Heading>
49
50<ParamsDetails
51 parameters={undefined}
52>
53
54</ParamsDetails>
55
56<RequestSchema
57 title={"Body"}
58 body={{"description":"Batch prediction request containing a list of prediction requests for specific models","content":{"application/json":{"schema":{"type":"object","properties":{"predictions":{"type":"array","items":{"type":"object","required":["model_name"],"properties":{"model_name":{"type":"string"}},"title":"PredictRequest"},"description":"The list of prediction requests, each specifying the model to use for the prediction"}},"title":"BatchPredictRequest"},"example":{"predictions":[{"model_name":"drive_stats_a"},{"model_name":"drive_stats_b"}]}}},"required":true}}
59>
60
61</RequestSchema>
62
63<StatusCodes
64 id={undefined}
65 label={undefined}
66 responses={{"200":{"description":"Batch predictions completed successfully","content":{"application/json":{"schema":{"type":"object","required":["predictions","duration_ms"],"properties":{"duration_ms":{"type":"integer","minimum":0},"predictions":{"type":"array","items":{"type":"object","required":["status","model_name","duration_ms"],"properties":{"duration_ms":{"type":"integer","description":"The time taken to complete the prediction (in milliseconds)","minimum":0},"error_message":{"type":["string","null"],"description":"The error message if the request failed"},"model_name":{"type":"string","description":"The name of the model used for the prediction"},"model_version":{"type":["string","null"],"description":"The version of the model used"},"prediction":{"type":["array","null"],"items":{"type":"number","format":"float"},"description":"The prediction result, typically an array of floats"},"status":{"description":"The status of the prediction","type":"string","enum":["Success","BadRequest","InternalError"],"title":"PredictStatus"}},"title":"PredictResponse"}}},"title":"BatchPredictResponse"},"example":{"duration_ms":81,"predictions":[{"status":"Success","model_name":"drive_stats_a","model_version":"1.0","prediction":[0.45,0.5,0.55],"duration_ms":42},{"status":"Success","model_name":"drive_stats_b","model_version":"1.0","prediction":[0.43,0.51,0.53],"duration_ms":42}]}}}},"500":{"description":"Internal server error occurred during batch prediction","content":{"text/plain":{"schema":{"type":"string"},"example":"An unexpected error occurred while processing batch predictions"}}}}}
67>
68
69</StatusCodes>
70
71
72