
Commit 214ab44

updating model_download function name, autogen files (#155)
1 parent 9d0873a · commit 214ab44

4 files changed: +37 −2 lines changed

launch/api_client/apis/tags/default_api.py

Lines changed: 1 addition & 1 deletion
@@ -187,8 +187,8 @@ class DefaultApi(
     CreateTriggerV1TriggersPost,
     DeleteFileV1FilesFileIdDelete,
     DeleteModelEndpointV1ModelEndpointsModelEndpointIdDelete,
-    DownloadModelEndpointV1LlmModelEndpointsDownloadPost,
     DeleteTriggerV1TriggersTriggerIdDelete,
+    DownloadModelEndpointV1LlmModelEndpointsDownloadPost,
     GetAsyncInferenceTaskV1AsyncTasksTaskIdGet,
     GetBatchJobV1BatchJobsBatchJobIdGet,
     GetDockerImageBatchJobModelBundleV1DockerImageBatchJobBundlesDockerImageBatchJobBundleIdGet,

launch/api_client/model/get_llm_model_endpoint_v1_response.py

Lines changed: 20 additions & 0 deletions
@@ -36,6 +36,7 @@ class MetaOapg:
             "name",
             "id",
             "source",
+            "status",
         }
 
         class properties:
@@ -51,6 +52,10 @@ def inference_framework() -> typing.Type["LLMInferenceFramework"]:
             def source() -> typing.Type["LLMSource"]:
                 return LLMSource
 
+            @staticmethod
+            def status() -> typing.Type["ModelEndpointStatus"]:
+                return ModelEndpointStatus
+
             inference_framework_image_tag = schemas.StrSchema
             model_name = schemas.StrSchema
             num_shards = schemas.IntSchema
@@ -68,6 +73,7 @@ def spec() -> typing.Type["GetModelEndpointV1Response"]:
                 "inference_framework": inference_framework,
                 "name": name,
                 "source": source,
+                "status": status,
                 "inference_framework_image_tag": inference_framework_image_tag,
                 "model_name": model_name,
                 "num_shards": num_shards,
@@ -79,6 +85,7 @@ def spec() -> typing.Type["GetModelEndpointV1Response"]:
     name: MetaOapg.properties.name
     id: MetaOapg.properties.id
     source: "LLMSource"
+    status: "ModelEndpointStatus"
 
     @typing.overload
     def __getitem__(self, name: typing_extensions.Literal["id"]) -> MetaOapg.properties.id:
@@ -96,6 +103,10 @@ def __getitem__(self, name: typing_extensions.Literal["name"]) -> MetaOapg.prope
     def __getitem__(self, name: typing_extensions.Literal["source"]) -> "LLMSource":
         ...
 
+    @typing.overload
+    def __getitem__(self, name: typing_extensions.Literal["status"]) -> "ModelEndpointStatus":
+        ...
+
     @typing.overload
     def __getitem__(
         self, name: typing_extensions.Literal["inference_framework_image_tag"]
@@ -130,6 +141,7 @@ def __getitem__(
                 "inference_framework",
                 "name",
                 "source",
+                "status",
                 "inference_framework_image_tag",
                 "model_name",
                 "num_shards",
@@ -158,6 +170,10 @@ def get_item_oapg(self, name: typing_extensions.Literal["name"]) -> MetaOapg.pro
     def get_item_oapg(self, name: typing_extensions.Literal["source"]) -> "LLMSource":
         ...
 
+    @typing.overload
+    def get_item_oapg(self, name: typing_extensions.Literal["status"]) -> "ModelEndpointStatus":
+        ...
+
     @typing.overload
     def get_item_oapg(
         self, name: typing_extensions.Literal["inference_framework_image_tag"]
@@ -198,6 +214,7 @@ def get_item_oapg(
                 "inference_framework",
                 "name",
                 "source",
+                "status",
                 "inference_framework_image_tag",
                 "model_name",
                 "num_shards",
@@ -225,6 +242,7 @@ def __new__(
             str,
         ],
         source: "LLMSource",
+        status: "ModelEndpointStatus",
         inference_framework_image_tag: typing.Union[
             MetaOapg.properties.inference_framework_image_tag, str, schemas.Unset
         ] = schemas.unset,
@@ -257,6 +275,7 @@ def __new__(
             name=name,
            id=id,
             source=source,
+            status=status,
             inference_framework_image_tag=inference_framework_image_tag,
             model_name=model_name,
             num_shards=num_shards,
@@ -274,4 +293,5 @@ def __new__(
     LLMInferenceFramework,
 )
 from launch.api_client.model.llm_source import LLMSource
+from launch.api_client.model.model_endpoint_status import ModelEndpointStatus
 from launch.api_client.model.quantization import Quantization
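
The generated response schema now lists "status" as a required key and types it as ModelEndpointStatus, so the endpoint's state can be read straight off the response object. A minimal sketch, assuming `endpoint` is a GetLLMModelEndpointV1Response already returned by the generated API client (it is not constructed here):

    # Sketch only: `endpoint` is assumed to come from the generated client;
    # it is not built by hand in this example.
    status = endpoint["status"]                      # typed as "ModelEndpointStatus" by the new __getitem__ overload
    same_status = endpoint.get_item_oapg("status")   # equivalent typed accessor added in this commit
    print(status)                                    # prints the endpoint's status value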

launch/api_client/model/get_llm_model_endpoint_v1_response.pyi

Lines changed: 15 additions & 0 deletions
@@ -34,6 +34,7 @@ class GetLLMModelEndpointV1Response(schemas.DictSchema):
             "name",
             "id",
             "source",
+            "status",
         }
 
         class properties:
@@ -47,6 +48,9 @@ class GetLLMModelEndpointV1Response(schemas.DictSchema):
             @staticmethod
             def source() -> typing.Type["LLMSource"]:
                 return LLMSource
+            @staticmethod
+            def status() -> typing.Type["ModelEndpointStatus"]:
+                return ModelEndpointStatus
             inference_framework_image_tag = schemas.StrSchema
             model_name = schemas.StrSchema
             num_shards = schemas.IntSchema
@@ -62,6 +66,7 @@ class GetLLMModelEndpointV1Response(schemas.DictSchema):
                 "inference_framework": inference_framework,
                 "name": name,
                 "source": source,
+                "status": status,
                 "inference_framework_image_tag": inference_framework_image_tag,
                 "model_name": model_name,
                 "num_shards": num_shards,
@@ -72,6 +77,7 @@ class GetLLMModelEndpointV1Response(schemas.DictSchema):
     name: MetaOapg.properties.name
     id: MetaOapg.properties.id
     source: "LLMSource"
+    status: "ModelEndpointStatus"
 
     @typing.overload
     def __getitem__(self, name: typing_extensions.Literal["id"]) -> MetaOapg.properties.id: ...
@@ -82,6 +88,8 @@ class GetLLMModelEndpointV1Response(schemas.DictSchema):
     @typing.overload
     def __getitem__(self, name: typing_extensions.Literal["source"]) -> "LLMSource": ...
     @typing.overload
+    def __getitem__(self, name: typing_extensions.Literal["status"]) -> "ModelEndpointStatus": ...
+    @typing.overload
     def __getitem__(
         self, name: typing_extensions.Literal["inference_framework_image_tag"]
     ) -> MetaOapg.properties.inference_framework_image_tag: ...
@@ -103,6 +111,7 @@ class GetLLMModelEndpointV1Response(schemas.DictSchema):
                 "inference_framework",
                 "name",
                 "source",
+                "status",
                 "inference_framework_image_tag",
                 "model_name",
                 "num_shards",
@@ -123,6 +132,8 @@ class GetLLMModelEndpointV1Response(schemas.DictSchema):
     @typing.overload
     def get_item_oapg(self, name: typing_extensions.Literal["source"]) -> "LLMSource": ...
     @typing.overload
+    def get_item_oapg(self, name: typing_extensions.Literal["status"]) -> "ModelEndpointStatus": ...
+    @typing.overload
     def get_item_oapg(
         self, name: typing_extensions.Literal["inference_framework_image_tag"]
     ) -> typing.Union[MetaOapg.properties.inference_framework_image_tag, schemas.Unset]: ...
@@ -152,6 +163,7 @@ class GetLLMModelEndpointV1Response(schemas.DictSchema):
                 "inference_framework",
                 "name",
                 "source",
+                "status",
                 "inference_framework_image_tag",
                 "model_name",
                 "num_shards",
@@ -178,6 +190,7 @@ class GetLLMModelEndpointV1Response(schemas.DictSchema):
             str,
         ],
         source: "LLMSource",
+        status: "ModelEndpointStatus",
         inference_framework_image_tag: typing.Union[
             MetaOapg.properties.inference_framework_image_tag, str, schemas.Unset
         ] = schemas.unset,
@@ -210,6 +223,7 @@ class GetLLMModelEndpointV1Response(schemas.DictSchema):
             name=name,
             id=id,
             source=source,
+            status=status,
             inference_framework_image_tag=inference_framework_image_tag,
             model_name=model_name,
             num_shards=num_shards,
@@ -224,4 +238,5 @@ from launch_client.model.get_model_endpoint_v1_response import (
 )
 from launch_client.model.llm_inference_framework import LLMInferenceFramework
 from launch_client.model.llm_source import LLMSource
+from launch_client.model.model_endpoint_status import ModelEndpointStatus
 from launch_client.model.quantization import Quantization

launch/client.py

Lines changed: 1 addition & 1 deletion
@@ -3252,7 +3252,7 @@ def get_file_content(
 
         return resp
 
-    def download_model_weights(self, model_name: str, download_format: str = "huggingface") -> ModelDownloadResponse:
+    def model_download(self, model_name: str, download_format: str = "hugging_face") -> ModelDownloadResponse:
         """
         download a finetuned model

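For reference, a minimal calling sketch of the renamed method. The client construction and model name below are illustrative assumptions, not part of the commit; only the method name `model_download`, the `download_format` parameter with its new `"hugging_face"` default, and the `ModelDownloadResponse` return type come from the diff above.

    from launch import LaunchClient

    # Assumed setup: a configured client; the API key and model name are placeholders.
    client = LaunchClient(api_key="YOUR_API_KEY")

    # Old spelling (pre-commit): client.download_model_weights(..., download_format="huggingface")
    # New spelling (this commit):
    response = client.model_download("my-finetuned-model", download_format="hugging_face")
    print(response)  # a ModelDownloadResponse for the finetuned model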