Skip to content

Commit 535e2be

Browse files
Update 2025-06-10 (friendliai#64)
Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
1 parent dbe0fb9 commit 535e2be

42 files changed

Lines changed: 2025 additions & 1411 deletions

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

.speakeasy/gen.lock

Lines changed: 7 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -3,18 +3,18 @@ id: 37076cef-26b6-4da9-bb49-e710061cab77
33
management:
44
docChecksum: 394eead63f70eaaeb118d6663db1a426
55
docVersion: 0.1.0
6-
speakeasyVersion: 1.548.3
7-
generationVersion: 2.605.6
8-
releaseVersion: 0.10.1
9-
configChecksum: da867d28e85c89ff5b8a4ec27ab8b40e
6+
speakeasyVersion: 1.558.1
7+
generationVersion: 2.623.4
8+
releaseVersion: 0.10.2
9+
configChecksum: 19a1e6846115e745c5119596f66e9c1d
1010
repoURL: https://github.com/friendliai/friendli-python.git
1111
installationURL: https://github.com/friendliai/friendli-python.git
1212
published: true
1313
features:
1414
python:
1515
additionalDependencies: 1.0.0
1616
constsAndDefaults: 1.0.5
17-
core: 5.18.1
17+
core: 5.19.2
1818
defaultEnabledRetries: 0.2.0
1919
enumUnions: 0.1.0
2020
envVarSecurityUsage: 0.3.2
@@ -32,7 +32,7 @@ features:
3232
nullables: 1.0.1
3333
responseFormat: 1.0.1
3434
retries: 3.0.2
35-
sdkHooks: 1.0.1
35+
sdkHooks: 1.1.0
3636
serverEvents: 1.0.7
3737
serverEventsSentinels: 0.1.0
3838
unions: 3.0.4
@@ -1138,7 +1138,7 @@ examples:
11381138
dataset_id: "<id>"
11391139
split_id: "<id>"
11401140
requestBody:
1141-
multipart/form-data: {"file": {"": "x-file: example.file"}}
1141+
multipart/form-data: {"file": "x-file: example.file"}
11421142
responses:
11431143
"200":
11441144
application/json: {"samples": []}

.speakeasy/gen.yaml

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,6 +4,7 @@ generation:
44
maintainOpenAPIOrder: true
55
usageSnippets:
66
optionalPropertyRendering: withExample
7+
sdkInitStyle: constructor
78
useClassNamesForArrayFields: true
89
fixes:
910
nameResolutionDec2023: true
@@ -16,12 +17,13 @@ generation:
1617
oAuth2ClientCredentialsEnabled: true
1718
oAuth2PasswordEnabled: true
1819
python:
19-
version: 0.10.1
20+
version: 0.10.2
2021
additionalDependencies:
2122
dev: {}
2223
main: {}
2324
authors:
2425
- Speakeasy
26+
baseErrorName: FriendliCoreError
2527
clientServerStatusCodesAsErrors: true
2628
defaultErrorName: SDKError
2729
description: Python Client SDK Generated by Speakeasy.

.speakeasy/workflow.lock

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
speakeasyVersion: 1.548.3
1+
speakeasyVersion: 1.558.1
22
sources:
33
Friendli-API-Schema:
44
sourceNamespace: friendli-api-schema
@@ -13,10 +13,10 @@ targets:
1313
sourceRevisionDigest: sha256:a08513faaad341054f136a6954151ad144fa18b593e64ed3a9bb0079b49a1bfb
1414
sourceBlobDigest: sha256:9fd7c5cd47290161ef36478f317089db122919843273f8b426fef6bc85b5c3fe
1515
codeSamplesNamespace: friendli-api-schema-code-samples
16-
codeSamplesRevisionDigest: sha256:d1b929640e67dba633c588e7f4a8ce925a6f71914d79dd5b1c12ba62f3d58999
16+
codeSamplesRevisionDigest: sha256:32773fbf55f9c5ef27e7b18a61cc62e3d65dc1e44d9505eb8169bc7d9efcb253
1717
workflow:
1818
workflowVersion: 1.0.0
19-
speakeasyVersion: 1.548.3
19+
speakeasyVersion: latest
2020
sources:
2121
Friendli-API-Schema:
2222
inputs:

.speakeasy/workflow.yaml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
workflowVersion: 1.0.0
2-
speakeasyVersion: 1.548.3
2+
speakeasyVersion: latest
33
sources:
44
Friendli-API-Schema:
55
inputs:

README.md

Lines changed: 12 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -129,6 +129,7 @@ with SyncFriendli(
129129
],
130130
model="meta-llama-3.1-8b-instruct",
131131
max_tokens=200,
132+
stream=False,
132133
)
133134

134135
# Handle response
@@ -163,6 +164,7 @@ async def main():
163164
],
164165
model="meta-llama-3.1-8b-instruct",
165166
max_tokens=200,
167+
stream=False,
166168
)
167169

168170
# Handle response
@@ -195,6 +197,7 @@ with SyncFriendli(
195197
],
196198
model="meta-llama-3.1-8b-instruct",
197199
max_tokens=200,
200+
stream=False,
198201
tools=[
199202
{
200203
"type": "math:calculator",
@@ -230,6 +233,7 @@ async def main():
230233
],
231234
model="meta-llama-3.1-8b-instruct",
232235
max_tokens=200,
236+
stream=False,
233237
tools=[
234238
{
235239
"type": "math:calculator",
@@ -278,6 +282,7 @@ with SyncFriendli(
278282
],
279283
max_tokens=200,
280284
model="(adapter-route)",
285+
stream=False,
281286
)
282287

283288
# Handle response
@@ -451,6 +456,7 @@ with SyncFriendli(
451456
],
452457
model="(endpoint-id)",
453458
max_tokens=200,
459+
stream=True,
454460
)
455461

456462
with res as event_stream:
@@ -524,6 +530,7 @@ with SyncFriendli(
524530
],
525531
max_tokens=200,
526532
model="(adapter-route)",
533+
stream=False,
527534
retries=RetryConfig("backoff", BackoffStrategy(1, 50, 1.1, 100), False),
528535
)
529536

@@ -555,6 +562,7 @@ with SyncFriendli(
555562
],
556563
max_tokens=200,
557564
model="(adapter-route)",
565+
stream=False,
558566
)
559567

560568
# Handle response
@@ -624,7 +632,7 @@ with SyncFriendli(
624632
server_url="https://api.friendli.ai",
625633
token=os.getenv("FRIENDLI_TOKEN", ""),
626634
) as friendli:
627-
res = friendli.dedicated.chat.complete(
635+
res = friendli.container.chat.complete(
628636
messages=[
629637
{
630638
"content": "You are a helpful assistant.",
@@ -637,6 +645,7 @@ with SyncFriendli(
637645
],
638646
max_tokens=200,
639647
model="(adapter-route)",
648+
stream=False,
640649
)
641650

642651
# Handle response
@@ -654,7 +663,7 @@ from friendli import SyncFriendli
654663
with SyncFriendli(
655664
token=os.getenv("FRIENDLI_TOKEN", ""),
656665
) as friendli:
657-
res = friendli.dedicated.chat.complete(
666+
res = friendli.container.chat.complete(
658667
messages=[
659668
{
660669
"content": "You are a helpful assistant.",
@@ -667,6 +676,7 @@ with SyncFriendli(
667676
],
668677
max_tokens=200,
669678
model="(adapter-route)",
679+
stream=False,
670680
server_url="http://localhost:8000",
671681
)
672682

USAGE.md

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -26,6 +26,7 @@ with SyncFriendli(
2626
],
2727
model="meta-llama-3.1-8b-instruct",
2828
max_tokens=200,
29+
stream=False,
2930
)
3031

3132
# Handle response
@@ -60,6 +61,7 @@ async def main():
6061
],
6162
model="meta-llama-3.1-8b-instruct",
6263
max_tokens=200,
64+
stream=False,
6365
)
6466

6567
# Handle response
@@ -92,6 +94,7 @@ with SyncFriendli(
9294
],
9395
model="meta-llama-3.1-8b-instruct",
9496
max_tokens=200,
97+
stream=False,
9598
tools=[
9699
{
97100
"type": "math:calculator",
@@ -127,6 +130,7 @@ async def main():
127130
],
128131
model="meta-llama-3.1-8b-instruct",
129132
max_tokens=200,
133+
stream=False,
130134
tools=[
131135
{
132136
"type": "math:calculator",

docs/sdks/chat/README.md

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -35,6 +35,7 @@ with SyncFriendli(
3535
],
3636
max_tokens=200,
3737
model="(adapter-route)",
38+
stream=False,
3839
)
3940

4041
# Handle response
@@ -109,6 +110,7 @@ with SyncFriendli(
109110
],
110111
max_tokens=200,
111112
model="(adapter-route)",
113+
stream=True,
112114
)
113115

114116
with res as event_stream:

docs/sdks/dataset/README.md

Lines changed: 5 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -79,7 +79,7 @@ from friendli import SyncFriendli
7979
with SyncFriendli(
8080
token=os.getenv("FRIENDLI_TOKEN", ""),
8181
) as friendli:
82-
res = friendli.dataset.list_datasets(project_id="<id>")
82+
res = friendli.dataset.list_datasets(project_id="<id>", limit=20)
8383

8484
# Handle response
8585
print(res)
@@ -402,7 +402,7 @@ from friendli import SyncFriendli
402402
with SyncFriendli(
403403
token=os.getenv("FRIENDLI_TOKEN", ""),
404404
) as friendli:
405-
res = friendli.dataset.list_splits(dataset_id="494482")
405+
res = friendli.dataset.list_splits(dataset_id="494482", limit=20)
406406

407407
# Handle response
408408
print(res)
@@ -574,7 +574,9 @@ from friendli import SyncFriendli
574574
with SyncFriendli(
575575
token=os.getenv("FRIENDLI_TOKEN", ""),
576576
) as friendli:
577-
res = friendli.dataset.list_samples(dataset_id="282743", split_id="505420")
577+
res = friendli.dataset.list_samples(
578+
dataset_id="282743", split_id="505420", limit=20
579+
)
578580

579581
# Handle response
580582
print(res)

docs/sdks/endpoint/README.md

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -139,7 +139,7 @@ from friendli import SyncFriendli
139139
with SyncFriendli(
140140
token=os.getenv("FRIENDLI_TOKEN", ""),
141141
) as friendli:
142-
res = friendli.dedicated.endpoint.list()
142+
res = friendli.dedicated.endpoint.list(project_id="", limit=20)
143143

144144
# Handle response
145145
print(res)
@@ -302,7 +302,7 @@ from friendli import SyncFriendli
302302
with SyncFriendli(
303303
token=os.getenv("FRIENDLI_TOKEN", ""),
304304
) as friendli:
305-
res = friendli.dedicated.endpoint.get_version_history(endpoint_id="<id>")
305+
res = friendli.dedicated.endpoint.get_version_history(endpoint_id="<id>", limit=20)
306306

307307
# Handle response
308308
print(res)
@@ -587,7 +587,7 @@ from friendli import SyncFriendli
587587
with SyncFriendli(
588588
token=os.getenv("FRIENDLI_TOKEN", ""),
589589
) as friendli:
590-
res = friendli.dedicated.endpoint.list_beta()
590+
res = friendli.dedicated.endpoint.list_beta(project_id="", limit=20)
591591

592592
# Handle response
593593
print(res)

docs/sdks/friendlicorechat/README.md

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -35,6 +35,7 @@ with SyncFriendli(
3535
],
3636
model="(endpoint-id)",
3737
max_tokens=200,
38+
stream=False,
3839
)
3940

4041
# Handle response
@@ -109,6 +110,7 @@ with SyncFriendli(
109110
],
110111
model="(endpoint-id)",
111112
max_tokens=200,
113+
stream=True,
112114
)
113115

114116
with res as event_stream:

0 commit comments

Comments (0)