Add support for OpenAI API: offline batch (file) processing (#699)

Co-authored-by: hnyls2002 <hnyls2002@gmail.com>
This commit is contained in:
yichuan~
2024-07-30 04:07:18 +08:00
committed by GitHub
parent eba458bd19
commit 084fa54d37
10 changed files with 839 additions and 154 deletions

View File

@@ -13,6 +13,17 @@ response = client.completions.create(
print(response)
# Text completion: single string prompt; sample one completion (n=1)
# of up to 32 tokens at temperature 0.8.
response = client.completions.create(
model="default",
prompt="I am a robot and I want to study like humans. Now let's tell a story. Once upon a time, there was a little",
n=1,
temperature=0.8,
max_tokens=32,
)
print(response)
# Text completion
response = client.completions.create(
model="default",
@@ -24,6 +35,17 @@ response = client.completions.create(
print(response)
# Text completion: prompt passed as a list (batch form with one entry);
# sample one completion of up to 128 tokens at temperature 0.8.
response = client.completions.create(
model="default",
prompt=["The name of the famous soccer player is"],
n=1,
temperature=0.8,
max_tokens=128,
)
print(response)
# Text completion
response = client.completions.create(
model="default",
@@ -60,6 +82,21 @@ response = client.completions.create(
)
print(response)
# Chat completion: system + user messages; request token log probabilities
# (logprobs=True) with up to 64 sampled tokens at temperature 0.8.
response = client.chat.completions.create(
model="default",
messages=[
{"role": "system", "content": "You are a helpful AI assistant"},
{"role": "user", "content": "List 3 countries and their capitals."},
],
temperature=0.8,
max_tokens=64,
logprobs=True,
n=1,
)
print(response)
# Chat completion
response = client.chat.completions.create(
model="default",