Add support for logprobs in OpenAI chat API (#852)
This commit is contained in:
@@ -106,13 +106,12 @@ response = client.chat.completions.create(
|
||||
{"role": "user", "content": "List 3 countries and their capitals."},
|
||||
],
|
||||
temperature=0.8,
|
||||
max_tokens=64,
|
||||
max_tokens=1,
|
||||
logprobs=True,
|
||||
n=1,
|
||||
top_logprobs=3,
|
||||
)
|
||||
print(response)
|
||||
|
||||
|
||||
# Chat completion
|
||||
response = client.chat.completions.create(
|
||||
model="default",
|
||||
@@ -121,8 +120,34 @@ response = client.chat.completions.create(
|
||||
{"role": "user", "content": "List 3 countries and their capitals."},
|
||||
],
|
||||
temperature=0.8,
|
||||
max_tokens=64,
|
||||
max_tokens=1,
|
||||
n=1,
|
||||
)
|
||||
print(response)
|
||||
|
||||
# Chat completion — request the model's per-token log probabilities.
# logprobs=True turns the feature on; top_logprobs=3 asks for the three
# most likely alternatives at each generated position. max_tokens=1 keeps
# the example response tiny so the logprob payload is easy to inspect.
response = client.chat.completions.create(
    model="default",
    messages=[
        {"role": "system", "content": "You are a helpful AI assistant"},
        {"role": "user", "content": "List 3 countries and their capitals."},
    ],
    temperature=0.8,
    max_tokens=1,
    logprobs=True,
    top_logprobs=3,
)
print(response)
|
||||
|
||||
# Chat completion — sample four independent completions of the same prompt.
# n=4 makes the server return four choices in one response; max_tokens=1
# limits each choice to a single generated token for a compact example.
response = client.chat.completions.create(
    model="default",
    messages=[
        {"role": "system", "content": "You are a helpful AI assistant"},
        {"role": "user", "content": "List 3 countries and their capitals."},
    ],
    temperature=0.8,
    max_tokens=1,
    n=4,
)
print(response)
|
||||
|
||||
Reference in New Issue
Block a user