release initial code
Co-authored-by: Ying Sheng <sqy1415@gmail.com>
Co-authored-by: Liangsheng Yin <hnyls2002@gmail.com>
Co-authored-by: Zhiqiang Xie <xiezhq@stanford.edu>
Co-authored-by: parasol-aser <3848358+parasol-aser@users.noreply.github.com>
Co-authored-by: LiviaSun <33578456+ChuyueSun@users.noreply.github.com>
Co-authored-by: Cody Yu <hao.yu.cody@gmail.com>
examples/quick_start/anthropic_example_chat.py (new file, 19 lines)
@@ -0,0 +1,19 @@
from sglang import function, system, user, assistant, gen, set_default_backend, Anthropic


@function
def multi_turn_question(s, question_1, question_2):
    s += user(question_1)
    s += assistant(gen("answer_1", max_tokens=256))
    s += user(question_2)
    s += assistant(gen("answer_2", max_tokens=256))


set_default_backend(Anthropic("claude-2"))

state = multi_turn_question.run(
    question_1="What is the capital of the United States?",
    question_2="List two local attractions.",
)

for m in state.messages():
    print(m["role"], ":", m["content"])
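Each gen(...) call above stores its output under the variable name it was given, so the answers can also be read back from the returned state by key (the completion examples below use the same state["..."] access). A minimal follow-up sketch using the state object from this example:

# Read back individual generations by the names passed to gen().
print(state["answer_1"])  # answer to question_1
print(state["answer_2"])  # answer to question_2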
examples/quick_start/anthropic_example_complete.py (new file, 26 lines)
@@ -0,0 +1,26 @@
from sglang import function, gen, set_default_backend, Anthropic


@function
def few_shot_qa(s, question):
    s += (
        """
\n\nHuman: What is the capital of France?
\n\nAssistant: Paris
\n\nHuman: What is the capital of Germany?
\n\nAssistant: Berlin
\n\nHuman: What is the capital of Italy?
\n\nAssistant: Rome
""")
    s += "\n\nHuman: " + question + "\n"
    s += "\n\nAssistant:" + gen("answer", stop="\n", temperature=0)


set_default_backend(Anthropic("claude-2"))

state = few_shot_qa.run(question="What is the capital of the United States?")
answer = state["answer"].strip().lower()

assert "washington" in answer, f"answer: {state['answer']}"

print(state.text())
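A note on the prompt format above: the claude-2 text-completion API expects alternating "\n\nHuman:" / "\n\nAssistant:" turns, which is why the few-shot prompt writes the \n\n escapes out explicitly. A small sketch for inspecting the rendered prompt, assuming state.text() returns the full rolled-out string as it does at the end of this file:

# Print the assembled prompt with escape sequences visible, to verify
# the Human/Assistant turn structure required by the completion API.
print(repr(state.text()))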
examples/quick_start/anthropic_example_stream.py (new file, 20 lines)
@@ -0,0 +1,20 @@
from sglang import function, system, user, assistant, gen, set_default_backend, Anthropic


@function
def multi_turn_question(s, question_1, question_2):
    s += user(question_1)
    s += assistant(gen("answer_1", max_tokens=256))
    s += user(question_2)
    s += assistant(gen("answer_2", max_tokens=256))


set_default_backend(Anthropic("claude-2"))

state = multi_turn_question.run(
    question_1="What is the capital of the United States?",
    question_2="List two local attractions.",
    stream=True,
)

for out in state.text_iter():
    print(out, end="", flush=True)
examples/quick_start/more_stream_methods.py (new file, 44 lines)
@@ -0,0 +1,44 @@
import asyncio
import sglang as sgl


@sgl.function
def multi_turn_question(s, question_1, question_2):
    s += sgl.system("You are a helpful assistant.")
    s += sgl.user(question_1)
    s += sgl.assistant(sgl.gen("answer_1", max_tokens=256))
    s += sgl.user(question_2)
    s += sgl.assistant(sgl.gen("answer_2", max_tokens=256))


sgl.set_default_backend(sgl.OpenAI("gpt-3.5-turbo"))
# sgl.set_default_backend(sgl.RuntimeEndpoint("http://localhost:30000"))


def stream_a_variable():
    state = multi_turn_question.run(
        question_1="What is the capital of the United States?",
        question_2="List two local attractions.",
        stream=True,
    )

    for out in state.text_iter(var_name="answer_2"):
        print(out, end="", flush=True)
    print()


async def async_stream():
    state = multi_turn_question.run(
        question_1="What is the capital of the United States?",
        question_2="List two local attractions.",
        stream=True,
    )

    async for out in state.text_async_iter(var_name="answer_2"):
        print(out, end="", flush=True)
    print()


if __name__ == "__main__":
    # stream_a_variable()
    asyncio.run(async_stream())
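Each run() call above returns an independent state with its own stream, so several streams can be consumed concurrently. A hypothetical sketch built only on the async_stream coroutine defined in this file (the two outputs will interleave on stdout):

# Consume two independent streaming runs concurrently; each state owns
# its own async iterator, so the coroutines do not interfere.
async def stream_two():
    await asyncio.gather(async_stream(), async_stream())

# asyncio.run(stream_two())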
examples/quick_start/openai_example_chat.py (new file, 20 lines)
@@ -0,0 +1,20 @@
from sglang import function, system, user, assistant, gen, set_default_backend, OpenAI


@function
def multi_turn_question(s, question_1, question_2):
    s += system("You are a helpful assistant.")
    s += user(question_1)
    s += assistant(gen("answer_1", max_tokens=256))
    s += user(question_2)
    s += assistant(gen("answer_2", max_tokens=256))


set_default_backend(OpenAI("gpt-3.5-turbo"))

state = multi_turn_question.run(
    question_1="What is the capital of the United States?",
    question_2="List two local attractions.",
)

for m in state.messages():
    print(m["role"], ":", m["content"])
examples/quick_start/openai_example_complete.py (new file, 26 lines)
@@ -0,0 +1,26 @@
from sglang import function, gen, set_default_backend, OpenAI


@function
def few_shot_qa(s, question):
    s += (
        """The following are questions with answers.
Q: What is the capital of France?
A: Paris
Q: What is the capital of Germany?
A: Berlin
Q: What is the capital of Italy?
A: Rome
""")
    s += "Q: " + question + "\n"
    s += "A:" + gen("answer", stop="\n", temperature=0)


set_default_backend(OpenAI("gpt-3.5-turbo-instruct"))

state = few_shot_qa.run(question="What is the capital of the United States?")
answer = state["answer"].strip().lower()

assert "washington" in answer, f"answer: {state['answer']}"

print(state.text())
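The same few-shot function can be fanned out over many inputs. A hypothetical sketch, assuming a run_batch helper that accepts a list of keyword-argument dicts; that helper does not appear in this commit, so treat both the name and the signature as assumptions:

# Hypothetical batched execution of few_shot_qa over several questions.
states = few_shot_qa.run_batch(
    [
        {"question": "What is the capital of Spain?"},
        {"question": "What is the capital of Japan?"},
    ]
)
for s in states:
    print(s["answer"])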
examples/quick_start/openai_example_stream.py (new file, 21 lines)
@@ -0,0 +1,21 @@
from sglang import function, system, user, assistant, gen, set_default_backend, OpenAI


@function
def multi_turn_question(s, question_1, question_2):
    s += system("You are a helpful assistant.")
    s += user(question_1)
    s += assistant(gen("answer_1", max_tokens=256))
    s += user(question_2)
    s += assistant(gen("answer_2", max_tokens=256))


set_default_backend(OpenAI("gpt-3.5-turbo"))

state = multi_turn_question.run(
    question_1="What is the capital of the United States?",
    question_2="List two local attractions.",
    stream=True,
)

for out in state.text_iter():
    print(out, end="", flush=True)
examples/quick_start/srt_example_chat.py (new file, 26 lines)
@@ -0,0 +1,26 @@
from sglang import function, system, user, assistant, gen, set_default_backend, Runtime


@function
def multi_turn_question(s, question_1, question_2):
    s += system("You are a helpful assistant.")
    s += user(question_1)
    s += assistant(gen("answer_1", max_tokens=256))
    s += user(question_2)
    s += assistant(gen("answer_2", max_tokens=256))


runtime = Runtime(model_path="meta-llama/Llama-2-7b-chat-hf")
# runtime = Runtime(model_path="mistralai/Mixtral-8x7B-Instruct-v0.1")
set_default_backend(runtime)

state = multi_turn_question.run(
    question_1="What is the capital of the United States?",
    question_2="List two local attractions.",
)

for m in state.messages():
    print(m["role"], ":", m["content"])


runtime.shutdown()
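Because Runtime launches a local model server, it is worth ensuring shutdown() runs even if the program fails partway. A sketch using plain try/finally and only the APIs shown above:

runtime = Runtime(model_path="meta-llama/Llama-2-7b-chat-hf")
set_default_backend(runtime)
try:
    state = multi_turn_question.run(
        question_1="What is the capital of the United States?",
        question_2="List two local attractions.",
    )
finally:
    # Release the local runtime even if run() raises.
    runtime.shutdown()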
examples/quick_start/srt_example_complete.py (new file, 28 lines)
@@ -0,0 +1,28 @@
from sglang import function, gen, set_default_backend, Runtime


@function
def few_shot_qa(s, question):
    s += (
        """The following are questions with answers.
Q: What is the capital of France?
A: Paris
Q: What is the capital of Germany?
A: Berlin
Q: What is the capital of Italy?
A: Rome
""")
    s += "Q: " + question + "\n"
    s += "A:" + gen("answer", stop="\n", temperature=0)


runtime = Runtime(model_path="meta-llama/Llama-2-7b-chat-hf")
set_default_backend(runtime)

state = few_shot_qa.run(question="What is the capital of the United States?")

answer = state["answer"].strip().lower()
assert "washington" in answer, f"answer: {state['answer']}"
print(state.text())

runtime.shutdown()
examples/quick_start/srt_example_regex.py (new file, 21 lines)
@@ -0,0 +1,21 @@
from sglang import function, gen, set_default_backend, Runtime


@function
def regex_gen(s):
    s += "Q: What is the IP address of the Google DNS servers?\n"
    s += "A: " + gen(
        "answer",
        temperature=0,
        regex=r"((25[0-5]|2[0-4]\d|[01]?\d\d?)\.){3}(25[0-5]|2[0-4]\d|[01]?\d\d?)",
    )


runtime = Runtime(model_path="meta-llama/Llama-2-7b-chat-hf")
set_default_backend(runtime)

state = regex_gen.run()

print(state.text())

runtime.shutdown()
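Constrained decoding makes the generated span match the regex by construction; a quick sanity check with the standard re module (a sketch, assuming state["answer"] holds just the generated span, as in the other examples):

import re

ip_pattern = r"((25[0-5]|2[0-4]\d|[01]?\d\d?)\.){3}(25[0-5]|2[0-4]\d|[01]?\d\d?)"
assert re.fullmatch(ip_pattern, state["answer"]), state["answer"]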
examples/quick_start/srt_example_stream.py (new file, 26 lines)
@@ -0,0 +1,26 @@
from sglang import function, system, user, assistant, gen, set_default_backend, Runtime


@function
def multi_turn_question(s, question_1, question_2):
    s += system("You are a helpful assistant.")
    s += user(question_1)
    s += assistant(gen("answer_1", max_tokens=256))
    s += user(question_2)
    s += assistant(gen("answer_2", max_tokens=256))


runtime = Runtime("meta-llama/Llama-2-7b-chat-hf")
set_default_backend(runtime)

state = multi_turn_question.run(
    question_1="What is the capital of the United States?",
    question_2="List two local attractions.",
    temperature=0,
    stream=True,
)

for out in state.text_iter():
    print(out, end="", flush=True)
print()

runtime.shutdown()