2024-06-22 00:45:33 -07:00
|
|
|
"""
|
|
|
|
|
Usage:
|
2024-12-08 23:30:51 -08:00
|
|
|
python3 -m sglang.launch_server --model-path /model/llama-classification --is-embedding --disable-radix-cache
|
2024-06-25 03:38:04 -07:00
|
|
|
|
2024-06-22 00:45:33 -07:00
|
|
|
python3 test_httpserver_classify.py
|
|
|
|
|
"""
|
|
|
|
|
|
|
|
|
|
import argparse
|
|
|
|
|
|
|
|
|
|
import numpy as np
|
|
|
|
|
import requests
|
|
|
|
|
|
|
|
|
|
|
2024-12-08 23:30:51 -08:00
|
|
|
def get_logits_deprecated(url: str, prompt: str):
    """Score one prompt through the legacy ``/generate`` endpoint.

    Sends a prompt-only request (no new tokens generated) and reads the
    normalized prompt logprob from the response metadata.

    Deprecated: prefer :func:`get_logits`, which uses ``/classify``.
    """
    payload = {
        "text": prompt,
        # max_new_tokens=0: we only want the prompt scored, not a completion.
        "sampling_params": {"max_new_tokens": 0},
        "return_logprob": True,
    }
    response = requests.post(url + "/generate", json=payload)
    return response.json()["meta_info"]["normalized_prompt_logprob"]
|
|
|
|
|
|
|
|
|
|
|
2024-12-08 23:30:51 -08:00
|
|
|
def get_logits_batch_deprecated(url: str, prompts: list[str]):
    """Score a batch of prompts through the legacy ``/generate`` endpoint.

    Deprecated: prefer :func:`get_logits_batch`, which uses ``/classify``.

    Args:
        url: Base server URL, e.g. ``"http://127.0.0.1:30000"``.
        prompts: Prompts scored together as one batched request.

    Returns:
        A 1-D ``np.ndarray`` with one normalized prompt logprob per prompt.
    """
    response = requests.post(
        url + "/generate",
        json={
            "text": prompts,
            # max_new_tokens=0: score the prompts only, generate nothing.
            "sampling_params": {"max_new_tokens": 0},
            "return_logprob": True,
        },
    )
    ret = response.json()
    # ret is a list with one entry per prompt; a plain list comprehension
    # replaces the original list(generator over range(len(prompts))) form.
    logits = np.array(
        [item["meta_info"]["normalized_prompt_logprob"] for item in ret]
    )
    return logits
|
|
|
|
|
|
|
|
|
|
|
2024-12-08 23:30:51 -08:00
|
|
|
def get_logits(url: str, prompt: str):
    """Return the classification output ("embedding") for a single prompt.

    POSTs the prompt to the server's ``/classify`` endpoint and extracts
    the ``embedding`` field from the JSON response.
    """
    resp = requests.post(url + "/classify", json={"text": prompt})
    body = resp.json()
    return body["embedding"]
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def get_logits_batch(url: str, prompts: list[str]):
    """Return classification outputs for a batch of prompts.

    POSTs all prompts to ``/classify`` in one request and stacks each
    result's ``embedding`` field into a 2-D ``np.ndarray``.
    """
    resp = requests.post(url + "/classify", json={"text": prompts})
    records = resp.json()
    embeddings = [record["embedding"] for record in records]
    return np.array(embeddings)
|
|
|
|
|
|
|
|
|
|
|
2024-06-22 00:45:33 -07:00
|
|
|
if __name__ == "__main__":
|
|
|
|
|
parser = argparse.ArgumentParser()
|
|
|
|
|
parser.add_argument("--host", type=str, default="http://127.0.0.1")
|
|
|
|
|
parser.add_argument("--port", type=int, default=30000)
|
|
|
|
|
args = parser.parse_args()
|
|
|
|
|
|
|
|
|
|
url = f"{args.host}:{args.port}"
|
|
|
|
|
|
|
|
|
|
# A single request
|
|
|
|
|
prompt = "This is a test prompt.<|eot_id|>"
|
|
|
|
|
logits = get_logits(url, prompt)
|
|
|
|
|
print(f"{logits=}")
|
|
|
|
|
|
|
|
|
|
# A batch of requests
|
|
|
|
|
prompts = [
|
|
|
|
|
"This is a test prompt.<|eot_id|>",
|
|
|
|
|
"This is another test prompt.<|eot_id|>",
|
|
|
|
|
"This is a long long long long test prompt.<|eot_id|>",
|
|
|
|
|
]
|
|
|
|
|
logits = get_logits_batch(url, prompts)
|
2024-07-05 10:06:17 -07:00
|
|
|
print(f"{logits=}")
|