Fix torch profiler bugs for bench_offline_throughput.py (#6557)

This commit is contained in:
Yueyang Pan
2025-06-09 20:33:41 +08:00
committed by GitHub
parent 451ffe74d9
commit 98c00a2df1
5 changed files with 49 additions and 5 deletions

View File

@@ -85,6 +85,22 @@ class RuntimeEndpoint(BaseBackend):
)
self._assert_success(res)
def start_profile(self):
    """Tell the remote server to start the torch profiler.

    Issues a request against the server's ``/start_profile`` endpoint and
    raises (via ``_assert_success``) if the server reports a failure.
    """
    response = http_request(
        f"{self.base_url}/start_profile",
        api_key=self.api_key,
        verify=self.verify,
    )
    self._assert_success(response)
def stop_profile(self):
    """Tell the remote server to stop the torch profiler.

    Issues a request against the server's ``/stop_profile`` endpoint and
    raises (via ``_assert_success``) if the server reports a failure.
    """
    response = http_request(
        f"{self.base_url}/stop_profile",
        api_key=self.api_key,
        verify=self.verify,
    )
    self._assert_success(response)
def commit_lazy_operations(self, s: StreamExecutor):
data = {"text": s.text_, "sampling_params": {"max_new_tokens": 0}}
self._add_images(s, data)
@@ -374,7 +390,8 @@ class Runtime:
self.pid = None
pipe_reader, pipe_writer = multiprocessing.Pipe(duplex=False)
proc = multiprocessing.Process(
ctx = multiprocessing.get_context("spawn")
proc = ctx.Process(
target=launch_server,
args=(self.server_args, pipe_writer),
)
@@ -406,6 +423,12 @@ class Runtime:
kill_process_tree(self.pid)
self.pid = None
def start_profile(self):
    """Forward a profiler-start request to the underlying server endpoint."""
    self.endpoint.start_profile()
def stop_profile(self):
    """Forward a profiler-stop request to the underlying server endpoint."""
    self.endpoint.stop_profile()
def cache_prefix(self, prefix: str):
    """Ask the underlying server endpoint to cache *prefix* for reuse."""
    self.endpoint.cache_prefix(prefix)