
Commit e7f41ca

blueswhen and niushengxiao authored
fix: remove python310 feature (#963)
Co-authored-by: niushengxiao <niushengxiao@sensetime.com>
1 parent 6c73ff3 commit e7f41ca

1 file changed, +3 -3 lines changed


lightllm/server/api_openai.py

Lines changed: 3 additions & 3 deletions
@@ -386,7 +386,7 @@ async def completions_impl(request: CompletionRequest, raw_request: Request) ->
 
 
 async def _process_prompts_completion(
-    prompts: List[str] | List[List[int]],
+    prompts: Union[List[str], List[List[int]]],
     sampling_params: SamplingParams,
     sampling_params_dict: Dict,
     multimodal_params: MultimodalParams,
@@ -411,7 +411,7 @@ async def _process_prompts_completion(
             prompts[0], sampling_params, multimodal_params, raw_request, request, created_time
         )
 
-    async def process_single_prompt(prompt: str | List[int], prompt_index: int):
+    async def process_single_prompt(prompt: Union[str, List[int]], prompt_index: int):
         if len(prompts) > 1:
             individual_sampling_params = SamplingParams()
             individual_sampling_params.init(tokenizer=g_objs.httpserver_manager.tokenizer, **sampling_params_dict)
@@ -437,7 +437,7 @@ async def process_single_prompt(prompt: str | List[int], prompt_index: int):
 
 
 async def _handle_streaming_completion(
-    prompt: str | List[int],
+    prompt: Union[str, List[int]],
     sampling_params: SamplingParams,
     multimodal_params: MultimodalParams,
     raw_request: Request,
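The commit swaps PEP 604 union syntax (X | Y) in these annotations for typing.Union, which appears to be the Python 3.10-only feature named in the commit title: the | operator between types only exists on Python 3.10+, while Union works on earlier 3.x versions. A minimal sketch of the difference; the signature is taken from the diff above, the bodies are placeholders rather than the project's code:

from typing import List, Union

# Portable form: typing.Union builds the union without the | operator,
# so this definition also works on Python 3.9 and earlier.
async def process_single_prompt(prompt: Union[str, List[int]], prompt_index: int):
    ...

# PEP 604 form: annotations are evaluated when the function is defined
# (absent "from __future__ import annotations"), so on Python 3.9 the
# line below raises TypeError and importing the module fails.
# async def process_single_prompt(prompt: str | List[int], prompt_index: int):
#     ...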
