Add float16 and float32 (#1115)

Woosuk Kwon 2023-09-21 00:52:47 -07:00 committed by GitHub
parent 2ac4d5e2bf
commit 1ac4ccf73c


@@ -93,7 +93,9 @@ class EngineArgs:
             '--dtype',
             type=str,
             default=EngineArgs.dtype,
-            choices=['auto', 'half', 'bfloat16', 'float'],
+            choices=[
+                'auto', 'half', 'float16', 'bfloat16', 'float', 'float32'
+            ],
             help='data type for model weights and activations. '
             'The "auto" option will use FP16 precision '
             'for FP32 and FP16 models, and BF16 precision '
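
For context, the change makes 'float16' an alias for 'half' and 'float32' an alias for 'float'. Below is a minimal, hypothetical sketch of how such a --dtype flag could be resolved to a torch dtype; the helper name, mapping table, and 'auto' rule shown here are illustrative assumptions based on the help text, not vLLM's actual implementation.

import argparse

import torch

# Assumption: 'half'/'float16' and 'float'/'float32' map to the same dtypes.
_STR_DTYPE_TO_TORCH_DTYPE = {
    'half': torch.float16,
    'float16': torch.float16,
    'bfloat16': torch.bfloat16,
    'float': torch.float32,
    'float32': torch.float32,
}


def resolve_dtype(dtype_arg: str, config_dtype: torch.dtype) -> torch.dtype:
    """Resolve the CLI dtype string (hypothetical helper).

    Per the help text: 'auto' uses FP16 for FP32 and FP16 checkpoints,
    and BF16 for BF16 checkpoints (assumed interpretation).
    """
    if dtype_arg == 'auto':
        if config_dtype == torch.bfloat16:
            return torch.bfloat16
        return torch.float16
    return _STR_DTYPE_TO_TORCH_DTYPE[dtype_arg]


parser = argparse.ArgumentParser()
parser.add_argument(
    '--dtype',
    type=str,
    default='auto',
    choices=['auto', 'half', 'float16', 'bfloat16', 'float', 'float32'],
)
args = parser.parse_args(['--dtype', 'float16'])
print(resolve_dtype(args.dtype, torch.float32))  # torch.float16

Because argparse validates against choices, 'float16' and 'float32' would be rejected before this commit; adding them to the list is enough to accept the aliases without changing downstream dtype handling.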