Skip to content

Commit d26a5a9

Browse files
chzblych and Tabrizian authored
[https://nvbugs/5451296][bug] Cherry-pick #7017 from release/1.0 branch (#7043)
Signed-off-by: Iman Tabrizian <[email protected]> Co-authored-by: Iman Tabrizian <[email protected]>
1 parent e07fcc3 commit d26a5a9

File tree

1 file changed

+14
-17
lines changed

1 file changed

+14
-17
lines changed

tests/unittest/llmapi/test_llm_args.py

Lines changed: 14 additions & 17 deletions
Original file line number | Diff line number | Diff line change
@@ -428,23 +428,20 @@ class TestTorchLlmArgs:
428428

429429
@print_traceback_on_error
430430
def test_runtime_sizes(self):
431-
llm = TorchLLM(
432-
llama_model_path,
433-
max_beam_width=1,
434-
max_num_tokens=256,
435-
max_seq_len=128,
436-
max_batch_size=8,
437-
)
438-
439-
assert llm.args.max_beam_width == 1
440-
assert llm.args.max_num_tokens == 256
441-
assert llm.args.max_seq_len == 128
442-
assert llm.args.max_batch_size == 8
443-
444-
assert llm._executor_config.max_beam_width == 1
445-
assert llm._executor_config.max_num_tokens == 256
446-
assert llm._executor_config.max_seq_len == 128
447-
assert llm._executor_config.max_batch_size == 8
431+
with TorchLLM(llama_model_path,
432+
max_beam_width=1,
433+
max_num_tokens=256,
434+
max_seq_len=128,
435+
max_batch_size=8) as llm:
436+
assert llm.args.max_beam_width == 1
437+
assert llm.args.max_num_tokens == 256
438+
assert llm.args.max_seq_len == 128
439+
assert llm.args.max_batch_size == 8
440+
441+
assert llm._executor_config.max_beam_width == 1
442+
assert llm._executor_config.max_num_tokens == 256
443+
assert llm._executor_config.max_seq_len == 128
444+
assert llm._executor_config.max_batch_size == 8
448445

449446
def test_dynamic_setattr(self):
450447
with pytest.raises(pydantic_core._pydantic_core.ValidationError):

0 commit comments

Comments (0)