Merged
tensorrt_llm/_torch/models/modeling_deepseekv3.py (13 changes: 9 additions & 4 deletions)
```diff
@@ -1331,13 +1331,13 @@ def _run_MoE(hidden_states, hidden_states_fp4, do_finalize):
             hidden_states, residual = self.moe_allreduce(
                 fc2_output, all_reduce_params=moe_all_reduce_params)
         else:
-            if spec_metadata is not None and spec_metadata.is_layer_capture(
-                    self.layer_idx):
-                spec_metadata.maybe_capture_hidden_states(
-                    self.layer_idx, hidden_states, residual)
             if self.next_layer_layernorm is not None:
                 hidden_states, residual = self.next_layer_layernorm(
                     hidden_states, residual)
+            if spec_metadata is not None and spec_metadata.is_layer_capture(
+                    self.layer_idx):
+                spec_metadata.maybe_capture_hidden_states(
+                    self.layer_idx, hidden_states, None)
 
         return hidden_states, residual
```
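This hunk moves the speculative-decoding layer capture from before the next layer's layernorm to after it, and switches the residual argument to None because the fused add+norm has already folded the residual into hidden_states. Below is a minimal sketch of that ordering; TinySpecMetadata, fused_add_norm, and nn.LayerNorm (in place of the model's RMS norm) are hypothetical stand-ins, not the TensorRT-LLM implementation:

```python
import torch
import torch.nn as nn


class TinySpecMetadata:
    """Hypothetical stand-in for SpecMetadata: records one tensor per layer."""

    def __init__(self, layers_to_capture):
        self.layers_to_capture = set(layers_to_capture)
        self.captured = {}

    def is_layer_capture(self, layer_idx):
        return layer_idx in self.layers_to_capture

    def maybe_capture_hidden_states(self, layer_idx, hidden_states, residual=None):
        # residual=None signals that hidden_states is already final
        # (add + norm applied), so nothing has to be re-combined later.
        if self.is_layer_capture(layer_idx):
            self.captured[layer_idx] = hidden_states.detach().clone()


def fused_add_norm(hidden, residual, norm):
    # Rough analogue of next_layer_layernorm(hidden_states, residual):
    # fold the residual in, normalize, and return the updated pair.
    residual = hidden + residual
    return norm(residual), residual


norm = nn.LayerNorm(8)
spec = TinySpecMetadata(layers_to_capture={3})
hidden, residual = torch.randn(2, 8), torch.randn(2, 8)

# Ordering after this diff: normalize first, then capture with residual=None.
hidden, residual = fused_add_norm(hidden, residual, norm)
spec.maybe_capture_hidden_states(3, hidden, None)
print(spec.captured[3].shape)  # torch.Size([2, 8])
```

The removed lines captured a pre-norm (hidden_states, residual) pair instead; moving the capture makes the recorded tensor match what the following layer actually consumes.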
```diff
@@ -1455,6 +1455,7 @@ def forward(
         embed_tokens: Embedding,
         attn_metadata: AttentionMetadata,
         all_rank_num_tokens: Optional[List[int]] = None,
+        spec_metadata: Optional[SpecMetadata] = None,
         **kwargs,
     ) -> torch.Tensor:
```

```diff
@@ -1531,6 +1532,10 @@ def norm_hidden():
         else:
             hidden_states, _ = self.shared_head.norm(hidden_states, residual)
 
+        # For the 2-model path, capture the hidden states.
+        if spec_metadata is not None:
+            spec_metadata.maybe_capture_hidden_states(0, hidden_states, None)
+
         return hidden_states
```
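This added block runs at the end of the head's forward: on the 2-model path the final post-norm hidden states are captured under index 0 so a separate draft model can consume them, and the call is skipped entirely when no speculative metadata is present. A self-contained sketch of that shape; TinySharedHead and DictCapture are illustrative stand-ins, not the real classes:

```python
import torch
import torch.nn as nn


class DictCapture:
    """Minimal capture sink standing in for SpecMetadata on the 2-model path."""

    def __init__(self):
        self.captured = {}

    def maybe_capture_hidden_states(self, layer_idx, hidden_states, residual):
        self.captured[layer_idx] = hidden_states.detach()


class TinySharedHead(nn.Module):
    """Illustrative stand-in for the shared head; not the TensorRT-LLM class."""

    def __init__(self, dim=8):
        super().__init__()
        self.norm = nn.LayerNorm(dim)

    def forward(self, hidden_states, spec_metadata=None):
        hidden_states = self.norm(hidden_states)
        # Mirror of the added lines: capture under index 0 with residual=None,
        # and do nothing when no speculative metadata was passed in.
        if spec_metadata is not None:
            spec_metadata.maybe_capture_hidden_states(0, hidden_states, None)
        return hidden_states


spec = DictCapture()
TinySharedHead()(torch.randn(2, 8), spec_metadata=spec)
assert 0 in spec.captured  # the draft model would read this tensor
```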


tensorrt_llm/_torch/models/modeling_speculative.py (3 changes: 3 additions & 0 deletions)
```diff
@@ -455,6 +455,7 @@ def forward(
         hidden_states: torch.Tensor,
         attn_metadata: AttentionMetadata,
         all_rank_num_tokens: Optional[List[int]] = None,
+        spec_metadata: Optional[SpecMetadata] = None,
         **kwargs,
     ) -> Tuple[torch.Tensor, torch.Tensor]:
         hidden_states = self.layers(
@@ -464,6 +465,7 @@ def forward(
             embed_tokens=self.embed_tokens,
             attn_metadata=attn_metadata,
             all_rank_num_tokens=all_rank_num_tokens,
+            spec_metadata=spec_metadata,
         )
 
         return hidden_states
@@ -518,6 +520,7 @@ def forward(self,
             hidden_states=hidden_states,
             attn_metadata=attn_metadata,
             all_rank_num_tokens=attn_metadata.all_rank_num_tokens,
+            spec_metadata=spec_metadata,
             **kwargs)
         return self.logits_processor.forward(
             output,
```
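All three hunks in this file are the same plumbing fix: each forward signature gains an optional spec_metadata and passes it one level down, so the capture sites in modeling_deepseekv3.py actually receive it. A minimal illustration of the pattern with hypothetical names (TinyLayers, TinyModel):

```python
from typing import Optional


class TinyLayers:
    """Stand-in for self.layers: the level that finally uses spec_metadata."""

    def forward(self, hidden_states: list,
                spec_metadata: Optional[object] = None, **kwargs) -> list:
        if spec_metadata is not None:
            print("capture hooks can fire")
        return hidden_states


class TinyModel:
    """Stand-in for the draft model's forward in modeling_speculative.py."""

    def __init__(self):
        self.layers = TinyLayers()

    def forward(self, hidden_states: list,
                spec_metadata: Optional[object] = None, **kwargs) -> list:
        # The one-line fix in each hunk: forward the optional argument
        # explicitly instead of letting it die in an unused **kwargs.
        return self.layers.forward(hidden_states,
                                   spec_metadata=spec_metadata, **kwargs)


TinyModel().forward([0.0], spec_metadata=object())  # prints once
```

Without the explicit pass-through, an optional argument like this tends to land in a **kwargs that is never read, leaving every downstream capture guard False.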