Remove the setting of _attn_implementation from llama_bidirectional_model
#3
by nvidia-oliver-holworthy - opened
llama_bidirectional_model.py CHANGED
@@ -40,7 +40,6 @@ class LlamaBidirectionalModel(LlamaModel):
         super().__init__(config)
         for layer in self.layers:
             layer.self_attn.is_causal = False
-        self.config._attn_implementation = "eager"

     def _update_causal_mask(
         self,
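With the hard-coded override removed, the attention backend chosen by the caller is no longer silently replaced with "eager" in __init__; bidirectional attention is still guaranteed by the is_causal = False loop. A minimal sketch of the resulting behavior, assuming LlamaBidirectionalModel is importable from llama_bidirectional_model and follows the standard transformers from_pretrained interface (the checkpoint path below is a placeholder):

from llama_bidirectional_model import LlamaBidirectionalModel

# With the override removed, the backend requested here is respected rather
# than being reset to "eager" during model construction.
model = LlamaBidirectionalModel.from_pretrained(
    "path/to/checkpoint",        # placeholder checkpoint path
    attn_implementation="sdpa",  # e.g. "eager", "sdpa", or "flash_attention_2"
)

# Bidirectionality is unaffected: __init__ still disables causal masking on
# every self-attention layer, independent of the attention implementation.
assert all(not layer.self_attn.is_causal for layer in model.layers)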