Remove the setting of _attn_implementation from llama_bidirectional_model

#3
Files changed (1)
  1. llama_bidirectional_model.py +0 -1
llama_bidirectional_model.py CHANGED
@@ -40,7 +40,6 @@ class LlamaBidirectionalModel(LlamaModel):
         super().__init__(config)
         for layer in self.layers:
             layer.self_attn.is_causal = False
-        self.config._attn_implementation = "eager"
 
     def _update_causal_mask(
         self,
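With the hard-coded override removed, the attention backend is no longer forced to "eager" when the model is constructed; it follows whatever the config or loader specifies. A minimal sketch of the effect, assuming the file is loaded as Hub remote code and using "org/checkpoint" as a placeholder checkpoint id (not taken from this repo):

from transformers import AutoModel

# Before this change, __init__ overwrote config._attn_implementation with
# "eager"; with the override gone, the backend requested at load time
# (e.g. "sdpa" or "eager") is the one the layers actually use.
model = AutoModel.from_pretrained(
    "org/checkpoint",          # placeholder checkpoint id
    trust_remote_code=True,
    attn_implementation="sdpa",
)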