@@ -47,6 +47,10 @@ class PretrainedTransformerBackbone(Backbone):
         When `True` (the default), only the final layer of the pretrained transformer is taken
         for the embeddings. But if set to `False`, a scalar mix of all of the layers
         is used.
+    tokenizer_kwargs: `Dict[str, Any]`, optional (default = `None`)
+        Dictionary with
+        [additional arguments](https://github.com/huggingface/transformers/blob/155c782a2ccd103cf63ad48a2becd7c76a7d2115/transformers/tokenization_utils.py#L691)
+        for `AutoTokenizer.from_pretrained`.
     transformer_kwargs: `Dict[str, Any]`, optional (default = `None`)
         Dictionary with
         [additional arguments](https://github.com/huggingface/transformers/blob/155c782a2ccd103cf63ad48a2becd7c76a7d2115/transformers/modeling_utils.py#L253)
@@ -72,6 +76,7 @@ def __init__(
         last_layer_only: bool = True,
         override_weights_file: Optional[str] = None,
         override_weights_strip_prefix: Optional[str] = None,
+        tokenizer_kwargs: Optional[Dict[str, Any]] = None,
         transformer_kwargs: Optional[Dict[str, Any]] = None,
         output_token_strings: bool = True,
         vocab_namespace: str = "tags",
@@ -87,6 +92,7 @@ def __init__(
             last_layer_only=last_layer_only,
             override_weights_file=override_weights_file,
             override_weights_strip_prefix=override_weights_strip_prefix,
+            tokenizer_kwargs=tokenizer_kwargs,
             transformer_kwargs=transformer_kwargs,
         )
         self._output_token_strings = output_token_strings
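
For reference, a minimal usage sketch of the new parameter. The import paths, the `Vocabulary` argument, and the example kwarg values are assumptions based on the surrounding AllenNLP code, not part of this diff:

```python
# Minimal usage sketch (hypothetical values; not part of this diff).
from allennlp.data import Vocabulary
from allennlp.modules.backbones import PretrainedTransformerBackbone

backbone = PretrainedTransformerBackbone(
    vocab=Vocabulary(),
    model_name="bert-base-uncased",
    # New in this change: forwarded to AutoTokenizer.from_pretrained.
    tokenizer_kwargs={"do_lower_case": True},
    # Already supported: forwarded to AutoModel.from_pretrained.
    transformer_kwargs={"output_attentions": False},
)
```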