def json_to_labeled_instances(self, inputs: JsonDict) -> List[Instance]:
    """
    Convert raw JSON input into one or more `Instance`s whose label fields
    are replaced by the model's own predictions.

    # Parameters

    inputs : `JsonDict`
        The raw JSON payload for a single example.

    # Returns

    `List[Instance]` : instances re-labeled with the model's predictions.
    """
    # Build the instance and make sure token indexers are attached before
    # the forward pass (indexers are applied lazily by the dataset reader).
    instance = self._json_to_instance(inputs)
    self._dataset_reader.apply_token_indexers(instance)
    # Run the model once, then use its outputs to produce labeled instances.
    model_outputs = self._model.forward_on_instance(instance)
    return self.predictions_to_labeled_instances(instance, model_outputs)
@@ -98,6 +99,9 @@ def get_gradients(self, instances: List[Instance]) -> Tuple[Dict[str, Any], Dict
98
99
embedding_gradients : List [Tensor ] = []
99
100
hooks : List [RemovableHandle ] = self ._register_embedding_gradient_hooks (embedding_gradients )
100
101
102
+ for instance in instances :
103
+ self ._dataset_reader .apply_token_indexers (instance )
104
+
101
105
dataset = Batch (instances )
102
106
dataset .index_instances (self ._model .vocab )
103
107
dataset_tensor_dict = util .move_to_device (dataset .as_tensor_dict (), self .cuda_device )
@@ -181,6 +185,7 @@ def _add_output(mod, _, outputs):
181
185
hook .remove ()
182
186
183
187
def predict_instance(self, instance: Instance) -> JsonDict:
    """
    Run the model on a single already-built `Instance` and return its
    sanitized (JSON-serializable) outputs.
    """
    # Token indexers are applied lazily by the dataset reader; attach them
    # before the forward pass so the instance can be indexed.
    self._dataset_reader.apply_token_indexers(instance)
    return sanitize(self._model.forward_on_instance(instance))
@@ -212,6 +217,8 @@ def predict_batch_json(self, inputs: List[JsonDict]) -> List[JsonDict]:
212
217
return self .predict_batch_instance (instances )
213
218
214
219
def predict_batch_instance(self, instances: List[Instance]) -> List[JsonDict]:
    """
    Run the model on a batch of already-built `Instance`s and return their
    sanitized (JSON-serializable) outputs, one dict per instance.
    """
    # Attach token indexers to every instance before the batched forward pass.
    for inst in instances:
        self._dataset_reader.apply_token_indexers(inst)
    return sanitize(self._model.forward_on_instances(instances))
0 commit comments