Skip to content
This repository was archived by the owner on Dec 16, 2022. It is now read-only.

Commit 3de6943

Browse files
beneyal
authored and DeNeutoy committed
Avoid deprecation warnings (#1861)
1 parent a7da2ab commit 3de6943

File tree

2 files changed

+4
-4
lines changed

2 files changed

+4
-4
lines changed

allennlp/models/reading_comprehension/dialog_qa.py

+3-3
Original file line numberDiff line numberDiff line change
@@ -270,10 +270,10 @@ def forward(self, # type: ignore
270270
repeated_passage_mask))
271271
self_attention_matrix = self._self_attention(residual_layer, residual_layer)
272272

273-
mask = repeated_passage_mask.resize(total_qa_count, passage_length, 1) \
274-
* repeated_passage_mask.resize(total_qa_count, 1, passage_length)
273+
mask = repeated_passage_mask.reshape(total_qa_count, passage_length, 1) \
274+
* repeated_passage_mask.reshape(total_qa_count, 1, passage_length)
275275
self_mask = torch.eye(passage_length, passage_length, device=self_attention_matrix.device)
276-
self_mask = self_mask.resize(1, passage_length, passage_length)
276+
self_mask = self_mask.reshape(1, passage_length, passage_length)
277277
mask = mask * (1 - self_mask)
278278

279279
self_attention_probs = util.masked_softmax(self_attention_matrix, mask)

allennlp/tests/nn/chu_liu_edmonds_test.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -75,6 +75,6 @@ def test_mst(self):
7575
decode_mst(energy, 5, has_labels=False)
7676

7777
def test_mst_finds_maximum_spanning_tree(self):
78-
energy = torch.range(1, 9).view(1, 3, 3)
78+
energy = torch.arange(1, 10).view(1, 3, 3)
7979
heads, _ = decode_mst(energy.numpy(), 3) # pylint: disable=protected-access
8080
assert list(heads) == [-1, 2, 0]

0 commit comments

Comments (0)