-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathretrieval_pyserini_retrieval.py
70 lines (54 loc) · 1.8 KB
/
retrieval_pyserini_retrieval.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
# /// script
# requires-python = ">=3.10"
# dependencies = [
# "pyserini",
# "simple-parsing",
# ]
# ///
import json
import logging
from dataclasses import dataclass, field
from logging.config import fileConfig
from pathlib import Path
from typing import Literal
import simple_parsing
from pyserini.search.lucene import LuceneSearcher
from tqdm.contrib.logging import logging_redirect_tqdm
from peerqa.data_loader import QuestionLoader
from peerqa.utils import url_save_str
fileConfig("logging.ini")
logger = logging.getLogger(__name__)
@dataclass
class Args:
    """Command-line arguments for the BM25 retrieval script."""

    # JSONL file with the QA pairs to retrieve evidence for.
    qa_file: Path = Path("data/qa.jsonl")
    # Root directory holding the Pyserini indexes; also receives the run file.
    output_dir: Path = Path("out")
    # Text unit the BM25 index was built over.
    granularity: Literal["sentences", "paragraphs"] = "sentences"
def main(args: Args):
    """Run BM25 retrieval over per-paper Lucene indexes and write a run file.

    For every paper that has questions with answer evidence, the paper's
    per-granularity BM25 index is searched with all of its questions at once;
    scores are collected into ``run[question_id][doc_id] = score`` and dumped
    as JSON to ``<output_dir>/run-<granularity>-bm25-sparse.json``.
    """
    loader = QuestionLoader(args.qa_file)
    run = {}
    for paper_id, qids, queries in loader.questions_with_answer_evidence():
        # Papers without any evidence-bearing question contribute nothing.
        if not queries:
            continue
        index_path = (
            args.output_dir
            / "pyserini"
            / "indexes"
            / f"bm25-{args.granularity}"
            / url_save_str(paper_id)
        )
        # NOTE(review): creating the index directory in a *retrieval* script
        # looks like leftover from the indexing script — an empty directory is
        # not a valid Lucene index. Confirm the index is built beforehand.
        index_path.mkdir(parents=True, exist_ok=True)
        searcher = LuceneSearcher(str(index_path))
        # Retrieve up to the top-1000 passages per question in one batch call.
        results = searcher.batch_search(queries, qids=qids, k=1000)
        for qid, question_hits in results.items():
            run[qid] = {hit.docid: hit.score for hit in question_hits}
    out_path = args.output_dir / f"run-{args.granularity}-bm25-sparse.json"
    with open(out_path, "w") as f:
        json.dump(run, f, indent=2)
if __name__ == "__main__":
    # Tolerate unknown CLI flags (e.g. shared launcher args) by parsing known ones only.
    cli_args, _unknown = simple_parsing.parse_known_args(Args)
    # Keep tqdm progress bars from mangling log lines while the script runs.
    with logging_redirect_tqdm():
        logger.info(cli_args)
        main(cli_args)