-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathretrieval_colbert_retrieval.py
78 lines (64 loc) · 2.19 KB
/
retrieval_colbert_retrieval.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
# /// script
# requires-python = ">=3.10"
# dependencies = [
# "colbert-ai",
# "faiss-cpu",
# "numpy==1.24.1",
# "sentence-transformers==2.7.0",
# "setuptools",
# "simple-parsing",
# "torch==1.13.1",
# "transformers==4.40.0",
# ]
#
# [tool.uv.sources]
# colbert-ai = { git = "https://github.com/timbmg/ColBERT" }
# ///
import logging
from dataclasses import dataclass, field
from logging.config import fileConfig
from pathlib import Path
from typing import Literal
import simple_parsing
from colbert import Searcher
from colbert.data import Queries
from colbert.infra import ColBERTConfig, Run, RunConfig
from tqdm.auto import tqdm
from tqdm.contrib.logging import logging_redirect_tqdm
from peerqa.utils import url_save_hash, url_save_str
fileConfig("logging.ini")
logger = logging.getLogger(__name__)
@dataclass
class Args:
output_dir: Path = field(default=Path("out"))
granularity: Literal["sentences", "paragraphs"] = "sentences"
template: str = None
def main(args):
    """Run ColBERT retrieval for every per-paper query file.

    For each ``queries.tsv`` found one level below the experiment directory,
    loads that paper's pre-built ColBERT index and saves a top-k ranking
    (``paper.nbits=2.ranking.tsv``) via the ColBERT Run context.

    Args:
        args: parsed ``Args`` (output_dir, granularity, template).

    Raises:
        FileNotFoundError: if a paper's ColBERT index file is missing.
    """
    subdir = f"colbert-{args.granularity}"
    if args.template is not None:
        # The template hash disambiguates runs that used different query
        # templates over the same granularity.
        template_hash = url_save_hash(args.template)
        logger.info(f"Adding template hash {template_hash} to subdir.")
        subdir += f"-{template_hash}"

    experiment_dir = str(args.output_dir / subdir)
    query_files = list((args.output_dir / subdir).glob("*/queries.tsv"))
    for query_file in tqdm(query_files, ncols=80):
        # Layout is <experiment_dir>/<paper_id>/queries.tsv, so the paper id
        # is the parent directory name of the query file.
        paper_id = str(query_file.parts[-2])
        index_path = Path(
            f"{experiment_dir}/{paper_id}/indexes/paper.nbits=2/ivf.pid.pt"
        )
        # Fail fast with an explicit error: `assert` is stripped under
        # `python -O`, so it must not guard required runtime state.
        if not index_path.exists():
            raise FileNotFoundError(f"Missing ColBERT index: {index_path}")

        with Run().context(
            RunConfig(nranks=1, root=experiment_dir, experiment=paper_id)
        ):
            config = ColBERTConfig(
                root=experiment_dir,
            )
            searcher = Searcher(index="paper.nbits=2", config=config)
            queries = Queries(str(query_file))
            # k=10000 retrieves a deep candidate pool so downstream
            # evaluation can truncate or re-rank as needed.
            ranking = searcher.search_all(queries, k=10000)
            ranking.save("paper.nbits=2.ranking.tsv")
if __name__ == "__main__":
    # parse_known_args ignores unrecognized CLI flags instead of erroring,
    # so the script tolerates extra arguments from wrapper tooling.
    args, _ = simple_parsing.parse_known_args(Args)
    with logging_redirect_tqdm():
        # Route log records through tqdm so progress bars are not
        # clobbered by log output.
        logger.info(args)
        main(args)