2
2
import logging
3
3
from asyncio import Future
4
4
from collections import Counter , defaultdict
5
- from dataclasses import dataclass , field
5
+ from dataclasses import dataclass , field , replace
6
6
from pathlib import Path
7
7
from time import time
8
8
from typing import AsyncIterable , Callable , Tuple
@@ -71,33 +71,36 @@ async def get(
71
71
raise ValueError (f"{ key } was never yielded by { factory } " )
72
72
return fut .result ()
73
73
74
- def generate_index (self , timestamp : int | None ) -> Index :
74
+ def generate_index (
75
+ self , timestamp : int | None , skip_hydration : bool = False
76
+ ) -> Index :
75
77
max_counts = 5
76
78
latest_project_mtimes : list [Tuple [ProjectName , float ]] = []
77
79
all_project_names : list [ProjectName ] = []
78
80
dep_counts : Counter [str ] = Counter ()
79
81
80
- for item in self .directory .iterdir ():
81
- if item .suffix != ".json" or not item .is_file ():
82
- continue
83
- name = ProjectName (item .stem )
84
- all_project_names .append (name )
85
- if name not in self ._cache :
86
- self ._load_from_disk (name )
87
- project = self ._cache [name ].result ()
88
-
89
- for dep in project .metadata .dependencies :
90
- dep_counts [dep ] += 1
91
-
92
- mtime = project .metadata .upload_time
93
- if len (latest_project_mtimes ) < max_counts :
94
- latest_project_mtimes .append ((name , mtime ))
95
- continue
96
- for i in reversed (range (len (latest_project_mtimes ))):
97
- if mtime < latest_project_mtimes [i ][1 ]:
98
- latest_project_mtimes .insert (i + 1 , (name , mtime ))
99
- while len (latest_project_mtimes ) > max_counts :
100
- latest_project_mtimes .pop ()
82
+ if not skip_hydration :
83
+ for item in self .directory .iterdir ():
84
+ if item .suffix != ".json" or not item .is_file ():
85
+ continue
86
+ name = ProjectName (item .stem )
87
+ all_project_names .append (name )
88
+ if name not in self ._cache :
89
+ self ._load_from_disk (name )
90
+ project = self ._cache [name ].result ()
91
+
92
+ for dep in project .metadata .dependencies :
93
+ dep_counts [dep ] += 1
94
+
95
+ mtime = project .metadata .upload_time
96
+ if len (latest_project_mtimes ) < max_counts :
97
+ latest_project_mtimes .append ((name , mtime ))
98
+ continue
99
+ for i in reversed (range (len (latest_project_mtimes ))):
100
+ if mtime < latest_project_mtimes [i ][1 ]:
101
+ latest_project_mtimes .insert (i + 1 , (name , mtime ))
102
+ while len (latest_project_mtimes ) > max_counts :
103
+ latest_project_mtimes .pop ()
101
104
102
105
latest_projects = [
103
106
self ._cache [name ].result ().metadata for name , _ in latest_project_mtimes
@@ -120,3 +123,20 @@ def generate_index(self, timestamp: int | None) -> Index:
120
123
def write_index(self, timestamp: int | None = None) -> None:
    """Generate a full index and persist it to the metadata file in this directory.

    Args:
        timestamp: optional generation timestamp forwarded to ``generate_index``.
    """
    index_json = converter.dumps(self.generate_index(timestamp))
    (self.directory / METADATA_FILENAME).write_text(index_json)
126
+
127
def update_index(self) -> None:
    """Incrementally refresh the on-disk index metadata.

    Loads the existing index file, generates a fresh index with
    ``skip_hydration=True`` (so project files are not re-read from disk),
    and writes back a merged copy: the new ``generated_at``, the union of
    project names, and the combined latest-project lists sorted newest first.

    NOTE(review): the merged ``latest_projects`` list is not truncated or
    de-duplicated here — presumably the skip-hydration path keeps the fresh
    lists small/empty; confirm against ``generate_index``.
    """
    metadata_path = self.directory / METADATA_FILENAME
    current = converter.loads(metadata_path.read_text(), Index)
    fresh = self.generate_index(int(time()), skip_hydration=True)

    # Newest first: sort descending by upload time.
    merged_latest = sorted(
        current.latest_projects + fresh.latest_projects,
        key=lambda m: -m.upload_time,
    )
    # Union of names, deterministic order for stable serialization.
    merged_names = sorted({*current.all_project_names, *fresh.all_project_names})

    updated = replace(
        current,
        generated_at=fresh.generated_at,
        latest_projects=merged_latest,
        all_project_names=merged_names,
    )
    metadata_path.write_text(converter.dumps(updated))
0 commit comments