@@ -132,6 +132,7 @@ def get_cached_ancestors(content_id):
 
 
 def get_max_ancestor_depth():
+    """Returns one less than the maximum depth of the ancestors of all content nodes"""
     max_depth = 0
     content_ids = ContentSummaryLog.objects.values_list("content_id", flat=True)
     nodes = ContentNode.objects.filter(content_id__in=content_ids).only(
@@ -142,10 +143,11 @@ def get_max_ancestor_depth():
         # cache it here so retrieval is faster when adding ancestor info to the csv
         add_content_to_cache(node.content_id, title=node.title, ancestors=ancestors)
         max_depth = max(max_depth, len(ancestors))
-    return max_depth
+    return max_depth - 1
 
 
 def add_ancestors_info(row, ancestors, max_depth):
+    ancestors = ancestors[1:]
     row.update(
         {
             f"Topic level {level + 1}": ancestors[level]["title"]
@@ -239,7 +241,10 @@ def csv_file_generator(
         for i in range(get_max_ancestor_depth())
     ]
 
-    header_labels += [label for _, label in topic_headers]
+    content_id_index = header_labels.index(labels["content_id"])
+    header_labels[content_id_index:content_id_index] = [
+        label for _, label in topic_headers
+    ]
 
     csv_file = open_csv_for_writing(filepath)
 
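Taken together, the first two hunks stop exporting the top-most ancestor: `add_ancestors_info` now skips `ancestors[0]` (presumably the channel root, which is not a meaningful "Topic level"), and `get_max_ancestor_depth` returns one less than the longest ancestor list so the number of `Topic level N` headers still matches the trimmed data. A minimal sketch of the resulting behaviour, using hypothetical ancestor data and a simplified version of the dict comprehension:

```python
# Hypothetical ancestor list for one node; assume it is the deepest chain in the dataset.
ancestors = [
    {"title": "My Channel"},   # top-most ancestor, dropped by ancestors[1:]
    {"title": "Mathematics"},  # becomes "Topic level 1"
    {"title": "Algebra"},      # becomes "Topic level 2"
]

# Mirrors get_max_ancestor_depth() returning max_depth - 1.
max_depth = len(ancestors) - 1

trimmed = ancestors[1:]
row = {
    f"Topic level {level + 1}": trimmed[level]["title"]
    for level in range(min(len(trimmed), max_depth))
}
print(row)  # {'Topic level 1': 'Mathematics', 'Topic level 2': 'Algebra'}
```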
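The last hunk changes where the topic columns land in the header row: instead of being appended after everything else, they are spliced in just before the `Content id` column by assigning to an empty slice. A standalone illustration with made-up labels (the real label strings come from the module's `labels` mapping):

```python
header_labels = ["Username", "Content id", "Time spent"]
topic_headers = [("topic_level_1", "Topic level 1"), ("topic_level_2", "Topic level 2")]

content_id_index = header_labels.index("Content id")
# Assigning to the empty slice [i:i] inserts the new items at i without replacing anything.
header_labels[content_id_index:content_id_index] = [label for _, label in topic_headers]

print(header_labels)
# ['Username', 'Topic level 1', 'Topic level 2', 'Content id', 'Time spent']
```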