1 parent 89bf1a0 commit f25d596
probabilistic_word_embeddings/utils.py
@@ -130,3 +130,19 @@ def normalize_rotation(e, words):
     e_new[vocabulary] = (Q.T @ e[vocabulary].numpy().T).T
     return e_new
+
+def posterior_mean(paths):
+    emb_paths = sorted(paths)
+    e_ref = Embedding(saved_model_path=emb_paths[-1])
+    words_reference = [f"{wd}_c" for wd in list(e_ref.vocabulary) if "_c" not in wd]
+
+    e_mean = Embedding(saved_model_path=emb_paths[-1])
+    e_mean.theta = e_mean.theta * 0.0
+
+    for emb_path in emb_paths:
+        e = Embedding(saved_model_path=emb_path)
+        e_aligned = align(e_ref, e, words_reference)
+        e_mean.theta += e_aligned.theta / len(emb_paths)
+
+    return e_mean
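The new helper loads each saved embedding sample, aligns it to the last sample (in sorted path order) via align, and averages the theta parameter tensors into a single posterior-mean estimate. A minimal usage sketch, assuming the package import path follows the file location shown above; the checkpoint file names are illustrative, not part of the commit:

# Minimal usage sketch (checkpoint names are hypothetical).
from glob import glob
from probabilistic_word_embeddings.utils import posterior_mean

# Paths to saved embedding samples, e.g. separate posterior draws
paths = glob("checkpoints/embedding_*.pkl")

# Align every sample to the reference (last path when sorted),
# then average the aligned theta tensors
e_mean = posterior_mean(paths)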