fix(backend): buggy reply_time_by_emotion metric
This metric was never statistically significant and held no real value. It also happened to contain accidental NaN values in the dataframe, which broke the frontend. Happy to remove.
This commit is contained in:
@@ -96,10 +96,7 @@ class StatGen:
             "common_three_phrases": self.linguistic_analysis.ngrams(filtered_df, n=3),
             "average_emotion_by_topic": self.emotional_analysis.avg_emotion_by_topic(
                 filtered_df
-            ),
-            "reply_time_by_emotion": self.temporal_analysis.avg_reply_time_per_emotion(
-                filtered_df
-            ),
+            )
         }

     def get_user_analysis(self, df: pd.DataFrame, filters: dict | None = None) -> dict:
|
|||||||
Reference in New Issue
Block a user