fix(api): broken analysis calls due to overlap in attribute and method names

This commit is contained in:
2026-02-23 18:14:24 +00:00
parent 397986dc89
commit 8c76476cd3
2 changed files with 23 additions and 10 deletions

View File

@@ -55,7 +55,7 @@ def word_frequencies():
return jsonify({"error": "No data uploaded"}), 400 return jsonify({"error": "No data uploaded"}), 400
try: try:
return jsonify(stat_obj.content_analysis()), 200 return jsonify(stat_obj.get_content_analysis()), 200
except ValueError as e: except ValueError as e:
return jsonify({"error": f"Malformed or missing data: {str(e)}"}), 400 return jsonify({"error": f"Malformed or missing data: {str(e)}"}), 400
except Exception as e: except Exception as e:
@@ -80,7 +80,7 @@ def get_time_analysis():
return jsonify({"error": "No data uploaded"}), 400 return jsonify({"error": "No data uploaded"}), 400
try: try:
return jsonify(stat_obj.time_analysis()), 200 return jsonify(stat_obj.get_time_analysis()), 200
except ValueError as e: except ValueError as e:
return jsonify({"error": f"Malformed or missing data: {str(e)}"}), 400 return jsonify({"error": f"Malformed or missing data: {str(e)}"}), 400
except Exception as e: except Exception as e:
@@ -93,7 +93,7 @@ def get_user_analysis():
return jsonify({"error": "No data uploaded"}), 400 return jsonify({"error": "No data uploaded"}), 400
try: try:
return jsonify(stat_obj.user_analysis()), 200 return jsonify(stat_obj.get_user_analysis()), 200
except ValueError as e: except ValueError as e:
return jsonify({"error": f"Malformed or missing data: {str(e)}"}), 400 return jsonify({"error": f"Malformed or missing data: {str(e)}"}), 400
except Exception as e: except Exception as e:
@@ -106,13 +106,26 @@ def get_cultural_analysis():
return jsonify({"error": "No data uploaded"}), 400 return jsonify({"error": "No data uploaded"}), 400
try: try:
return jsonify(stat_obj.cultural_analysis()), 200 return jsonify(stat_obj.get_cultural_analysis()), 200
except ValueError as e: except ValueError as e:
return jsonify({"error": f"Malformed or missing data: {str(e)}"}), 400 return jsonify({"error": f"Malformed or missing data: {str(e)}"}), 400
except Exception as e: except Exception as e:
print(traceback.format_exc()) print(traceback.format_exc())
return jsonify({"error": f"An unexpected error occurred: {str(e)}"}), 500 return jsonify({"error": f"An unexpected error occurred: {str(e)}"}), 500
@app.route("/stats/interaction", methods=["GET"])
def get_interaction_analysis():
if stat_obj is None:
return jsonify({"error": "No data uploaded"}), 400
try:
return jsonify(stat_obj.get_interactional_analysis()), 200
except ValueError as e:
return jsonify({"error": f"Malformed or missing data: {str(e)}"}), 400
except Exception as e:
print(traceback.format_exc())
return jsonify({"error": f"An unexpected error occurred: {str(e)}"}), 500
@app.route('/filter/search', methods=["POST"]) @app.route('/filter/search', methods=["POST"])
def search_dataset(): def search_dataset():
if stat_obj is None: if stat_obj is None:

View File

@@ -66,14 +66,14 @@ class StatGen:
# topics over time # topics over time
# emotions over time # emotions over time
def get_time_analysis(self) -> dict:
    """Return temporal statistics about the uploaded dataset.

    Fix: the return annotation previously claimed ``pd.DataFrame`` but the
    body returns a plain dict; annotated ``-> dict`` to match the actual
    return value and the sibling ``get_*_analysis`` methods.

    Returns:
        dict with:
          - "events_per_day": output of temporal_analysis.posts_per_day()
            (presumably event counts keyed by day — confirm with that helper)
          - "weekday_hour_heatmap": output of temporal_analysis.heatmap()
    """
    return {
        "events_per_day": self.temporal_analysis.posts_per_day(),
        "weekday_hour_heatmap": self.temporal_analysis.heatmap(),
    }
# average topic duration # average topic duration
def content_analysis(self) -> dict: def get_content_analysis(self) -> dict:
return { return {
"word_frequencies": self.linguistic_analysis.word_frequencies(), "word_frequencies": self.linguistic_analysis.word_frequencies(),
"common_two_phrases": self.linguistic_analysis.ngrams(), "common_two_phrases": self.linguistic_analysis.ngrams(),
@@ -84,7 +84,7 @@ class StatGen:
# average emotion per user # average emotion per user
# average chain length # average chain length
def user_analysis(self) -> dict: def get_user_analysis(self) -> dict:
return { return {
"top_users": self.interaction_analysis.top_users(), "top_users": self.interaction_analysis.top_users(),
"users": self.interaction_analysis.per_user_analysis(), "users": self.interaction_analysis.per_user_analysis(),
@@ -94,14 +94,14 @@ class StatGen:
# average / max thread depth # average / max thread depth
# high engagment threads based on volume # high engagment threads based on volume
def get_interactional_analysis(self) -> dict:
    """Summarize interaction structure of the dataset.

    Returns:
        dict with "average_thread_depth" taken from the interaction
        analysis helper.
    """
    depth = self.interaction_analysis.average_thread_depth()
    return {"average_thread_depth": depth}
# detect community jargon # detect community jargon
# in-group and out-group linguistic markers # in-group and out-group linguistic markers
def get_cultural_analysis(self) -> dict:
    """Report cultural/linguistic markers found in the dataset.

    Returns:
        dict with "identity_markers" taken from the linguistic
        analysis helper.
    """
    markers = self.linguistic_analysis.identity_markers()
    return {"identity_markers": markers}