diff --git a/src/backend/management/commands/create_mongo_index.py b/src/backend/management/commands/create_mongo_index.py
index ebb906ef52c8969fd2625e2a3ca6bfcd7093fb8e..a5edf0ad599c14005249e17321f6e5c5948ca930 100644
--- a/src/backend/management/commands/create_mongo_index.py
+++ b/src/backend/management/commands/create_mongo_index.py
@@ -1,27 +1,36 @@
 from django.core.management.base import BaseCommand
-from pymongo import MongoClient, ASCENDING, DESCENDING, HASHED
+from pymongo import ASCENDING, DESCENDING, HASHED
 from backend.utils import lrs_db
 
 class Command(BaseCommand):
-    help = "Create indexes for the MongoDB 'results' collection"
+    help = "Create indexes for the MongoDB 'results' and 'statements' collections"
 
     def handle(self, *args, **kwargs):
-        # Verbindung zur MongoDB herstellen
+        # Define collections and their indexes
+        collections_indexes = {
+            "results": [
+                {"key": [("_id", ASCENDING)], "name": "_id_"},
+                {"key": [("context_id", HASHED)], "name": "context_id_hashed"},
+                {"key": [("name", ASCENDING)], "name": "name_1"},
+                {"key": [("created_at", DESCENDING)], "name": "created_at_-1"},
+                {"key": [("name", ASCENDING), ("created_at", DESCENDING)], "name": "name_1_created_at_-1"},
+            ],
+            "statements": [
+                {"key": [("_id", ASCENDING)], "name": "_id_"},
+                {"key": [("timestamp", ASCENDING)], "name": "timestamp_1"},
+                {"key": [("stored", DESCENDING)], "name": "stored_-1"},
+                {"key": [("timestamp", ASCENDING), ("stored", DESCENDING)], "name": "timestamp_1_stored_-1"},
+            ]
+        }
 
-        collection = lrs_db["results"]
-
-        # Indexe definieren
-        indexes = [
-            {"key": [("_id", ASCENDING)], "name": "_id_"},
-            {"key": [("context_id", HASHED)], "name": "context_id_hashed"},
-            {"key": [("name", ASCENDING)], "name": "name_1"},
-            {"key": [("created_at", DESCENDING)], "name": "created_at_-1"},
-            {"key": [("name", ASCENDING), ("created_at", DESCENDING)], "name": "name_1_created_at_-1"},
-        ]
-
-        # Indexe erstellen
-        for index in indexes:
-            collection.create_index(index["key"], name=index["name"])
-            self.stdout.write(self.style.SUCCESS(f"Index '{index['name']}' created successfully"))
-
-        self.stdout.write(self.style.SUCCESS("All indexes have been created!"))
+        # Iterate through each collection and create indexes
+        for collection_name, indexes in collections_indexes.items():
+            collection = lrs_db[collection_name]
+            self.stdout.write(self.style.SUCCESS(f"Creating indexes for collection: {collection_name}"))
+            for index in indexes:
+                collection.create_index(index["key"], name=index["name"])
+                self.stdout.write(self.style.SUCCESS(
+                    f"Index '{index['name']}' created successfully in collection '{collection_name}'"
+                ))
+        
+        self.stdout.write(self.style.SUCCESS("All indexes have been created for both collections!"))
diff --git a/src/xapi/views.py b/src/xapi/views.py
index c198fb8f6592fa7d752ae1040e427d72f14e8014..e5420efb80c71da0327ff17e64d3fbdb33b377cf 100644
--- a/src/xapi/views.py
+++ b/src/xapi/views.py
@@ -514,68 +514,75 @@ class CreateTANStatement(APIView):
                 status=status.HTTP_200_OK,
             )
 
-
-
 class StatisticView(APIView):
 
-    def get(self,request):
-
+    def get(self, request):
         # Get the user count from Django's user model
         user_count = CustomUser.objects.count()
 
-        collection = lrs_db["statements"]
-        statement_count = collection.count_documents({})  # Count all documents in the collection
-
-        result_collection = lrs_db["results"]
-        result_count = result_collection.count_documents({})  # Count all documents in the collection
-
-       # Get statement history (count per day for the last 7 days)
-        end_date = datetime.datetime.utcnow()
-        start_date = (end_date - timedelta(days=7))
-        
+        # Use estimated_document_count for fast counts
+        statements_collection = lrs_db["statements"]
+        statement_count = statements_collection.estimated_document_count()
+        results_collection = lrs_db["results"]
+        result_count = results_collection.estimated_document_count()
+
+        # Calculate the date range for the last 7 days
+        end_date = datetime.datetime.utcnow() 
+        start_date = end_date - timedelta(days=7)
+
+        # Aggregation pipeline:
+        # 1. Convert 'stored' to a Date (if needed)
+        # 2. Filter documents in the date range
+        # 3. Group by year, month, and day and sum counts
+        # 4. Sort by date
         pipeline = [
-    {
-        "$addFields": {
-            "stored_date": {"$toDate": "$stored"}
-        }
-    },
-    {
-        "$match": {
-            "stored_date": {"$gte": start_date, "$lte": end_date}
-        }
-    },
-    {
-        "$group": {
-            "_id": {
-                "year": {"$year": "$stored_date"},
-                "month": {"$month": "$stored_date"},
-                "day": {"$dayOfMonth": "$stored_date"},
+            {
+                "$addFields": {
+                    "stored_date": {"$toDate": "$stored"}
+                }
             },
-            "count": {"$sum": 1}
-        }
-    },
-    {"$sort": {"_id.year": 1, "_id.month": 1, "_id.day": 1}},
+            {
+                "$match": {
+                    "stored_date": {"$gte": start_date, "$lte": end_date}
+                }
+            },
+            {
+                "$group": {
+                    "_id": {
+                        "year": {"$year": "$stored_date"},
+                        "month": {"$month": "$stored_date"},
+                        "day": {"$dayOfMonth": "$stored_date"}
+                    },
+                    "count": {"$sum": 1}
+                }
+            },
+            {
+                "$sort": {
+                    "_id.year": 1,
+                    "_id.month": 1,
+                    "_id.day": 1
+                }
+            }
         ]
 
+        # Run the aggregation; using a dictionary comprehension to build a date->count map
+        history_data = {
+            f"{entry['_id']['year']}-{entry['_id']['month']:02d}-{entry['_id']['day']:02d}": entry["count"]
+            for entry in statements_collection.aggregate(pipeline)
+        }
 
-        history_data = list(collection.aggregate(pipeline))
-
-        date_counts = {f"{(start_date + timedelta(days=i)).strftime('%Y-%m-%d')}": 0 for i in range(7)}
-        
-        for entry in history_data:
-            key = f"{entry['_id']['year']}-{entry['_id']['month']:02d}-{entry['_id']['day']:02d}"
-            date_counts[key] = entry["count"]
-        
-        statement_history = list(date_counts.values())  
-
-        return JsonResponse(
-            {
-                "message": "statistics collected",
-                "user_count": user_count,
-                "statement_count": statement_count,
-                "result_count": result_count,
-                "statement_history": statement_history
-            },
-            safe=False,
-            status=status.HTTP_200_OK,
-        )
\ No newline at end of file
+        # Build a complete 7-day history (fill missing dates with 0)
+        date_counts = {}
+        for i in range(7):
+            date_str = (end_date - timedelta(days=6 - i)).strftime('%Y-%m-%d')
+            date_counts[date_str] = history_data.get(date_str, 0)
+
+        statement_history = list(date_counts.values())
+
+        return JsonResponse({
+            "message": "statistics collected",
+            "user_count": user_count,
+            "statement_count": statement_count,
+            "result_count": result_count,
+            "statement_history": statement_history
+        }, safe=False, status=status.HTTP_200_OK)