Skip to content
GitLab
Explore
Sign in
Primary navigation
Search or go to…
Project
R
Rights Engine
Manage
Activity
Members
Labels
Plan
Issues
Issue boards
Milestones
Iterations
Wiki
Requirements
Code
Merge requests
Repository
Branches
Commits
Tags
Repository graph
Compare revisions
Snippets
Locked files
Build
Pipelines
Jobs
Pipeline schedules
Test cases
Artifacts
Deploy
Releases
Package registry
Container registry
Model registry
Operate
Environments
Terraform modules
Monitor
Incidents
Service Desk
Analyze
Value stream analytics
Contributor analytics
CI/CD analytics
Repository analytics
Code review analytics
Issue analytics
Insights
Model experiments
Help
Help
Support
GitLab documentation
Compare GitLab plans
Community forum
Contribute to GitLab
Provide feedback
Terms and privacy
Keyboard shortcuts
?
Snippets
Groups
Projects
Show more breadcrumbs
Polaris
Rights Engine
Commits
537c2a09
Commit
537c2a09
authored
2 months ago
by
Benjamin Ledel
Browse files
Options
Downloads
Patches
Plain Diff
* fix long loading of statistics
parent
f08938e8
No related branches found
No related tags found
No related merge requests found
Pipeline
#1636517
passed
2 months ago
Stage: test
Stage: build
Stage: deploy
Pipeline: FIRST_START
#1636521
Changes
2
Pipelines
1
Hide whitespace changes
Inline
Side-by-side
Showing
2 changed files
src/backend/management/commands/create_mongo_index.py
+29
-20
29 additions, 20 deletions
src/backend/management/commands/create_mongo_index.py
src/xapi/views.py
+62
-55
62 additions, 55 deletions
src/xapi/views.py
with
91 additions
and
75 deletions
src/backend/management/commands/create_mongo_index.py
+
29
−
20
View file @
537c2a09
from django.core.management.base import BaseCommand
from pymongo import ASCENDING, DESCENDING, HASHED

from backend.utils import lrs_db


class Command(BaseCommand):
    """Management command that creates the MongoDB indexes needed by the
    statistics endpoints.

    Indexes are created for two collections:
      * ``results``   — lookup by ``context_id``/``name`` and sorting by
        ``created_at``
      * ``statement`` — range filtering on ``timestamp`` and sorting by
        ``stored``
    """

    help = "Create indexes for the MongoDB 'results' and 'statement' collections"

    def handle(self, *args, **kwargs):
        # Define each collection together with the indexes it needs.
        # NOTE(review): src/xapi/views.py reads lrs_db["statements"] (plural)
        # while the key below is "statement" — confirm which collection name
        # is actually populated, otherwise these indexes index an empty
        # collection.
        collections_indexes = {
            "results": [
                {"key": [("_id", ASCENDING)], "name": "_id_"},
                {"key": [("context_id", HASHED)], "name": "context_id_hashed"},
                {"key": [("name", ASCENDING)], "name": "name_1"},
                {"key": [("created_at", DESCENDING)], "name": "created_at_-1"},
                {
                    "key": [("name", ASCENDING), ("created_at", DESCENDING)],
                    "name": "name_1_created_at_-1",
                },
            ],
            "statement": [
                {"key": [("_id", ASCENDING)], "name": "_id_"},
                {"key": [("timestamp", ASCENDING)], "name": "timestamp_1"},
                {"key": [("stored", DESCENDING)], "name": "stored_-1"},
                {
                    "key": [("timestamp", ASCENDING), ("stored", DESCENDING)],
                    "name": "timestamp_1_stored_-1",
                },
            ],
        }

        # Iterate through each collection and create its indexes.
        for collection_name, indexes in collections_indexes.items():
            collection = lrs_db[collection_name]
            self.stdout.write(
                self.style.SUCCESS(f"Creating indexes for collection: {collection_name}")
            )
            for index in indexes:
                # create_index is idempotent, so re-running this command
                # against an already-indexed collection is safe.
                collection.create_index(index["key"], name=index["name"])
                self.stdout.write(
                    self.style.SUCCESS(
                        f"Index '{index['name']}' created successfully in collection '{collection_name}'"
                    )
                )

        self.stdout.write(
            self.style.SUCCESS("All indexes have been created for both collections!")
        )
This diff is collapsed.
Click to expand it.
src/xapi/views.py
+
62
−
55
View file @
537c2a09
...
...
@@ -514,68 +514,75 @@ class CreateTANStatement(APIView):
status
=
status
.
HTTP_200_OK
,
)
class StatisticView(APIView):
    """Dashboard statistics endpoint.

    Returns the user count, total statement/result counts, and a 7-day
    history of statement counts (one integer per day, oldest first).
    """

    def get(self, request):
        # Get the user count from Django's user model.
        user_count = CustomUser.objects.count()

        # Use estimated_document_count() (reads collection metadata) instead
        # of count_documents({}) (collection scan) — the scan was the cause
        # of the slow statistics page.
        # NOTE(review): create_mongo_index.py indexes a "statement"
        # (singular) collection while this view reads "statements" — confirm
        # which name is correct.
        statements_collection = lrs_db["statements"]
        statement_count = statements_collection.estimated_document_count()

        results_collection = lrs_db["results"]
        result_count = results_collection.estimated_document_count()

        # Date range for the statement history (last 7 days up to now).
        end_date = datetime.datetime.utcnow()
        start_date = end_date - timedelta(days=7)

        # Aggregation pipeline:
        #   1. Convert 'stored' to a Date (handles string-stored timestamps)
        #   2. Keep only documents inside the date range
        #   3. Group by year/month/day and count
        #   4. Sort chronologically
        pipeline = [
            {"$addFields": {"stored_date": {"$toDate": "$stored"}}},
            {"$match": {"stored_date": {"$gte": start_date, "$lte": end_date}}},
            {
                "$group": {
                    "_id": {
                        "year": {"$year": "$stored_date"},
                        "month": {"$month": "$stored_date"},
                        "day": {"$dayOfMonth": "$stored_date"},
                    },
                    "count": {"$sum": 1},
                }
            },
            {"$sort": {"_id.year": 1, "_id.month": 1, "_id.day": 1}},
        ]

        # Map "YYYY-MM-DD" -> count for every day that has statements.
        history_data = {
            f"{entry['_id']['year']}-{entry['_id']['month']:02d}-{entry['_id']['day']:02d}": entry["count"]
            for entry in statements_collection.aggregate(pipeline)
        }

        # Build a complete 7-day history ending today, filling missing days
        # with 0.  Bug fix: the previous version generated the keys from
        # start_date forward (days 7..1 ago), so the current day — included
        # in the $match window above — was always dropped from the output.
        date_counts = {}
        for offset in range(6, -1, -1):
            date_str = (end_date - timedelta(days=offset)).strftime("%Y-%m-%d")
            date_counts[date_str] = history_data.get(date_str, 0)

        statement_history = list(date_counts.values())

        return JsonResponse(
            {
                "message": "statistics collected",
                "user_count": user_count,
                "statement_count": statement_count,
                "result_count": result_count,
                "statement_history": statement_history,
            },
            safe=False,
            status=status.HTTP_200_OK,
        )
This diff is collapsed.
Click to expand it.
Preview
0%
Loading
Try again
or
attach a new file
.
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Save comment
Cancel
Please
register
or
sign in
to comment