Mirror of https://gitlab.com/allianceauth/allianceauth.git (synced 2025-07-09 12:30:15 +02:00)
Improve celery infos on Dashboard
parent ff7c9c48f3
commit 01164777ed
.gitignore (vendored) | 1 +

@@ -76,3 +76,4 @@ celerybeat-schedule
 .flake8
 .pylintrc
 Makefile
+.isort.cfg
@@ -3,10 +3,14 @@ from django.core.checks import register, Tags


 class AuthenticationConfig(AppConfig):
-    name = 'allianceauth.authentication'
-    label = 'authentication'
+    name = "allianceauth.authentication"
+    label = "authentication"

     def ready(self):
         super().ready()
-        from allianceauth.authentication import checks, signals  # noqa: F401
+        from allianceauth.authentication import checks, signals
+        from allianceauth.authentication.task_statistics import (
+            signals as celery_signals,
+        )
         register(Tags.security)(checks.check_login_scopes_setting)
+        celery_signals.reset_counters()
allianceauth/authentication/task_statistics/event_series.py (new file, 153 lines)

@@ -0,0 +1,153 @@
import datetime as dt
from collections import namedtuple
from typing import Optional, List

from redis import Redis
from pytz import utc

from django.core.cache import cache

_TaskCounts = namedtuple(
    "_TaskCounts", ["succeeded", "retried", "failed", "total", "earliest_task", "hours"]
)


def dashboard_results(hours: int) -> _TaskCounts:
    """Counts of all task events within the given timeframe."""
    def earliest_if_exists(events: EventSeries, earliest: dt.datetime) -> list:
        my_earliest = events.first_event(earliest=earliest)
        return [my_earliest] if my_earliest else []

    earliest = dt.datetime.utcnow() - dt.timedelta(hours=hours)
    earliest_events = list()
    succeeded = SucceededTaskSeries()
    succeeded_count = succeeded.count(earliest=earliest)
    earliest_events += earliest_if_exists(succeeded, earliest)
    retried = RetriedTaskSeries()
    retried_count = retried.count(earliest=earliest)
    earliest_events += earliest_if_exists(retried, earliest)
    failed = FailedTaskSeries()
    failed_count = failed.count(earliest=earliest)
    earliest_events += earliest_if_exists(failed, earliest)
    return _TaskCounts(
        succeeded=succeeded_count,
        retried=retried_count,
        failed=failed_count,
        total=succeeded_count + retried_count + failed_count,
        earliest_task=min(earliest_events) if earliest_events else None,
        hours=hours,
    )


class EventSeries:
    """Base class for recording and analysing a series of events.

    This class must be inherited from and the child class must define KEY_ID.
    """

    _ROOT_KEY = "ALLIANCEAUTH_TASK_SERIES"

    def __init__(
        self,
        redis: Redis = None,
    ) -> None:
        if type(self) == EventSeries:
            raise TypeError("Can not instantiate base class.")
        if not hasattr(self, "KEY_ID"):
            raise ValueError("KEY_ID not defined")
        self._redis = cache.get_master_client() if not redis else redis
        if not isinstance(self._redis, Redis):
            raise TypeError(
                "This class requires a Redis client, but none was provided "
                "and the default Django cache backend is not Redis either."
            )

    @property
    def _key_counter(self):
        return f"{self._ROOT_KEY}_{self.KEY_ID}_COUNTER"

    @property
    def _key_sorted_set(self):
        return f"{self._ROOT_KEY}_{self.KEY_ID}_SORTED_SET"

    def add(self, event_time: dt.datetime = None) -> None:
        """Add event.

        Args:
        - event_time: timestamp of event. Will use current time if not specified.
        """
        if not event_time:
            event_time = dt.datetime.utcnow()
        id = self._redis.incr(self._key_counter)
        self._redis.zadd(self._key_sorted_set, {id: event_time.timestamp()})

    def all(self) -> List[dt.datetime]:
        """List of all known events."""
        return [
            event[1]
            for event in self._redis.zrangebyscore(
                self._key_sorted_set,
                "-inf",
                "+inf",
                withscores=True,
                score_cast_func=self._cast_scores_to_dt,
            )
        ]

    def clear(self) -> None:
        """Clear all events."""
        self._redis.delete(self._key_sorted_set)
        self._redis.delete(self._key_counter)

    def count(self, earliest: dt.datetime = None, latest: dt.datetime = None) -> int:
        """Count of events, can be restricted to given timeframe.

        Args:
        - earliest: Date of first events to count (inclusive), or -infinite if not specified
        - latest: Date of last events to count (inclusive), or +infinite if not specified
        """
        min = "-inf" if not earliest else earliest.timestamp()
        max = "+inf" if not latest else latest.timestamp()
        return self._redis.zcount(self._key_sorted_set, min=min, max=max)

    def first_event(self, earliest: dt.datetime = None) -> Optional[dt.datetime]:
        """Date/Time of first event. Returns `None` if series has no events.

        Args:
        - earliest: Date of first events to count (inclusive), or any if not specified
        """
        min = "-inf" if not earliest else earliest.timestamp()
        event = self._redis.zrangebyscore(
            self._key_sorted_set,
            min,
            "+inf",
            withscores=True,
            start=0,
            num=1,
            score_cast_func=self._cast_scores_to_dt,
        )
        if not event:
            return None
        return event[0][1]

    @staticmethod
    def _cast_scores_to_dt(score) -> dt.datetime:
        return dt.datetime.fromtimestamp(float(score), tz=utc)


class SucceededTaskSeries(EventSeries):
    """A task has succeeded."""

    KEY_ID = "SUCCEEDED"


class RetriedTaskSeries(EventSeries):
    """A task has been retried."""

    KEY_ID = "RETRIED"


class FailedTaskSeries(EventSeries):
    """A task has failed."""

    KEY_ID = "FAILED"
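For illustration, a minimal usage sketch of the new EventSeries API (not part of this commit). It assumes the default Django cache backend is Redis, as the class requires; `DemoTaskSeries` and its `KEY_ID` are made-up example names.

```python
import datetime as dt

from pytz import utc

from allianceauth.authentication.task_statistics.event_series import (
    EventSeries,
    dashboard_results,
)


class DemoTaskSeries(EventSeries):
    # hypothetical subclass for illustration; every child class must define KEY_ID
    KEY_ID = "DEMO"


series = DemoTaskSeries()
series.add()  # record an event with the current time
series.add(dt.datetime(2021, 12, 1, 12, 0, tzinfo=utc))  # or with an explicit timestamp

# count events recorded within the last hour
print(series.count(earliest=dt.datetime.utcnow() - dt.timedelta(hours=1)))

# aggregated succeeded/retried/failed counts, as used by the dashboard
print(dashboard_results(hours=24))
```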
allianceauth/authentication/task_statistics/signals.py (new file, 42 lines)

@@ -0,0 +1,42 @@
from celery.signals import task_failure, task_retry, task_success, worker_ready

from django.conf import settings

from .event_series import FailedTaskSeries, RetriedTaskSeries, SucceededTaskSeries


def reset_counters():
    """Reset all counters for the celery status."""
    SucceededTaskSeries().clear()
    FailedTaskSeries().clear()
    RetriedTaskSeries().clear()


def is_enabled() -> bool:
    return not bool(
        getattr(settings, "ALLIANCEAUTH_DASHBOARD_TASK_STATISTICS_DISABLED", False)
    )


@worker_ready.connect
def reset_counters_when_celery_restarted(*args, **kwargs):
    if is_enabled():
        reset_counters()


@task_success.connect
def record_task_succeeded(*args, **kwargs):
    if is_enabled():
        SucceededTaskSeries().add()


@task_retry.connect
def record_task_retried(*args, **kwargs):
    if is_enabled():
        RetriedTaskSeries().add()


@task_failure.connect
def record_task_failed(*args, **kwargs):
    if is_enabled():
        FailedTaskSeries().add()
New file (+222 lines):

@@ -0,0 +1,222 @@
import datetime as dt

from pytz import utc
from django.test import TestCase
from django.utils.timezone import now

from allianceauth.authentication.task_statistics.event_series import (
    EventSeries,
    FailedTaskSeries,
    RetriedTaskSeries,
    SucceededTaskSeries,
    dashboard_results,
)


class TestEventSeries(TestCase):
    """Testing EventSeries class."""

    class IncompleteEvents(EventSeries):
        """Child class without KEY ID"""

    class MyEventSeries(EventSeries):
        KEY_ID = "TEST"

    def test_should_create_object(self):
        # when
        events = self.MyEventSeries()
        # then
        self.assertIsInstance(events, self.MyEventSeries)

    def test_should_abort_when_redis_client_invalid(self):
        with self.assertRaises(TypeError):
            self.MyEventSeries(redis="invalid")

    def test_should_not_allow_instantiation_of_base_class(self):
        with self.assertRaises(TypeError):
            EventSeries()

    def test_should_not_allow_creating_child_class_without_key_id(self):
        with self.assertRaises(ValueError):
            self.IncompleteEvents()

    def test_should_add_event(self):
        # given
        events = self.MyEventSeries()
        events.clear()
        # when
        events.add()
        # then
        result = events.all()
        self.assertEqual(len(result), 1)
        self.assertAlmostEqual(result[0], now(), delta=dt.timedelta(seconds=30))

    def test_should_add_event_with_specified_time(self):
        # given
        events = self.MyEventSeries()
        events.clear()
        my_time = dt.datetime(2021, 11, 1, 12, 15, tzinfo=utc)
        # when
        events.add(my_time)
        # then
        result = events.all()
        self.assertEqual(len(result), 1)
        self.assertAlmostEqual(result[0], my_time, delta=dt.timedelta(seconds=30))

    def test_should_count_events(self):
        # given
        events = self.MyEventSeries()
        events.clear()
        events.add()
        events.add()
        # when
        result = events.count()
        # then
        self.assertEqual(result, 2)

    def test_should_count_zero(self):
        # given
        events = self.MyEventSeries()
        events.clear()
        # when
        result = events.count()
        # then
        self.assertEqual(result, 0)

    def test_should_count_events_within_timeframe_1(self):
        # given
        events = self.MyEventSeries()
        events.clear()
        events.add(dt.datetime(2021, 12, 1, 12, 0, tzinfo=utc))
        events.add(dt.datetime(2021, 12, 1, 12, 10, tzinfo=utc))
        events.add(dt.datetime(2021, 12, 1, 12, 15, tzinfo=utc))
        events.add(dt.datetime(2021, 12, 1, 12, 30, tzinfo=utc))
        # when
        result = events.count(
            earliest=dt.datetime(2021, 12, 1, 12, 8, tzinfo=utc),
            latest=dt.datetime(2021, 12, 1, 12, 17, tzinfo=utc),
        )
        # then
        self.assertEqual(result, 2)

    def test_should_count_events_within_timeframe_2(self):
        # given
        events = self.MyEventSeries()
        events.clear()
        events.add(dt.datetime(2021, 12, 1, 12, 0, tzinfo=utc))
        events.add(dt.datetime(2021, 12, 1, 12, 10, tzinfo=utc))
        events.add(dt.datetime(2021, 12, 1, 12, 15, tzinfo=utc))
        events.add(dt.datetime(2021, 12, 1, 12, 30, tzinfo=utc))
        # when
        result = events.count(earliest=dt.datetime(2021, 12, 1, 12, 8))
        # then
        self.assertEqual(result, 3)

    def test_should_count_events_within_timeframe_3(self):
        # given
        events = self.MyEventSeries()
        events.clear()
        events.add(dt.datetime(2021, 12, 1, 12, 0, tzinfo=utc))
        events.add(dt.datetime(2021, 12, 1, 12, 10, tzinfo=utc))
        events.add(dt.datetime(2021, 12, 1, 12, 15, tzinfo=utc))
        events.add(dt.datetime(2021, 12, 1, 12, 30, tzinfo=utc))
        # when
        result = events.count(latest=dt.datetime(2021, 12, 1, 12, 12))
        # then
        self.assertEqual(result, 2)

    def test_should_clear_events(self):
        # given
        events = self.MyEventSeries()
        events.clear()
        events.add()
        events.add()
        # when
        events.clear()
        # then
        self.assertEqual(events.count(), 0)

    def test_should_return_date_of_first_event(self):
        # given
        events = self.MyEventSeries()
        events.clear()
        events.add(dt.datetime(2021, 12, 1, 12, 0, tzinfo=utc))
        events.add(dt.datetime(2021, 12, 1, 12, 10, tzinfo=utc))
        events.add(dt.datetime(2021, 12, 1, 12, 15, tzinfo=utc))
        events.add(dt.datetime(2021, 12, 1, 12, 30, tzinfo=utc))
        # when
        result = events.first_event()
        # then
        self.assertEqual(result, dt.datetime(2021, 12, 1, 12, 0, tzinfo=utc))

    def test_should_return_date_of_first_event_with_range(self):
        # given
        events = self.MyEventSeries()
        events.clear()
        events.add(dt.datetime(2021, 12, 1, 12, 0, tzinfo=utc))
        events.add(dt.datetime(2021, 12, 1, 12, 10, tzinfo=utc))
        events.add(dt.datetime(2021, 12, 1, 12, 15, tzinfo=utc))
        events.add(dt.datetime(2021, 12, 1, 12, 30, tzinfo=utc))
        # when
        result = events.first_event(
            earliest=dt.datetime(2021, 12, 1, 12, 8, tzinfo=utc)
        )
        # then
        self.assertEqual(result, dt.datetime(2021, 12, 1, 12, 10, tzinfo=utc))

    def test_should_return_all_events(self):
        # given
        events = self.MyEventSeries()
        events.clear()
        events.add()
        events.add()
        # when
        results = events.all()
        # then
        self.assertEqual(len(results), 2)


class TestDashboardResults(TestCase):
    def test_should_return_counts_for_given_timeframe_only(self):
        # given
        earliest_task = now() - dt.timedelta(minutes=15)
        succeeded = SucceededTaskSeries()
        succeeded.clear()
        succeeded.add(now() - dt.timedelta(hours=1, seconds=1))
        succeeded.add(earliest_task)
        succeeded.add()
        succeeded.add()
        retried = RetriedTaskSeries()
        retried.clear()
        retried.add(now() - dt.timedelta(hours=1, seconds=1))
        retried.add(now() - dt.timedelta(seconds=30))
        retried.add()
        failed = FailedTaskSeries()
        failed.clear()
        failed.add(now() - dt.timedelta(hours=1, seconds=1))
        failed.add()
        # when
        results = dashboard_results(hours=1)
        # then
        self.assertEqual(results.succeeded, 3)
        self.assertEqual(results.retried, 2)
        self.assertEqual(results.failed, 1)
        self.assertEqual(results.total, 6)
        self.assertEqual(results.earliest_task, earliest_task)

    def test_should_work_with_no_data(self):
        # given
        succeeded = SucceededTaskSeries()
        succeeded.clear()
        retried = RetriedTaskSeries()
        retried.clear()
        failed = FailedTaskSeries()
        failed.clear()
        # when
        results = dashboard_results(hours=1)
        # then
        self.assertEqual(results.succeeded, 0)
        self.assertEqual(results.retried, 0)
        self.assertEqual(results.failed, 0)
        self.assertEqual(results.total, 0)
        self.assertIsNone(results.earliest_task)
New file (+93 lines):

@@ -0,0 +1,93 @@
from unittest.mock import patch

from celery.exceptions import Retry

from django.test import TestCase, override_settings

from allianceauth.authentication.task_statistics.event_series import (
    FailedTaskSeries,
    RetriedTaskSeries,
    SucceededTaskSeries,
)
from allianceauth.authentication.task_statistics.signals import (
    reset_counters,
    is_enabled,
)
from allianceauth.eveonline.tasks import update_character


@override_settings(
    CELERY_ALWAYS_EAGER=True, ALLIANCEAUTH_DASHBOARD_TASK_STATISTICS_DISABLED=False
)
class TestTaskSignals(TestCase):
    fixtures = ["disable_analytics"]

    def test_should_record_successful_task(self):
        # given
        events = SucceededTaskSeries()
        events.clear()
        # when
        with patch(
            "allianceauth.eveonline.tasks.EveCharacter.objects.update_character"
        ) as mock_update:
            mock_update.return_value = None
            update_character.delay(1)
        # then
        self.assertEqual(events.count(), 1)

    def test_should_record_retried_task(self):
        # given
        events = RetriedTaskSeries()
        events.clear()
        # when
        with patch(
            "allianceauth.eveonline.tasks.EveCharacter.objects.update_character"
        ) as mock_update:
            mock_update.side_effect = Retry
            update_character.delay(1)
        # then
        self.assertEqual(events.count(), 1)

    def test_should_record_failed_task(self):
        # given
        events = FailedTaskSeries()
        events.clear()
        # when
        with patch(
            "allianceauth.eveonline.tasks.EveCharacter.objects.update_character"
        ) as mock_update:
            mock_update.side_effect = RuntimeError
            update_character.delay(1)
        # then
        self.assertEqual(events.count(), 1)


@override_settings(ALLIANCEAUTH_DASHBOARD_TASK_STATISTICS_DISABLED=False)
class TestResetCounters(TestCase):
    def test_should_reset_counters(self):
        # given
        succeeded = SucceededTaskSeries()
        succeeded.clear()
        succeeded.add()
        retried = RetriedTaskSeries()
        retried.clear()
        retried.add()
        failed = FailedTaskSeries()
        failed.clear()
        failed.add()
        # when
        reset_counters()
        # then
        self.assertEqual(succeeded.count(), 0)
        self.assertEqual(retried.count(), 0)
        self.assertEqual(failed.count(), 0)


class TestIsEnabled(TestCase):
    @override_settings(ALLIANCEAUTH_DASHBOARD_TASK_STATISTICS_DISABLED=False)
    def test_enabled(self):
        self.assertTrue(is_enabled())

    @override_settings(ALLIANCEAUTH_DASHBOARD_TASK_STATISTICS_DISABLED=True)
    def test_disabled(self):
        self.assertFalse(is_enabled())
@@ -188,7 +188,7 @@ class TestCaseWithTestData(TestCase):
             corporation_id=5432,
             corporation_name="Xavier's School for Gifted Youngsters",
             corporation_ticker='MUTNT',
-            alliance_id = None,
+            alliance_id=None,
             faction_id=999,
             faction_name='The X-Men',
         )
@@ -206,6 +206,7 @@ class TestCaseWithTestData(TestCase):
         cls.user_4.profile.save()
         EveFactionInfo.objects.create(faction_id=999, faction_name='The X-Men')

+
 def make_generic_search_request(ModelClass: type, search_term: str):
     User.objects.create_superuser(
         username='superuser', password='secret', email='admin@example.com'
@@ -218,6 +219,7 @@ def make_generic_search_request(ModelClass: type, search_term: str):


 class TestCharacterOwnershipAdmin(TestCaseWithTestData):
+    fixtures = ["disable_analytics"]

     def setUp(self):
         self.modeladmin = CharacterOwnershipAdmin(
@@ -244,6 +246,7 @@ class TestCharacterOwnershipAdmin(TestCaseWithTestData):


 class TestOwnershipRecordAdmin(TestCaseWithTestData):
+    fixtures = ["disable_analytics"]

     def setUp(self):
         self.modeladmin = OwnershipRecordAdmin(
@@ -270,6 +273,7 @@ class TestOwnershipRecordAdmin(TestCaseWithTestData):


 class TestStateAdmin(TestCaseWithTestData):
+    fixtures = ["disable_analytics"]

     def setUp(self):
         self.modeladmin = StateAdmin(
@@ -299,6 +303,7 @@ class TestStateAdmin(TestCaseWithTestData):


 class TestUserAdmin(TestCaseWithTestData):
+    fixtures = ["disable_analytics"]

     def setUp(self):
         self.factory = RequestFactory()
@@ -344,7 +349,7 @@ class TestUserAdmin(TestCaseWithTestData):
         self.assertEqual(user_main_organization(self.user_3), expected)

     def test_user_main_organization_u4(self):
-        expected="Xavier's School for Gifted Youngsters<br>The X-Men"
+        expected = "Xavier's School for Gifted Youngsters<br>The X-Men"
         self.assertEqual(user_main_organization(self.user_4), expected)

     def test_characters_u1(self):
@@ -55,7 +55,6 @@ TEST_VERSION = '2.6.5'


 class TestStatusOverviewTag(TestCase):
-
     @patch(MODULE_PATH + '.admin_status.__version__', TEST_VERSION)
     @patch(MODULE_PATH + '.admin_status._fetch_celery_queue_length')
     @patch(MODULE_PATH + '.admin_status._current_version_summary')
@@ -66,6 +65,7 @@ class TestStatusOverviewTag(TestCase):
         mock_current_version_info,
         mock_fetch_celery_queue_length
     ):
+        # given
         notifications = {
             'notifications': GITHUB_NOTIFICATION_ISSUES[:5]
         }
@@ -83,22 +83,20 @@ class TestStatusOverviewTag(TestCase):
         }
         mock_current_version_info.return_value = version_info
         mock_fetch_celery_queue_length.return_value = 3
+        # when
         result = status_overview()
-        expected = {
-            'notifications': GITHUB_NOTIFICATION_ISSUES[:5],
-            'latest_major': True,
-            'latest_minor': True,
-            'latest_patch': True,
-            'latest_beta': False,
-            'current_version': TEST_VERSION,
-            'latest_major_version': '2.4.5',
-            'latest_minor_version': '2.4.0',
-            'latest_patch_version': '2.4.5',
-            'latest_beta_version': '2.4.4a1',
-            'task_queue_length': 3,
-        }
-        self.assertEqual(result, expected)
-
+        # then
+        self.assertEqual(result["notifications"], GITHUB_NOTIFICATION_ISSUES[:5])
+        self.assertTrue(result["latest_major"])
+        self.assertTrue(result["latest_minor"])
+        self.assertTrue(result["latest_patch"])
+        self.assertFalse(result["latest_beta"])
+        self.assertEqual(result["current_version"], TEST_VERSION)
+        self.assertEqual(result["latest_major_version"], '2.4.5')
+        self.assertEqual(result["latest_minor_version"], '2.4.0')
+        self.assertEqual(result["latest_patch_version"], '2.4.5')
+        self.assertEqual(result["latest_beta_version"], '2.4.4a1')
+        self.assertEqual(result["task_queue_length"], 3)


 class TestNotifications(TestCase):
@@ -95,6 +95,11 @@ ul.list-group.list-group-horizontal > li.list-group-item {
 .table-aa > tbody > tr:last-child {
     border-bottom: none;
 }
+
+.task-status-progress-bar {
+    font-size: 15px!important;
+    line-height: normal!important;
+}
 }

 /* highlight active menu items
New file (+12 lines):

@@ -0,0 +1,12 @@
{% load humanize %}

<div
    class="progress-bar progress-bar-{{ level }} task-status-progress-bar"
    role="progressbar"
    aria-valuenow="{% widthratio tasks_count tasks_total 100 %}"
    aria-valuemin="0"
    aria-valuemax="100"
    style="width: {% widthratio tasks_count tasks_total 100 %}%;"
    title="{{ tasks_count|intcomma }} {{ label }}">
    {% widthratio tasks_count tasks_total 100 %}%
</div>
@@ -1,4 +1,6 @@
 {% load i18n %}
+{% load humanize %}
+
 <div class="col-sm-12">
     <div class="row vertical-flexbox-row2">
         <div class="col-sm-6">
@@ -75,29 +77,20 @@
 <div class="panel panel-primary" style="height:50%;">
     <div class="panel-heading text-center"><h3 class="panel-title">{% translate "Task Queue" %}</h3></div>
     <div class="panel-body flex-center-horizontal">
+        <p>
+            {% blocktranslate with total=tasks_total|intcomma latest=earliest_task|timesince|default_if_none:"?" %}
+            Status of {{ total }} processed tasks • last {{ latest }}</p>
+            {% endblocktranslate %}
         <div class="progress" style="height: 21px;">
-            <div class="progress-bar
-                {% if task_queue_length > 500 %}
-                    progress-bar-danger
-                {% elif task_queue_length > 100 %}
-                    progress-bar-warning
-                {% else %}
-                    progress-bar-success
-                {% endif %}
-                " role="progressbar" aria-valuenow="{% widthratio task_queue_length 500 100 %}"
-                aria-valuemin="0" aria-valuemax="100"
-                style="width: {% widthratio task_queue_length 500 100 %}%;">
-            </div>
+            {% include "allianceauth/admin-status/celery_bar_partial.html" with label="suceeded" level="success" tasks_count=tasks_succeeded %}
+            {% include "allianceauth/admin-status/celery_bar_partial.html" with label="retried" level="info" tasks_count=tasks_retried %}
+            {% include "allianceauth/admin-status/celery_bar_partial.html" with label="failed" level="danger" tasks_count=tasks_failed %}
         </div>
-        {% if task_queue_length < 0 %}
-            {% translate "Error retrieving task queue length" %}
-        {% else %}
-            {% blocktrans trimmed count tasks=task_queue_length %}
-                {{ tasks }} task
-            {% plural %}
-                {{ tasks }} tasks
-            {% endblocktrans %}
-        {% endif %}
+        <p>
+            {% blocktranslate with queue_length=task_queue_length|default_if_none:"?"|intcomma %}
+            {{ queue_length }} queued tasks
+            {% endblocktranslate %}
+        </p>
     </div>
 </div>
 </div>
@@ -1,9 +1,11 @@
 import logging
+from typing import Optional

-import requests
 import amqp.exceptions
-from packaging.version import Version as Pep440Version, InvalidVersion
+import requests
 from celery.app import app_or_default
+from packaging.version import InvalidVersion
+from packaging.version import Version as Pep440Version

 from django import template
 from django.conf import settings
@@ -11,6 +13,7 @@ from django.core.cache import cache

 from allianceauth import __version__

+from ..authentication.task_statistics.event_series import dashboard_results

 register = template.Library()

@@ -36,30 +39,51 @@ logger = logging.getLogger(__name__)
 @register.inclusion_tag('allianceauth/admin-status/overview.html')
 def status_overview() -> dict:
     response = {
-        'notifications': list(),
-        'current_version': __version__,
-        'task_queue_length': -1,
+        "notifications": list(),
+        "current_version": __version__,
+        "task_queue_length": None,
+        "tasks_succeeded": 0,
+        "tasks_retried": 0,
+        "tasks_failed": 0,
+        "tasks_total": 0,
+        "tasks_hours": 0,
+        "earliest_task": None
     }
     response.update(_current_notifications())
     response.update(_current_version_summary())
     response.update({'task_queue_length': _fetch_celery_queue_length()})
+    response.update(_celery_stats())
     return response


-def _fetch_celery_queue_length() -> int:
+def _celery_stats() -> dict:
+    hours = getattr(settings, "ALLIANCEAUTH_DASHBOARD_TASKS_MAX_HOURS", 24)
+    results = dashboard_results(hours=hours)
+    return {
+        "tasks_succeeded": results.succeeded,
+        "tasks_retried": results.retried,
+        "tasks_failed": results.failed,
+        "tasks_total": results.total,
+        "tasks_hours": results.hours,
+        "earliest_task": results.earliest_task
+    }
+
+
+def _fetch_celery_queue_length() -> Optional[int]:
     try:
         app = app_or_default(None)
         with app.connection_or_acquire() as conn:
-            return conn.default_channel.queue_declare(
+            result = conn.default_channel.queue_declare(
                 queue=getattr(settings, 'CELERY_DEFAULT_QUEUE', 'celery'),
                 passive=True
-            ).message_count
+            )
+            return result.message_count
     except amqp.exceptions.ChannelError:
         # Queue doesn't exist, probably empty
         return 0
     except Exception:
         logger.exception("Failed to get celery queue length")
-        return -1
+        return None


 def _current_notifications() -> dict:
@@ -7,3 +7,18 @@ The content of the dashboard is specific to the logged in user. It has a sidebar
 For admin users the dashboard shows additional technical information about the AA instance.

 
+
+## Settings
+
+Here is a list of available settings for the dashboard. They can be configured by adding them to your AA settings file (``local.py``).
+Note that all settings are optional and the app will use the documented default settings if they are not used.
+
+```eval_rst
++-----------------------------------------------------+-------------------------------------------------------------------------+-----------+
+| Name                                                | Description                                                             | Default   |
++=====================================================+=========================================================================+===========+
+| ``ALLIANCEAUTH_DASHBOARD_TASKS_MAX_HOURS``          | Statistics will be calculated for task events not older than max hours. | ``24``    |
++-----------------------------------------------------+-------------------------------------------------------------------------+-----------+
+| ``ALLIANCEAUTH_DASHBOARD_TASK_STATISTICS_DISABLED`` | Disables recording of task statistics. Used mainly in development.      | ``False`` |
++-----------------------------------------------------+-------------------------------------------------------------------------+-----------+
+```
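For illustration, a minimal sketch of how these two settings could be overridden in your AA settings file (``local.py``); the values shown are arbitrary examples, not part of this commit:

```python
# local.py (example overrides; both settings are optional)

# Only consider task events from the last 12 hours (default: 24)
ALLIANCEAUTH_DASHBOARD_TASKS_MAX_HOURS = 12

# Turn off recording of task statistics entirely (default: False)
ALLIANCEAUTH_DASHBOARD_TASK_STATISTICS_DISABLED = True
```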
@@ -154,3 +154,5 @@ PASSWORD_HASHERS = [
 ]

 LOGGING = None  # Comment out to enable logging for debugging
+
+ALLIANCEAUTH_DASHBOARD_TASK_STATISTICS_DISABLED = True  # disable for tests
@@ -27,3 +27,5 @@ PASSWORD_HASHERS = [
 ]

 LOGGING = None  # Comment out to enable logging for debugging
+
+ALLIANCEAUTH_DASHBOARD_TASK_STATISTICS_DISABLED = True  # disable for tests