mirror of https://gitlab.com/allianceauth/allianceauth.git (synced 2026-02-04 14:16:21 +01:00)

Compare commits (32 commits):

424246df26
563e2210ef
02a1078005
30107de44e
77a08cd218
e5a09027e5
52b6c5d341
8b895b76b5
babd71702f
3ec3cbdff7
51611e1237
39519bab91
90dc6a4d4c
53ffd7f885
efc7475228
380c41400b
079c12a72e
4f1ebedc44
66822107e3
7856cd5ce4
36b3077caa
1786f3a642
55927c6f15
8fbe0ba45d
1563805ddb
c58ed53369
32128ace1c
7290eaad7e
f23d4f4dd1
ab3f10e6f2
20187cc73e
1f55fbfccc
@@ -5,7 +5,7 @@ manage online service access.
# This will make sure the app is always imported when
# Django starts so that shared_task will use this app.

__version__ = '3.6.0'
__version__ = '3.7.1'
__title__ = 'Alliance Auth'
__url__ = 'https://gitlab.com/allianceauth/allianceauth'
NAME = f'{__title__} v{__version__}'

@@ -65,7 +65,7 @@ class StateBackend(ModelBackend):
# we've seen this character owner before. Re-attach to their old user account
user = records[0].user
if user.profile.main_character:
if ownership.user.profile.main_character.character_id != token.character_id:
if user.profile.main_character.character_id != token.character_id:
## this is an alt, enforce main only due to trust issues in SSO.
if request:
messages.error("Unable to authenticate with this Character, Please log in with the main character associated with this account. Then add this character from the dashboard.")
allianceauth/authentication/core/__init__.py (new file, 0 lines)
allianceauth/authentication/core/celery_workers.py (new file, 48 lines)
@@ -0,0 +1,48 @@
"""API for interacting with celery workers."""

import itertools
import logging
from typing import Optional

from amqp.exceptions import ChannelError
from celery import current_app

from django.conf import settings

logger = logging.getLogger(__name__)


def active_tasks_count() -> Optional[int]:
    """Return count of currently active tasks
    or None if celery workers are not online.
    """
    inspect = current_app.control.inspect()
    return _tasks_count(inspect.active())


def _tasks_count(data: dict) -> Optional[int]:
    """Return count of tasks in data from celery inspect API."""
    try:
        tasks = itertools.chain(*data.values())
    except AttributeError:
        return None
    return len(list(tasks))


def queued_tasks_count() -> Optional[int]:
    """Return count of queued tasks. Return None if there was an error."""
    try:
        with current_app.connection_or_acquire() as conn:
            result = conn.default_channel.queue_declare(
                queue=getattr(settings, "CELERY_DEFAULT_QUEUE", "celery"), passive=True
            )
            return result.message_count

    except ChannelError:
        # Queue doesn't exist, probably empty
        return 0

    except Exception:
        logger.exception("Failed to get celery queue length")

    return None
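For orientation, here is a minimal sketch of how these new helpers could be called, for example from a Django shell; the printed values are illustrative only:

```python
# Minimal usage sketch (assumes a configured Django/Celery environment,
# e.g. run via `python manage.py shell`). Values shown are illustrative.
from allianceauth.authentication.core.celery_workers import (
    active_tasks_count, queued_tasks_count,
)

running = active_tasks_count()   # None when no celery workers are online
queued = queued_tasks_count()    # None when the broker can not be reached

print(f"running: {running}, queued: {queued}")
```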
@@ -4,13 +4,11 @@ import datetime as dt
|
||||
from typing import NamedTuple, Optional
|
||||
|
||||
from .event_series import EventSeries
|
||||
from .helpers import ItemCounter
|
||||
|
||||
# Global series for counting task events.
|
||||
succeeded_tasks = EventSeries("SUCCEEDED_TASKS")
|
||||
retried_tasks = EventSeries("RETRIED_TASKS")
|
||||
failed_tasks = EventSeries("FAILED_TASKS")
|
||||
running_tasks = ItemCounter("running_tasks")
|
||||
|
||||
|
||||
class _TaskCounts(NamedTuple):
|
||||
@@ -20,7 +18,6 @@ class _TaskCounts(NamedTuple):
|
||||
total: int
|
||||
earliest_task: Optional[dt.datetime]
|
||||
hours: int
|
||||
running: int
|
||||
|
||||
|
||||
def dashboard_results(hours: int) -> _TaskCounts:
|
||||
@@ -38,7 +35,6 @@ def dashboard_results(hours: int) -> _TaskCounts:
|
||||
earliest_events += earliest_if_exists(retried_tasks, earliest)
|
||||
failed_count = failed_tasks.count(earliest=earliest)
|
||||
earliest_events += earliest_if_exists(failed_tasks, earliest)
|
||||
running_count = running_tasks.value()
|
||||
return _TaskCounts(
|
||||
succeeded=succeeded_count,
|
||||
retried=retried_count,
|
||||
@@ -46,5 +42,4 @@ def dashboard_results(hours: int) -> _TaskCounts:
|
||||
total=succeeded_count + retried_count + failed_count,
|
||||
earliest_task=min(earliest_events) if earliest_events else None,
|
||||
hours=hours,
|
||||
running=running_count,
|
||||
)
|
||||
|
||||
@@ -5,59 +5,27 @@ import logging
|
||||
from typing import List, Optional
|
||||
|
||||
from pytz import utc
|
||||
from redis import Redis, RedisError
|
||||
from redis import Redis
|
||||
|
||||
from allianceauth.utils.cache import get_redis_client
|
||||
from .helpers import get_redis_client_or_stub
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class _RedisStub:
|
||||
"""Stub of a Redis client.
|
||||
|
||||
It's purpose is to prevent EventSeries objects from trying to access Redis
|
||||
when it is not available. e.g. when the Sphinx docs are rendered by readthedocs.org.
|
||||
"""
|
||||
|
||||
def delete(self, *args, **kwargs):
|
||||
pass
|
||||
|
||||
def incr(self, *args, **kwargs):
|
||||
return 0
|
||||
|
||||
def zadd(self, *args, **kwargs):
|
||||
pass
|
||||
|
||||
def zcount(self, *args, **kwargs):
|
||||
pass
|
||||
|
||||
def zrangebyscore(self, *args, **kwargs):
|
||||
pass
|
||||
|
||||
|
||||
class EventSeries:
|
||||
"""API for recording and analyzing a series of events."""
|
||||
|
||||
_ROOT_KEY = "ALLIANCEAUTH_EVENT_SERIES"
|
||||
|
||||
def __init__(self, key_id: str, redis: Redis = None) -> None:
|
||||
self._redis = get_redis_client() if not redis else redis
|
||||
try:
|
||||
if not self._redis.ping():
|
||||
raise RuntimeError()
|
||||
except (AttributeError, RedisError, RuntimeError):
|
||||
logger.exception(
|
||||
"Failed to establish a connection with Redis. "
|
||||
"This EventSeries object is disabled.",
|
||||
)
|
||||
self._redis = _RedisStub()
|
||||
def __init__(self, key_id: str, redis: Optional[Redis] = None) -> None:
|
||||
self._redis = get_redis_client_or_stub() if not redis else redis
|
||||
self._key_id = str(key_id)
|
||||
self.clear()
|
||||
|
||||
@property
|
||||
def is_disabled(self):
|
||||
"""True when this object is disabled, e.g. Redis was not available at startup."""
|
||||
return isinstance(self._redis, _RedisStub)
|
||||
return hasattr(self._redis, "IS_STUB")
|
||||
|
||||
@property
|
||||
def _key_counter(self):
|
||||
@@ -97,7 +65,7 @@ class EventSeries:
|
||||
self._redis.delete(self._key_counter)
|
||||
|
||||
def count(self, earliest: dt.datetime = None, latest: dt.datetime = None) -> int:
|
||||
"""Count of events, can be restricted to given timeframe.
|
||||
"""Count of events, can be restricted to given time frame.
|
||||
|
||||
Args:
|
||||
- earliest: Date of first events to count(inclusive), or -infinite if not specified
|
||||
|
||||
@@ -1,44 +1,49 @@
|
||||
"""Helpers for Task Statistics."""
|
||||
|
||||
from typing import Optional
|
||||
import logging
|
||||
|
||||
from django.core.cache import cache
|
||||
from redis import Redis, RedisError
|
||||
|
||||
from allianceauth.utils.cache import get_redis_client
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ItemCounter:
|
||||
"""A process safe item counter."""
|
||||
class _RedisStub:
|
||||
"""Stub of a Redis client.
|
||||
|
||||
CACHE_KEY_BASE = "allianceauth-item-counter"
|
||||
DEFAULT_CACHE_TIMEOUT = 24 * 3600
|
||||
Its purpose is to prevent EventSeries objects from trying to access Redis
|
||||
when it is not available. e.g. when the Sphinx docs are rendered by readthedocs.org.
|
||||
"""
|
||||
|
||||
def __init__(self, name: str) -> None:
|
||||
if not name:
|
||||
raise ValueError("Must define a name")
|
||||
IS_STUB = True
|
||||
|
||||
self._name = str(name)
|
||||
def delete(self, *args, **kwargs):
|
||||
pass
|
||||
|
||||
@property
|
||||
def _cache_key(self) -> str:
|
||||
return f"{self.CACHE_KEY_BASE}-{self._name}"
|
||||
def incr(self, *args, **kwargs):
|
||||
return 0
|
||||
|
||||
def reset(self, init_value: int = 0):
|
||||
"""Reset counter to initial value."""
|
||||
cache.set(self._cache_key, init_value, self.DEFAULT_CACHE_TIMEOUT)
|
||||
def zadd(self, *args, **kwargs):
|
||||
pass
|
||||
|
||||
def incr(self, delta: int = 1):
|
||||
"""Increment counter by delta."""
|
||||
try:
|
||||
cache.incr(self._cache_key, delta)
|
||||
except ValueError:
|
||||
pass
|
||||
def zcount(self, *args, **kwargs):
|
||||
pass
|
||||
|
||||
def decr(self, delta: int = 1):
|
||||
"""Decrement counter by delta."""
|
||||
try:
|
||||
cache.decr(self._cache_key, delta)
|
||||
except ValueError:
|
||||
pass
|
||||
def zrangebyscore(self, *args, **kwargs):
|
||||
pass
|
||||
|
||||
def value(self) -> Optional[int]:
|
||||
"""Return current value or None if not yet initialized."""
|
||||
return cache.get(self._cache_key)
|
||||
|
||||
def get_redis_client_or_stub() -> Redis:
    """Return AA's default cache client or a stub if Redis is not available."""
    redis = get_redis_client()
    try:
        if not redis.ping():
            raise RuntimeError()
    except (AttributeError, RedisError, RuntimeError):
        logger.exception(
            "Failed to establish a connection with Redis. "
            "This EventSeries object is disabled.",
        )
        return _RedisStub()
    return redis
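As a quick illustration of the fallback behaviour introduced here (this sketch is not part of the diff): when Redis is unreachable the helper returns the stub instead of raising, which is what `EventSeries.is_disabled` now checks for.

```python
# Minimal sketch, assuming the helpers module from the diff above is importable.
from allianceauth.authentication.task_statistics.helpers import (
    get_redis_client_or_stub,
)

client = get_redis_client_or_stub()
if hasattr(client, "IS_STUB"):
    # Redis was unreachable; calls like incr()/zadd() become harmless no-ops
    print("task statistics are disabled")
```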
@@ -1,15 +1,12 @@
|
||||
"""Signals for Task Statistics."""
|
||||
|
||||
from celery.signals import (
|
||||
task_failure, task_internal_error, task_postrun, task_prerun, task_retry,
|
||||
task_success, worker_ready,
|
||||
task_failure, task_internal_error, task_retry, task_success, worker_ready,
|
||||
)
|
||||
|
||||
from django.conf import settings
|
||||
|
||||
from .counters import (
|
||||
failed_tasks, retried_tasks, running_tasks, succeeded_tasks,
|
||||
)
|
||||
from .counters import failed_tasks, retried_tasks, succeeded_tasks
|
||||
|
||||
|
||||
def reset_counters():
|
||||
@@ -17,7 +14,6 @@ def reset_counters():
|
||||
succeeded_tasks.clear()
|
||||
failed_tasks.clear()
|
||||
retried_tasks.clear()
|
||||
running_tasks.reset()
|
||||
|
||||
|
||||
def is_enabled() -> bool:
|
||||
@@ -55,15 +51,3 @@ def record_task_failed(*args, **kwargs):
|
||||
def record_task_internal_error(*args, **kwargs):
|
||||
if is_enabled():
|
||||
failed_tasks.add()
|
||||
|
||||
|
||||
@task_prerun.connect
|
||||
def record_task_prerun(*args, **kwargs):
|
||||
if is_enabled():
|
||||
running_tasks.incr()
|
||||
|
||||
|
||||
@task_postrun.connect
|
||||
def record_task_postrun(*args, **kwargs):
|
||||
if is_enabled():
|
||||
running_tasks.decr()
|
||||
|
||||
@@ -4,11 +4,7 @@ from django.test import TestCase
|
||||
from django.utils.timezone import now
|
||||
|
||||
from allianceauth.authentication.task_statistics.counters import (
|
||||
dashboard_results,
|
||||
succeeded_tasks,
|
||||
retried_tasks,
|
||||
failed_tasks,
|
||||
running_tasks,
|
||||
dashboard_results, failed_tasks, retried_tasks, succeeded_tasks,
|
||||
)
|
||||
|
||||
|
||||
@@ -32,7 +28,6 @@ class TestDashboardResults(TestCase):
|
||||
failed_tasks.add(now() - dt.timedelta(hours=1, seconds=1))
|
||||
failed_tasks.add()
|
||||
|
||||
running_tasks.reset(8)
|
||||
# when
|
||||
results = dashboard_results(hours=1)
|
||||
# then
|
||||
@@ -41,14 +36,12 @@ class TestDashboardResults(TestCase):
|
||||
self.assertEqual(results.failed, 1)
|
||||
self.assertEqual(results.total, 6)
|
||||
self.assertEqual(results.earliest_task, earliest_task)
|
||||
self.assertEqual(results.running, 8)
|
||||
|
||||
def test_should_work_with_no_data(self):
|
||||
# given
|
||||
succeeded_tasks.clear()
|
||||
retried_tasks.clear()
|
||||
failed_tasks.clear()
|
||||
running_tasks.reset()
|
||||
# when
|
||||
results = dashboard_results(hours=1)
|
||||
# then
|
||||
@@ -57,4 +50,3 @@ class TestDashboardResults(TestCase):
|
||||
self.assertEqual(results.failed, 0)
|
||||
self.assertEqual(results.total, 0)
|
||||
self.assertIsNone(results.earliest_task)
|
||||
self.assertEqual(results.running, 0)
|
||||
|
||||
@@ -1,48 +1,19 @@
|
||||
import datetime as dt
|
||||
from unittest.mock import patch
|
||||
|
||||
from pytz import utc
|
||||
from redis import RedisError
|
||||
|
||||
from django.test import TestCase
|
||||
from django.utils.timezone import now
|
||||
|
||||
from allianceauth.authentication.task_statistics.event_series import (
|
||||
EventSeries,
|
||||
_RedisStub,
|
||||
)
|
||||
from allianceauth.authentication.task_statistics.helpers import _RedisStub
|
||||
|
||||
MODULE_PATH = "allianceauth.authentication.task_statistics.event_series"
|
||||
|
||||
|
||||
class TestEventSeries(TestCase):
|
||||
def test_should_abort_without_redis_client(self):
|
||||
# when
|
||||
with patch(MODULE_PATH + ".get_redis_client") as mock:
|
||||
mock.return_value = None
|
||||
events = EventSeries("dummy")
|
||||
# then
|
||||
self.assertTrue(events._redis, _RedisStub)
|
||||
self.assertTrue(events.is_disabled)
|
||||
|
||||
def test_should_disable_itself_if_redis_not_available_1(self):
|
||||
# when
|
||||
with patch(MODULE_PATH + ".get_redis_client") as mock_get_master_client:
|
||||
mock_get_master_client.return_value.ping.side_effect = RedisError
|
||||
events = EventSeries("dummy")
|
||||
# then
|
||||
self.assertIsInstance(events._redis, _RedisStub)
|
||||
self.assertTrue(events.is_disabled)
|
||||
|
||||
def test_should_disable_itself_if_redis_not_available_2(self):
|
||||
# when
|
||||
with patch(MODULE_PATH + ".get_redis_client") as mock_get_master_client:
|
||||
mock_get_master_client.return_value.ping.return_value = False
|
||||
events = EventSeries("dummy")
|
||||
# then
|
||||
self.assertIsInstance(events._redis, _RedisStub)
|
||||
self.assertTrue(events.is_disabled)
|
||||
|
||||
def test_should_add_event(self):
|
||||
# given
|
||||
events = EventSeries("dummy")
|
||||
@@ -166,3 +137,15 @@ class TestEventSeries(TestCase):
|
||||
results = events.all()
|
||||
# then
|
||||
self.assertEqual(len(results), 2)
|
||||
|
||||
def test_should_not_report_as_disabled_when_initialized_normally(self):
|
||||
# given
|
||||
events = EventSeries("dummy")
|
||||
# when/then
|
||||
self.assertFalse(events.is_disabled)
|
||||
|
||||
def test_should_report_as_disabled_when_initialized_with_redis_stub(self):
|
||||
# given
|
||||
events = EventSeries("dummy", redis=_RedisStub())
|
||||
# when/then
|
||||
self.assertTrue(events.is_disabled)
|
||||
|
||||
@@ -0,0 +1,28 @@
|
||||
from unittest import TestCase
|
||||
from unittest.mock import patch
|
||||
|
||||
from redis import RedisError
|
||||
|
||||
from allianceauth.authentication.task_statistics.helpers import (
|
||||
_RedisStub, get_redis_client_or_stub,
|
||||
)
|
||||
|
||||
MODULE_PATH = "allianceauth.authentication.task_statistics.helpers"
|
||||
|
||||
|
||||
class TestGetRedisClient(TestCase):
|
||||
def test_should_return_mock_if_redis_not_available_1(self):
|
||||
# when
|
||||
with patch(MODULE_PATH + ".get_redis_client") as mock_get_master_client:
|
||||
mock_get_master_client.return_value.ping.side_effect = RedisError
|
||||
result = get_redis_client_or_stub()
|
||||
# then
|
||||
self.assertIsInstance(result, _RedisStub)
|
||||
|
||||
def test_should_return_mock_if_redis_not_available_2(self):
|
||||
# when
|
||||
with patch(MODULE_PATH + ".get_redis_client") as mock_get_master_client:
|
||||
mock_get_master_client.return_value.ping.return_value = False
|
||||
result = get_redis_client_or_stub()
|
||||
# then
|
||||
self.assertIsInstance(result, _RedisStub)
|
||||
@@ -5,7 +5,7 @@
|
||||
<select onchange="this.form.submit()" class="form-control" id="lang-select" name="language">
|
||||
{% get_language_info_list for LANGUAGES as languages %}
|
||||
{% for language in languages %}
|
||||
<option value="{{ language.code }}"{% if language.code == LANGUAGE_CODE %} selected="selected"{% endif %}>
|
||||
<option lang="{{ language.code }}" value="{{ language.code }}"{% if language.code == LANGUAGE_CODE %} selected="selected"{% endif %}>
|
||||
{{ language.name_local|capfirst }} ({{ language.code }})
|
||||
</option>
|
||||
{% endfor %}
|
||||
|
||||
allianceauth/authentication/tests/core/__init__.py (new file, 0 lines)
@@ -0,0 +1,85 @@
|
||||
from unittest.mock import patch
|
||||
|
||||
from amqp.exceptions import ChannelError
|
||||
|
||||
from django.test import TestCase
|
||||
|
||||
from allianceauth.authentication.core.celery_workers import (
|
||||
active_tasks_count, queued_tasks_count,
|
||||
)
|
||||
|
||||
MODULE_PATH = "allianceauth.authentication.core.celery_workers"
|
||||
|
||||
|
||||
@patch(MODULE_PATH + ".current_app")
|
||||
class TestActiveTasksCount(TestCase):
|
||||
def test_should_return_correct_count_when_no_active_tasks(self, mock_current_app):
|
||||
# given
|
||||
mock_current_app.control.inspect.return_value.active.return_value = {
|
||||
"queue": []
|
||||
}
|
||||
# when
|
||||
result = active_tasks_count()
|
||||
# then
|
||||
self.assertEqual(result, 0)
|
||||
|
||||
def test_should_return_correct_task_count_for_active_tasks(self, mock_current_app):
|
||||
# given
|
||||
mock_current_app.control.inspect.return_value.active.return_value = {
|
||||
"queue": [1, 2, 3]
|
||||
}
|
||||
# when
|
||||
result = active_tasks_count()
|
||||
# then
|
||||
self.assertEqual(result, 3)
|
||||
|
||||
def test_should_return_correct_task_count_for_multiple_queues(
|
||||
self, mock_current_app
|
||||
):
|
||||
# given
|
||||
mock_current_app.control.inspect.return_value.active.return_value = {
|
||||
"queue_1": [1, 2],
|
||||
"queue_2": [3, 4],
|
||||
}
|
||||
# when
|
||||
result = active_tasks_count()
|
||||
# then
|
||||
self.assertEqual(result, 4)
|
||||
|
||||
def test_should_return_none_when_celery_not_available(self, mock_current_app):
|
||||
# given
|
||||
mock_current_app.control.inspect.return_value.active.return_value = None
|
||||
# when
|
||||
result = active_tasks_count()
|
||||
# then
|
||||
self.assertIsNone(result)
|
||||
|
||||
|
||||
@patch(MODULE_PATH + ".current_app")
|
||||
class TestQueuedTasksCount(TestCase):
|
||||
def test_should_return_queue_length_when_queue_exists(self, mock_current_app):
|
||||
# given
|
||||
mock_conn = (
|
||||
mock_current_app.connection_or_acquire.return_value.__enter__.return_value
|
||||
)
|
||||
mock_conn.default_channel.queue_declare.return_value.message_count = 7
|
||||
# when
|
||||
result = queued_tasks_count()
|
||||
# then
|
||||
self.assertEqual(result, 7)
|
||||
|
||||
def test_should_return_0_when_queue_does_not_exists(self, mock_current_app):
|
||||
# given
|
||||
mock_current_app.connection_or_acquire.side_effect = ChannelError
|
||||
# when
|
||||
result = queued_tasks_count()
|
||||
# then
|
||||
self.assertEqual(result, 0)
|
||||
|
||||
def test_should_return_None_on_other_errors(self, mock_current_app):
|
||||
# given
|
||||
mock_current_app.connection_or_acquire.side_effect = RuntimeError
|
||||
# when
|
||||
result = queued_tasks_count()
|
||||
# then
|
||||
self.assertIsNone(result)
|
||||
@@ -9,12 +9,8 @@ from django.core.cache import cache
|
||||
from django.test import TestCase
|
||||
|
||||
from allianceauth.templatetags.admin_status import (
|
||||
status_overview,
|
||||
_fetch_list_from_gitlab,
|
||||
_current_notifications,
|
||||
_current_version_summary,
|
||||
_fetch_notification_issues_from_gitlab,
|
||||
_latests_versions
|
||||
_current_notifications, _current_version_summary, _fetch_list_from_gitlab,
|
||||
_fetch_notification_issues_from_gitlab, _latests_versions, status_overview,
|
||||
)
|
||||
|
||||
MODULE_PATH = 'allianceauth.templatetags'
|
||||
@@ -56,14 +52,10 @@ TEST_VERSION = '2.6.5'
|
||||
|
||||
class TestStatusOverviewTag(TestCase):
|
||||
@patch(MODULE_PATH + '.admin_status.__version__', TEST_VERSION)
|
||||
@patch(MODULE_PATH + '.admin_status._fetch_celery_queue_length')
|
||||
@patch(MODULE_PATH + '.admin_status._current_version_summary')
|
||||
@patch(MODULE_PATH + '.admin_status._current_notifications')
|
||||
def test_status_overview(
|
||||
self,
|
||||
mock_current_notifications,
|
||||
mock_current_version_info,
|
||||
mock_fetch_celery_queue_length
|
||||
self, mock_current_notifications, mock_current_version_info
|
||||
):
|
||||
# given
|
||||
notifications = {
|
||||
@@ -82,7 +74,6 @@ class TestStatusOverviewTag(TestCase):
|
||||
'latest_beta_version': '2.4.4a1',
|
||||
}
|
||||
mock_current_version_info.return_value = version_info
|
||||
mock_fetch_celery_queue_length.return_value = 3
|
||||
# when
|
||||
result = status_overview()
|
||||
# then
|
||||
@@ -96,7 +87,6 @@ class TestStatusOverviewTag(TestCase):
|
||||
self.assertEqual(result["latest_minor_version"], '2.4.0')
|
||||
self.assertEqual(result["latest_patch_version"], '2.4.5')
|
||||
self.assertEqual(result["latest_beta_version"], '2.4.4a1')
|
||||
self.assertEqual(result["task_queue_length"], 3)
|
||||
|
||||
|
||||
class TestNotifications(TestCase):
|
||||
|
||||
allianceauth/authentication/tests/test_views.py (new file, 39 lines)
@@ -0,0 +1,39 @@
|
||||
import json
|
||||
from unittest.mock import patch
|
||||
|
||||
from django.test import RequestFactory, TestCase
|
||||
|
||||
from allianceauth.authentication.views import task_counts
|
||||
from allianceauth.tests.auth_utils import AuthUtils
|
||||
|
||||
MODULE_PATH = "allianceauth.authentication.views"
|
||||
|
||||
|
||||
def jsonresponse_to_dict(response) -> dict:
|
||||
return json.loads(response.content)
|
||||
|
||||
|
||||
@patch(MODULE_PATH + ".queued_tasks_count")
|
||||
@patch(MODULE_PATH + ".active_tasks_count")
|
||||
class TestRunningTasksCount(TestCase):
|
||||
@classmethod
|
||||
def setUpClass(cls) -> None:
|
||||
super().setUpClass()
|
||||
cls.factory = RequestFactory()
|
||||
cls.user = AuthUtils.create_user("bruce_wayne")
|
||||
|
||||
def test_should_return_data(
|
||||
self, mock_active_tasks_count, mock_queued_tasks_count
|
||||
):
|
||||
# given
|
||||
mock_active_tasks_count.return_value = 2
|
||||
mock_queued_tasks_count.return_value = 3
|
||||
request = self.factory.get("/")
|
||||
request.user = self.user
|
||||
# when
|
||||
response = task_counts(request)
|
||||
# then
|
||||
self.assertEqual(response.status_code, 200)
|
||||
self.assertDictEqual(
|
||||
jsonresponse_to_dict(response), {"tasks_running": 2, "tasks_queued": 3}
|
||||
)
|
||||
@@ -38,4 +38,5 @@ urlpatterns = [
|
||||
name='token_refresh'
|
||||
),
|
||||
path('dashboard/', views.dashboard, name='dashboard'),
|
||||
path('task-counts/', views.task_counts, name='task_counts'),
|
||||
]
|
||||
|
||||
@@ -1,31 +1,31 @@
|
||||
import logging
|
||||
|
||||
from django_registration.backends.activation.views import (
|
||||
REGISTRATION_SALT, ActivationView as BaseActivationView,
|
||||
RegistrationView as BaseRegistrationView,
|
||||
)
|
||||
from django_registration.signals import user_registered
|
||||
|
||||
from django.conf import settings
|
||||
from django.contrib import messages
|
||||
from django.contrib.auth import login, authenticate
|
||||
from django.contrib.auth import authenticate, login
|
||||
from django.contrib.auth.decorators import login_required
|
||||
from django.contrib.auth.models import User
|
||||
from django.core import signing
|
||||
from django.core.mail import EmailMultiAlternatives
|
||||
from django.http import JsonResponse
|
||||
from django.shortcuts import redirect, render
|
||||
from django.template.loader import render_to_string
|
||||
from django.urls import reverse, reverse_lazy
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
from allianceauth.eveonline.models import EveCharacter
|
||||
from esi.decorators import token_required
|
||||
from esi.models import Token
|
||||
|
||||
from django_registration.backends.activation.views import (
|
||||
RegistrationView as BaseRegistrationView,
|
||||
ActivationView as BaseActivationView,
|
||||
REGISTRATION_SALT
|
||||
)
|
||||
from django_registration.signals import user_registered
|
||||
from allianceauth.eveonline.models import EveCharacter
|
||||
|
||||
from .models import CharacterOwnership
|
||||
from .core.celery_workers import active_tasks_count, queued_tasks_count
|
||||
from .forms import RegistrationForm
|
||||
from .models import CharacterOwnership
|
||||
|
||||
if 'allianceauth.eveonline.autogroups' in settings.INSTALLED_APPS:
|
||||
_has_auto_groups = True
|
||||
@@ -61,6 +61,7 @@ def dashboard(request):
|
||||
}
|
||||
return render(request, 'authentication/dashboard.html', context)
|
||||
|
||||
|
||||
@login_required
|
||||
def token_management(request):
|
||||
tokens = request.user.token_set.all()
|
||||
@@ -70,6 +71,7 @@ def token_management(request):
|
||||
}
|
||||
return render(request, 'authentication/tokens.html', context)
|
||||
|
||||
|
||||
@login_required
|
||||
def token_delete(request, token_id=None):
|
||||
try:
|
||||
@@ -83,6 +85,7 @@ def token_delete(request, token_id=None):
|
||||
messages.warning(request, "Token does not exist")
|
||||
return redirect('authentication:token_management')
|
||||
|
||||
|
||||
@login_required
|
||||
def token_refresh(request, token_id=None):
|
||||
try:
|
||||
@@ -127,7 +130,7 @@ def main_character_change(request, token):
|
||||
def add_character(request, token):
|
||||
if CharacterOwnership.objects.filter(character__character_id=token.character_id).filter(
|
||||
owner_hash=token.character_owner_hash).filter(user=request.user).exists():
|
||||
messages.success(request, _('Added %(name)s to your account.'% ({'name': token.character_name})))
|
||||
messages.success(request, _('Added %(name)s to your account.' % ({'name': token.character_name})))
|
||||
else:
|
||||
messages.error(request, _('Failed to add %(name)s to your account: they already have an account.' % ({'name': token.character_name})))
|
||||
return redirect('authentication:dashboard')
|
||||
@@ -268,8 +271,11 @@ class ActivationView(BaseActivationView):
|
||||
|
||||
def validate_key(self, activation_key):
|
||||
try:
|
||||
dump = signing.loads(activation_key, salt=REGISTRATION_SALT,
|
||||
max_age=settings.ACCOUNT_ACTIVATION_DAYS * 86400)
|
||||
dump = signing.loads(
|
||||
activation_key,
|
||||
salt=REGISTRATION_SALT,
|
||||
max_age=settings.ACCOUNT_ACTIVATION_DAYS * 86400
|
||||
)
|
||||
return dump
|
||||
except signing.BadSignature:
|
||||
return None
|
||||
@@ -299,3 +305,12 @@ def activation_complete(request):
|
||||
def registration_closed(request):
|
||||
messages.error(request, _('Registration of new accounts is not allowed at this time.'))
|
||||
return redirect('authentication:login')
|
||||
|
||||
|
||||
def task_counts(request) -> JsonResponse:
    """Return task counts as JSON for an AJAX call."""
    data = {
        "tasks_running": active_tasks_count(),
        "tasks_queued": queued_tasks_count()
    }
    return JsonResponse(data)
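To make the new endpoint concrete, here is a small test-style sketch of how it could be exercised; the URL name comes from the `urls.py` change above, while the response numbers are illustrative:

```python
# Illustrative only: exercises the new task-counts endpoint with Django's
# test client. Depending on your middleware you may need to log in first.
from django.test import Client
from django.urls import reverse

client = Client()
# client.force_login(some_user)  # may be required, depending on auth setup
response = client.get(reverse("authentication:task_counts"))
print(response.json())  # e.g. {"tasks_running": 2, "tasks_queued": 3}
```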
@@ -13,6 +13,10 @@ app = Celery('{{ project_name }}')
|
||||
# the configuration object to child processes.
|
||||
app.config_from_object('django.conf:settings')
|
||||
|
||||
# Automatically try to establish the connection to the AMQP broker on
|
||||
# Celery startup if it is unavailable.
|
||||
app.conf.broker_connection_retry_on_startup = True
|
||||
|
||||
# setup priorities ( 0 Highest, 9 Lowest )
|
||||
app.conf.broker_transport_options = {
|
||||
'priority_steps': list(range(10)), # set up queue to have 10 steps
|
||||
|
||||
@@ -32,10 +32,13 @@ INSTALLED_APPS += [
# To change the logging level for extensions, uncomment the following line.
# LOGGING['handlers']['extension_file']['level'] = 'DEBUG'

# By default apps are prevented from having public views for security reasons.
# If you want to allow specific apps to have public views
# you can put there names here (same name as in INSTALLED_APPS):
APPS_WITH_PUBLIC_VIEWS = []
# By default, apps are prevented from having public views for security reasons.
# To allow specific apps to have public views, add them to APPS_WITH_PUBLIC_VIEWS
# » The format is the same as in INSTALLED_APPS
# » The app developer must also explicitly allow public views for their app
APPS_WITH_PUBLIC_VIEWS = [

]

# Enter credentials to use MySQL/MariaDB. Comment out to use sqlite3
DATABASES['default'] = {
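Purely to illustrate the shape of the new `APPS_WITH_PUBLIC_VIEWS` setting shown in the hunk above (the app label below is hypothetical and not part of the diff):

```python
# Hypothetical example for a local settings file: allow one specific app
# to expose public views. "example_public_app" is a made-up label; use
# your own app's name exactly as it appears in INSTALLED_APPS.
APPS_WITH_PUBLIC_VIEWS = [
    "example_public_app",
]
```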
@@ -6,6 +6,7 @@ from ...admin import ServicesUserAdmin
|
||||
from . import __title__
|
||||
from .models import DiscordUser
|
||||
from .utils import LoggerAddTag
|
||||
from .auth_hooks import DiscordService
|
||||
|
||||
logger = LoggerAddTag(logging.getLogger(__name__), __title__)
|
||||
|
||||
@@ -27,6 +28,6 @@ class DiscordUserAdmin(ServicesUserAdmin):
|
||||
|
||||
@admin.display(description='Discord Username', ordering='username')
|
||||
def _username(self, obj):
|
||||
if obj.username and obj.discriminator:
|
||||
return f'{obj.username}#{obj.discriminator}'
|
||||
return ''
|
||||
return DiscordService.get_discord_username(
|
||||
username=obj.username, discriminator=obj.discriminator
|
||||
)
|
||||
|
||||
@@ -30,6 +30,29 @@ class DiscordService(ServicesHook):
|
||||
self.access_perm = 'discord.access_discord'
|
||||
self.name_format = '{character_name}'
|
||||
|
||||
@staticmethod
|
||||
def get_discord_username(username:str, discriminator:str) -> str:
|
||||
"""
|
||||
Determine the Discord username (Old and new format)
|
||||
:param username:
|
||||
:type username:
|
||||
:param discriminator:
|
||||
:type discriminator:
|
||||
:return:
|
||||
:rtype:
|
||||
"""
|
||||
|
||||
if username and discriminator:
|
||||
discord_username = f'{username}#{discriminator}'
|
||||
|
||||
# New Discord user name format
|
||||
if discriminator == '0':
|
||||
discord_username = f'@{username}'
|
||||
else:
|
||||
discord_username = ''
|
||||
|
||||
return discord_username
|
||||
|
||||
def delete_user(self, user: User, notify_user: bool = False) -> None:
|
||||
if self.user_has_account(user):
|
||||
logger.debug('Deleting user %s %s account', user, self.name)
|
||||
@@ -43,10 +66,19 @@ class DiscordService(ServicesHook):
|
||||
user_has_account = True
|
||||
username = request.user.discord.username
|
||||
discriminator = request.user.discord.discriminator
|
||||
if username and discriminator:
|
||||
discord_username = f'{username}#{discriminator}'
|
||||
else:
|
||||
discord_username = ''
|
||||
|
||||
discord_username = self.get_discord_username(
|
||||
username=username, discriminator=discriminator
|
||||
)
|
||||
|
||||
# if username and discriminator:
|
||||
# discord_username = f'{username}#{discriminator}'
|
||||
#
|
||||
# # New Discord user name format
|
||||
# if discriminator == '0':
|
||||
# discord_username = f'@{username}'
|
||||
# else:
|
||||
# discord_username = ''
|
||||
else:
|
||||
discord_username = ''
|
||||
user_has_account = False
|
||||
|
||||
@@ -150,3 +150,23 @@ class TestDiscordService(NoSocketsTestCase):
|
||||
self.assertTemplateUsed(service.service_ctrl_template)
|
||||
self.assertIn('/discord/reset/', response)
|
||||
self.assertIn('/discord/deactivate/', response)
|
||||
|
||||
def test_new_discord_username_format(self):
|
||||
"""
|
||||
Test if we get Discord's new username format
|
||||
:return:
|
||||
:rtype:
|
||||
"""
|
||||
|
||||
# given
|
||||
username = 'william_riker'
|
||||
discriminator = '0' # Seems to be returned as 0 for Discord's new username format
|
||||
|
||||
# when
|
||||
discord_username = DiscordService.get_discord_username(
|
||||
username=username, discriminator=discriminator
|
||||
)
|
||||
|
||||
# then
|
||||
expected_username = '@william_riker'
|
||||
self.assertEqual(first=discord_username, second=expected_username)
|
||||
|
||||
@@ -122,7 +122,7 @@ ul.list-group.list-group-horizontal > li.list-group-item {
|
||||
padding-top: 0.5rem;
|
||||
}
|
||||
|
||||
.navbar-nav > li.top-user-menu.with-main-character a {
|
||||
.navbar-nav > li.top-user-menu a {
|
||||
padding: 14px;
|
||||
}
|
||||
|
||||
|
||||
@@ -92,12 +92,8 @@
|
||||
{% include "allianceauth/admin-status/celery_bar_partial.html" with label="failed" level="danger" tasks_count=tasks_failed %}
|
||||
</div>
|
||||
<p>
|
||||
{% blocktranslate with running_count=tasks_running|default_if_none:"?"|intcomma %}
|
||||
{{ running_count }} running |
|
||||
{% endblocktranslate %}
|
||||
{% blocktranslate with queue_length=task_queue_length|default_if_none:"?"|intcomma %}
|
||||
{{ queue_length }} queued
|
||||
{% endblocktranslate %}
|
||||
<span id="task-counts">?</span> {% translate 'running' %} |
|
||||
<span id="queued-tasks-count">?</span> {% translate 'queued' %}
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
@@ -105,3 +101,36 @@
|
||||
</div>
|
||||
<div class="clearfix"></div>
|
||||
</div>
|
||||
|
||||
<script>
|
||||
const elemRunning = document.getElementById("task-counts");
|
||||
const elemQueued = document.getElementById("queued-tasks-count");
|
||||
|
||||
fetch('{% url "authentication:task_counts" %}')
|
||||
.then((response) => {
|
||||
if (response.ok) {
|
||||
return response.json();
|
||||
}
|
||||
throw new Error("Something went wrong");
|
||||
})
|
||||
.then((responseJson) => {
|
||||
const running = responseJson.tasks_running;
|
||||
if (running == null) {
|
||||
elemRunning.textContent = "N/A";
|
||||
} else {
|
||||
elemRunning.textContent = running.toLocaleString();
|
||||
}
|
||||
|
||||
const queued = responseJson.tasks_queued;
|
||||
if (queued == null) {
|
||||
elemQueued.textContent = "N/A";
|
||||
} else {
|
||||
elemQueued.textContent = queued.toLocaleString();
|
||||
}
|
||||
})
|
||||
.catch((error) => {
|
||||
console.log(error);
|
||||
elemRunning.textContent = "ERROR";
|
||||
elemQueued.textContent = "ERROR";
|
||||
});
|
||||
</script>
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
{% load navactive %}
|
||||
{% load auth_notifications %}
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<html lang="{{ LANGUAGE_CODE }}">
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
@@ -21,33 +21,37 @@
|
||||
</head>
|
||||
|
||||
<body class="{% if NIGHT_MODE %}template-dark-mode{% else %}template-light-mode{% endif %}">
|
||||
{% if user.is_authenticated %}
|
||||
<div id="wrapper" class="container">
|
||||
<!-- Navigation -->
|
||||
{% include 'allianceauth/top-menu.html' %}
|
||||
<div class="row" id="site-body-wrapper">
|
||||
<div id="wrapper" class="container">
|
||||
<!-- Navigation -->
|
||||
{% include 'allianceauth/top-menu.html' %}
|
||||
|
||||
<div class="clearfix{% if user.is_authenticated %} row{% endif %}" id="site-body-wrapper">
|
||||
{% if user.is_authenticated %}
|
||||
{% include 'allianceauth/side-menu.html' %}
|
||||
<div class="col-sm-10">
|
||||
{% include 'allianceauth/messages.html' %}
|
||||
{% block content %}
|
||||
{% endblock content %}
|
||||
</div>
|
||||
<div class="clearfix"></div>
|
||||
{% endif %}
|
||||
|
||||
<div class="{% if user.is_authenticated %}col-sm-10{% else %}col-sm-12{% endif %}">
|
||||
{% include 'allianceauth/messages.html' %}
|
||||
|
||||
{% block content %}
|
||||
{% endblock content %}
|
||||
</div>
|
||||
</div>
|
||||
{% endif %}
|
||||
</div>
|
||||
|
||||
{% include 'bundles/bootstrap-js.html' %}
|
||||
{% include 'bundles/jquery-visibility-js.html' %}
|
||||
|
||||
<script>
|
||||
let notificationUPdateSettings = {
|
||||
notificationsListViewUrl: "{% url 'notifications:list' %}",
|
||||
notificationsRefreshTime: "{% notifications_refresh_time %}",
|
||||
userNotificationsCountViewUrl: "{% url 'notifications:user_notifications_count' request.user.pk %}"
|
||||
};
|
||||
</script>
|
||||
{% include 'bundles/refresh-notifications-js.html' %}
|
||||
{% if user.is_authenticated %}
|
||||
<script>
|
||||
let notificationUPdateSettings = {
|
||||
notificationsListViewUrl: "{% url 'notifications:list' %}",
|
||||
notificationsRefreshTime: "{% notifications_refresh_time %}",
|
||||
userNotificationsCountViewUrl: "{% url 'notifications:user_notifications_count' request.user.pk %}"
|
||||
};
|
||||
</script>
|
||||
{% include 'bundles/refresh-notifications-js.html' %}
|
||||
{% endif %}
|
||||
{% include 'bundles/evetime-js.html' %}
|
||||
|
||||
{% block extra_javascript %}
|
||||
|
||||
@@ -11,7 +11,10 @@
|
||||
</span>
|
||||
{% endwith %}
|
||||
{% else %}
|
||||
{% translate "User Menu" %}
|
||||
<img class="img-rounded ra-avatar" src="{{ 1|character_portrait_url:32 }}" alt="{{ main.character_name }}">
|
||||
<span class="hidden-sm hidden-md hidden-lg">
|
||||
{% translate "User Menu" %}
|
||||
</span>
|
||||
{% endif %}
|
||||
<span class="caret"></span>
|
||||
</a>
|
||||
|
||||
@@ -22,9 +22,12 @@
|
||||
<li class="nav-item-eve-time">
|
||||
<div class="eve-time-wrapper">{% translate "Eve Time" %}: <span class="eve-time-clock"></span></div>
|
||||
</li>
|
||||
<li class="{% navactive request 'notifications:' %}" id="menu_item_notifications">
|
||||
{% include 'allianceauth/notifications_menu_item.html' %}
|
||||
</li>
|
||||
|
||||
{% if user.is_authenticated %}
|
||||
<li class="{% navactive request 'notifications:' %}" id="menu_item_notifications">
|
||||
{% include 'allianceauth/notifications_menu_item.html' %}
|
||||
</li>
|
||||
{% endif %}
|
||||
|
||||
{% include 'allianceauth/top-menu-user-dropdown.html' %}
|
||||
</ul>
|
||||
|
||||
@@ -1,9 +1,6 @@
|
||||
import logging
|
||||
from typing import Optional
|
||||
|
||||
import amqp.exceptions
|
||||
import requests
|
||||
from celery.app import app_or_default
|
||||
from packaging.version import InvalidVersion, Version as Pep440Version
|
||||
|
||||
from django import template
|
||||
@@ -11,8 +8,9 @@ from django.conf import settings
|
||||
from django.core.cache import cache
|
||||
|
||||
from allianceauth import __version__
|
||||
|
||||
from ..authentication.task_statistics.counters import dashboard_results
|
||||
from allianceauth.authentication.task_statistics.counters import (
|
||||
dashboard_results,
|
||||
)
|
||||
|
||||
register = template.Library()
|
||||
|
||||
@@ -48,18 +46,15 @@ def status_overview() -> dict:
|
||||
response = {
|
||||
"notifications": list(),
|
||||
"current_version": __version__,
|
||||
"task_queue_length": None,
|
||||
"tasks_succeeded": 0,
|
||||
"tasks_retried": 0,
|
||||
"tasks_failed": 0,
|
||||
"tasks_total": 0,
|
||||
"tasks_hours": 0,
|
||||
"earliest_task": None,
|
||||
"tasks_running": 0
|
||||
}
|
||||
response.update(_current_notifications())
|
||||
response.update(_current_version_summary())
|
||||
response.update({'task_queue_length': _fetch_celery_queue_length()})
|
||||
response.update(_celery_stats())
|
||||
return response
|
||||
|
||||
@@ -74,27 +69,9 @@ def _celery_stats() -> dict:
|
||||
"tasks_total": results.total,
|
||||
"tasks_hours": results.hours,
|
||||
"earliest_task": results.earliest_task,
|
||||
"tasks_running": results.running,
|
||||
}
|
||||
|
||||
|
||||
def _fetch_celery_queue_length() -> Optional[int]:
|
||||
try:
|
||||
app = app_or_default(None)
|
||||
with app.connection_or_acquire() as conn:
|
||||
result = conn.default_channel.queue_declare(
|
||||
queue=getattr(settings, 'CELERY_DEFAULT_QUEUE', 'celery'),
|
||||
passive=True
|
||||
)
|
||||
return result.message_count
|
||||
except amqp.exceptions.ChannelError:
|
||||
# Queue doesn't exist, probably empty
|
||||
return 0
|
||||
except Exception:
|
||||
logger.exception("Failed to get celery queue length")
|
||||
return None
|
||||
|
||||
|
||||
def _current_notifications() -> dict:
|
||||
"""returns the newest 5 announcement issues"""
|
||||
try:
|
||||
|
||||
allianceauth/utils/counters.py (new file, 65 lines)
@@ -0,0 +1,65 @@
|
||||
"""Counters."""
|
||||
|
||||
from typing import Optional
|
||||
|
||||
from redis import Redis
|
||||
|
||||
from django.core.cache import cache
|
||||
|
||||
from .cache import get_redis_client
|
||||
|
||||
|
||||
class ItemCounter:
|
||||
"""A process safe item counter.
|
||||
|
||||
Args:
|
||||
- name: Unique name for the counter
|
||||
- minimum: Counter can not go below the minimum, when set
|
||||
- redis: A Redis client. Will use AA's cache client by default
|
||||
"""
|
||||
|
||||
CACHE_KEY_BASE = "allianceauth-item-counter"
|
||||
DEFAULT_CACHE_TIMEOUT = 24 * 3600
|
||||
|
||||
def __init__(
|
||||
self, name: str, minimum: Optional[int] = None, redis: Optional[Redis] = None
|
||||
) -> None:
|
||||
if not name:
|
||||
raise ValueError("Must define a name")
|
||||
|
||||
self._name = str(name)
|
||||
self._minimum = minimum
|
||||
self._redis = get_redis_client() if not redis else redis
|
||||
|
||||
@property
|
||||
def _cache_key(self) -> str:
|
||||
return f"{self.CACHE_KEY_BASE}-{self._name}"
|
||||
|
||||
def reset(self, init_value: int = 0):
|
||||
"""Reset counter to initial value."""
|
||||
with self._redis.lock(f"{self.CACHE_KEY_BASE}-reset"):
|
||||
if self._minimum is not None and init_value < self._minimum:
|
||||
raise ValueError("Can not reset below minimum")
|
||||
|
||||
cache.set(self._cache_key, init_value, self.DEFAULT_CACHE_TIMEOUT)
|
||||
|
||||
def incr(self, delta: int = 1):
|
||||
"""Increment counter by delta."""
|
||||
try:
|
||||
cache.incr(self._cache_key, delta)
|
||||
except ValueError:
|
||||
pass
|
||||
|
||||
def decr(self, delta: int = 1):
|
||||
"""Decrement counter by delta."""
|
||||
with self._redis.lock(f"{self.CACHE_KEY_BASE}-decr"):
|
||||
if self._minimum is not None and self.value() == self._minimum:
|
||||
return
|
||||
try:
|
||||
cache.decr(self._cache_key, delta)
|
||||
except ValueError:
|
||||
pass
|
||||
|
||||
def value(self) -> Optional[int]:
|
||||
"""Return current value or None if not yet initialized."""
|
||||
return cache.get(self._cache_key)
|
||||
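Since the diff introduces `allianceauth.utils.counters.ItemCounter` as a small public utility, a brief usage sketch may help; it assumes a working Django cache and Redis client, and the values are illustrative:

```python
# Illustrative sketch of the new ItemCounter utility (values made up).
from allianceauth.utils.counters import ItemCounter

counter = ItemCounter("my-counter", minimum=0)  # a unique name is required
counter.reset()     # initialise at 0; resetting below the minimum raises ValueError
counter.incr()      # 0 -> 1
counter.decr()      # 1 -> 0
counter.decr()      # already at the minimum, so this is a no-op
print(counter.value())  # -> 0, or None if the counter was never initialised
```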
@@ -1,6 +1,9 @@
|
||||
from unittest import TestCase
|
||||
from unittest.mock import patch
|
||||
|
||||
from allianceauth.authentication.task_statistics.helpers import ItemCounter
|
||||
from allianceauth.utils.counters import ItemCounter
|
||||
|
||||
MODULE_PATH = "allianceauth.utils.counters"
|
||||
|
||||
COUNTER_NAME = "test-counter"
|
||||
|
||||
@@ -72,3 +75,46 @@ class TestItemCounter(TestCase):
|
||||
counter.decr(1)
|
||||
# then
|
||||
self.assertEqual(counter.value(), -1)
|
||||
|
||||
def test_can_not_decrement_counter_below_minimum(self):
|
||||
# given
|
||||
counter = ItemCounter(COUNTER_NAME, minimum=0)
|
||||
counter.reset(0)
|
||||
# when
|
||||
counter.decr(1)
|
||||
# then
|
||||
self.assertEqual(counter.value(), 0)
|
||||
|
||||
def test_can_not_reset_counter_below_minimum(self):
|
||||
# given
|
||||
counter = ItemCounter(COUNTER_NAME, minimum=0)
|
||||
# when/then
|
||||
with self.assertRaises(ValueError):
|
||||
counter.reset(-1)
|
||||
|
||||
def test_can_not_init_without_name(self):
|
||||
# when/then
|
||||
with self.assertRaises(ValueError):
|
||||
ItemCounter(name="")
|
||||
|
||||
def test_can_ignore_invalid_values_when_incrementing(self):
|
||||
# given
|
||||
counter = ItemCounter(COUNTER_NAME)
|
||||
counter.reset(0)
|
||||
# when
|
||||
with patch(MODULE_PATH + ".cache.incr") as m:
|
||||
m.side_effect = ValueError
|
||||
counter.incr()
|
||||
# then
|
||||
self.assertEqual(counter.value(), 0)
|
||||
|
||||
def test_can_ignore_invalid_values_when_decrementing(self):
|
||||
# given
|
||||
counter = ItemCounter(COUNTER_NAME)
|
||||
counter.reset(1)
|
||||
# when
|
||||
with patch(MODULE_PATH + ".cache.decr") as m:
|
||||
m.side_effect = ValueError
|
||||
counter.decr()
|
||||
# then
|
||||
self.assertEqual(counter.value(), 1)
|
||||
@@ -1,7 +1,7 @@
|
||||
PROTOCOL=https://
|
||||
AUTH_SUBDOMAIN=%AUTH_SUBDOMAIN%
|
||||
DOMAIN=%DOMAIN%
|
||||
AA_DOCKER_TAG=registry.gitlab.com/allianceauth/allianceauth/auth:v3.6.0
|
||||
AA_DOCKER_TAG=registry.gitlab.com/allianceauth/allianceauth/auth:v3.7.1
|
||||
|
||||
# Nginx Proxy Manager
|
||||
PROXY_HTTP_PORT=80
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
FROM python:3.9-slim
|
||||
ARG AUTH_VERSION=v3.6.0
|
||||
ARG AUTH_VERSION=v3.7.1
|
||||
ARG AUTH_PACKAGE=allianceauth==${AUTH_VERSION}
|
||||
ENV VIRTUAL_ENV=/opt/venv
|
||||
ENV AUTH_USER=allianceauth
|
||||
|
||||
docs/development/aa_core/code-style (new file, 49 lines)
@@ -0,0 +1,49 @@
|
||||
# Code Style
|
||||
|
||||
## Pre-Commit
|
||||
|
||||
Alliance Auth is a team effort with developers of various skill levels and backgrounds. To avoid significant drift or formatting changes between developers, we use [pre-commit](https://pre-commit.com/) to apply a very minimal set of formatting checks to code contributed to the project.
|
||||
|
||||
Pre-commit is also very popular with our Community Apps and may be significantly more opinionated or looser depending on the project.
|
||||
|
||||
To get started, `pip install pre-commit`, then `pre-commit install` to add the git hooks.
|
||||
|
||||
Before any code is "git push"-ed, pre-commit will check it for uniformity and correct it if possible.
|
||||
|
||||
```bash
|
||||
check python ast.....................................(no files to check)Skipped
|
||||
check yaml...........................................(no files to check)Skipped
|
||||
check json...........................................(no files to check)Skipped
|
||||
check toml...........................................(no files to check)Skipped
|
||||
check xml............................................(no files to check)Skipped
|
||||
check for merge conflicts............................(no files to check)Skipped
|
||||
check for added large files..........................(no files to check)Skipped
|
||||
detect private key...................................(no files to check)Skipped
|
||||
check for case conflicts.............................(no files to check)Skipped
|
||||
debug statements (python)............................(no files to check)Skipped
|
||||
fix python encoding pragma...........................(no files to check)Skipped
|
||||
fix utf-8 byte order marker..........................(no files to check)Skipped
|
||||
mixed line ending....................................(no files to check)Skipped
|
||||
trim trailing whitespace.............................(no files to check)Skipped
|
||||
check that executables have shebangs.................(no files to check)Skipped
|
||||
fix end of files.....................................(no files to check)Skipped
|
||||
Check .editorconfig rules............................(no files to check)Skipped
|
||||
django-upgrade.......................................(no files to check)Skipped
|
||||
pyupgrade............................................(no files to check)Skipped
|
||||
```
|
||||
|
||||
## Editorconfig
|
||||
|
||||
[Editorconfig](https://editorconfig.org/) is supported by most IDEs to streamline the most common editor disparities. While checked by our pre-commit file, using this in your IDE (either automatically or via a plugin) will minimize the corrections that may need to be made.
|
||||
|
||||
## Doc Strings
|
||||
|
||||
We prefer either [PEP-287](https://peps.python.org/pep-0287/)/[reStructuredText](https://docutils.sourceforge.io/rst.html) or [Google](https://google.github.io/styleguide/pyguide.html#381-docstrings) Docstrings.
|
||||
|
||||
These can be used to automatically generate our Sphinx documentation in either format.
|
||||
|
||||
## Best Practice
|
||||
|
||||
It is advisable to avoid wide formatting changes on code that is not being modified by an MR. Further to this, automated code formatting should be kept to a minimum when modifying sections of existing files.
|
||||
|
||||
If you are contributing whole modules or rewriting large sections of code you may use any legible code formatting valid under Python.
|
||||
@@ -7,4 +7,5 @@ This section contains important information on how to develop Alliance Auth itse
|
||||
:maxdepth: 1
|
||||
|
||||
documentation
|
||||
code-style
|
||||
```
|
||||
|
||||
@@ -399,16 +399,10 @@ Update & install basic tools before installing further Python packages:
|
||||
pip install -U pip setuptools wheel
|
||||
```
|
||||
|
||||
You can install **Alliance Auth** with the following command. This will install AA and all its Python dependencies.
|
||||
You can install **Alliance Auth** with the following command. This will install AA, AA's Python dependencies, superlance for memory monitoring and gunicorn as a wsgi server
|
||||
|
||||
```bash
|
||||
pip install allianceauth
|
||||
```
|
||||
|
||||
You should also install Gunicorn now unless you want to use another WSGI server (see [Gunicorn](#gunicorn) for details):
|
||||
|
||||
```bash
|
||||
pip install gunicorn
|
||||
pip install allianceauth superlance gunicorn
|
||||
```
|
||||
|
||||
#### Create Alliance Auth project
|
||||
|
||||
@@ -44,7 +44,7 @@ You will need to have [Gunicorn](gunicorn.md) or some other WSGI server setup fo
|
||||
|
||||
## Install
|
||||
|
||||
Ubuntu 1804, 2004. 2204:
|
||||
Ubuntu 1804, 2004, 2204:
|
||||
```bash
|
||||
sudo apt-get install nginx
|
||||
```
|
||||
@@ -59,18 +59,13 @@ CentOS Stream 8, Stream 9:
|
||||
sudo dnf install nginx
|
||||
```
|
||||
|
||||
Create a config file in `/etc/nginx/sites-available` and call it `alliance-auth.conf` or whatever your preferred name is.
|
||||
Create a config file in `/etc/nginx/sites-available` (`/etc/nginx/conf.d` on CentOS) and call it `alliance-auth.conf` or whatever your preferred name is.
|
||||
|
||||
Create a symbolic link to enable the site
|
||||
Create a symbolic link to enable the site (not needed on CentOS):
|
||||
```bash
|
||||
ln -s /etc/nginx/sites-available/alliance-auth.conf /etc/nginx/sites-enabled/
|
||||
```
|
||||
|
||||
### CentOS
|
||||
|
||||
Create a config file in `/etc/nginx/conf.d` and call it `alliance-auth.conf` or whatever your preferred name is.
|
||||
|
||||
|
||||
### Basic config
|
||||
|
||||
Copy this basic config into your config file. Make whatever changes you feel are necessary.
|
||||
|
||||
@@ -28,43 +28,11 @@ command=/home/allianceserver/venv/auth/bin/celery -A myauth worker -l info
|
||||
|
||||
Celery workers often have memory leaks and will therefore grow in size over time. While the Alliance Auth team is working hard to ensure Auth is free of memory leaks, some may still be caused by bugs in different versions of libraries or community apps. It is therefore good practice to enable features that protect against potential memory leaks.
|
||||
|
||||
There are two ways to protect against memory leaks:
|
||||
|
||||
- Worker
|
||||
- Supervisor
|
||||
|
||||
### Worker
|
||||
|
||||
Celery workers can be configured to automatically restart if they grow above a defined memory threshold. Restarts will be graceful, so current tasks will be allowed to complete before the restart happens.
|
||||
|
||||
To add protection against memory leaks add the following to the command configuration of your worker in the `supervisor.conf` file. This sets the upper limit to 256MB.
|
||||
|
||||
```text
|
||||
--max-memory-per-child 262144
|
||||
```
|
||||
|
||||
Full example:
|
||||
|
||||
```text
|
||||
command=/home/allianceserver/venv/auth/bin/celery -A myauth worker --max-memory-per-child 262144
|
||||
```
|
||||
|
||||
```eval_rst
|
||||
.. hint::
|
||||
The 256 MB limit is just an example and should be adjusted to your system configuration. We would suggest to not go below 128MB though, since new workers start with around 80 MB already. Also take into consideration that this value is per worker and that you properly have more than one worker running in your system (if your workers run as processes, which is the default).
|
||||
The 256 MB limit is just an example and should be adjusted to your system configuration. We would suggest to not go below 128MB though, since new workers start with around 80 MB already. Also take into consideration that this value is per worker and that you may have more than one worker running in your system.
|
||||
```
|
||||
|
||||
```eval_rst
|
||||
.. warning::
|
||||
The ``max-memory-per-child`` parameter only works when workers run as processes (which is the default). It does not work for threads.
|
||||
```
|
||||
|
||||
```eval_rst
|
||||
.. note::
|
||||
Alternatively, you can also limit the number of runs per worker until a restart is performed with the worker parameter ``max-tasks-per-child``. This can also protect against memory leaks if you set the threshold is low enough. However, it is less precise since than using ``max-memory-per-child``.
|
||||
```
|
||||
|
||||
See also the [official Celery documentation](https://docs.celeryproject.org/en/stable/userguide/workers.html#max-memory-per-child-setting) for more information about these two worker parameters.
|
||||
|
||||
### Supervisor
|
||||
|
||||
@@ -78,35 +46,68 @@ To setup install superlance into your venv with:
|
||||
pip install superlance
|
||||
```
|
||||
|
||||
You can then add `memmon` to your `supervisor.conf`. Here is an example setup with a worker that runs with gevent:
|
||||
You can then add `memmon` to your `supervisor.conf`:
|
||||
|
||||
```text
|
||||
[eventlistener:memmon]
|
||||
command=/home/allianceserver/venv/auth/bin/memmon -p worker=512MB
|
||||
command=/home/allianceserver/venv/auth/bin/memmon -p worker=256MB
|
||||
directory=/home/allianceserver/myauth
|
||||
events=TICK_60
|
||||
```
|
||||
|
||||
This setup will check the memory consumption of the program "worker" every 60 secs and automatically restart it if is goes above 512 MB. Note that it will use the stop signal configured in supervisor, which is `TERM` by default. `TERM` will cause a "warm shutdown" of your worker, so all currently running tasks are completed before the restart.
|
||||
This setup will check the memory consumption of the program "worker" every 60 secs and automatically restart it if it goes above 256 MB. Note that it will use the stop signal configured in supervisor, which is `TERM` by default. `TERM` will cause a "warm shutdown" of your worker, so all currently running tasks are completed before the restart.
|
||||
|
||||
Again, the 512 MB is just an example and should be adjusted to fit your system configuration.
|
||||
Again, the 256 MB is just an example and should be adjusted to fit your system configuration.
|
||||
|
||||
## Increasing task throughput
|
||||
|
||||
Celery tasks are designed to run concurrently, so one obvious way to increase task throughput is run more tasks in parallel.
|
||||
Celery tasks are designed to run concurrently, so one obvious way to increase task throughput is to run more tasks in parallel. The default celery worker configuration will allow either of these options to be configured out of the box.
|
||||
|
||||
### Extra Worker Threads
|
||||
|
||||
The easiest way to increase throughput is to increase the `numprocs` parameter of the supervisor process. For example:
|
||||
|
||||
```text
|
||||
[program:worker]
|
||||
...
|
||||
numprocs=2
|
||||
process_name=%(program_name)s_%(process_num)02d
|
||||
...
|
||||
```
|
||||
|
||||
This number will be multiplied by your concurrency setting:
|
||||
|
||||
```
|
||||
numprocs * concurrency = workers
|
||||
```
|
||||
|
||||
Increasing this number will require a modification to the memmon settings, as each `numprocs` worker will get a unique name. For example, with `numprocs=3`:
|
||||
|
||||
```text
|
||||
[eventlistener:memmon]
|
||||
...
|
||||
command=... -p worker_00=256MB -p worker_01=256MB -p worker_02=256MB
|
||||
...
|
||||
```
|
||||
|
||||
```eval_rst
|
||||
.. hint::
|
||||
You will want to experiment with different settings to find the optimal. One way to generate task load and verify your configuration is to run a model update with the following command:
|
||||
|
||||
::
|
||||
|
||||
celery -A myauth call allianceauth.eveonline.tasks.run_model_update
|
||||
|
||||
```
|
||||
|
||||
### Concurrency
|
||||
|
||||
This can be achieved by the setting the concurrency parameter of the celery worker to a higher number. For example:
|
||||
|
||||
```text
|
||||
--concurrency=4
|
||||
--concurrency=10
|
||||
```
|
||||
|
||||
However, there is a catch: In the default configuration each worker will spawn as its own process. So increasing the number of workers will increase both CPU load and memory consumption in your system.
|
||||
|
||||
The recommended number of workers is one per core, which is what you get automatically with the default configuration. Going beyond that can quickly reduce your overall system performance. That is, the response time for Alliance Auth or other apps running on the same system may take a hit while many tasks are running.
|
||||
|
||||
```eval_rst
|
||||
.. hint::
|
||||
The optimal number will hugely depend on your individual system configuration and you may want to experiment with different settings to find the optimal. One way to generate task load and verify your configuration is to run a model update with the following command:
|
||||
@@ -117,43 +118,6 @@ The recommended number of workers is one per core, which is what you get automat
|
||||
|
||||
```
|
||||
|
||||
### Processes vs. Threads
|
||||
|
||||
A better way to increase concurrency without impacting is to switch from processes to threads for celery workers. In general celery workers perform better with processes when tasks are primarily CPU bound. And they perform better with threads when tasks that are primarily I/O bound.
|
||||
|
||||
Alliance Auth tasks are primarily I/O bound (most tasks are fetching data from ESI and/or updating the local database), so threads are clearly the better choice for Alliance Auth. However, there is a catch. Celery's out-of-the-box support for threads is limited and additional packages and configurations is required to make it work. Nonetheless, the performance gain - especially in smaller systems - is significant, so it may well be worth the additional configuration complexity.
|
||||
|
||||
```eval_rst
|
||||
.. warning::
|
||||
One important feature that no longer works with threads is the worker parameter ``--max-memory-per-child`` that protects against memory leaks. But you can alternatively use supervisor_ to monitor and restart your workers.
|
||||
```
|
||||
|
||||
See also the also [this guide](https://www.distributedpython.com/2018/10/26/celery-execution-pool/) on more information about how to configure the execution pool for workers.
|
||||
|
||||
### Setting up for threads
|
||||
|
||||
First, you need to install a threads packages. Celery supports both gevent and eventlet. We will go with gevent, since it's newer and better supported. Should you encounter any issues with gevent, you may want to try eventlet.
|
||||
|
||||
To install gevent make sure you are in your venv and install the following:
|
||||
|
||||
```bash
|
||||
pip install gevent
|
||||
```
|
||||
|
||||
Next we need to reconfigure the workers to use gevent threads. For that add the following parameters to your worker config:
|
||||
|
||||
```text
|
||||
--pool=gevent --concurrency=10
|
||||
```
|
||||
|
||||
Full example:
|
||||
|
||||
```text
|
||||
command=/home/allianceserver/venv/auth/bin/celery -A myauth worker --pool=gevent --concurrency=10
|
||||
```
|
||||
|
||||
Make sure to restart supervisor to activate the changes.
|
||||
|
||||
```eval_rst
|
||||
.. hint::
|
||||
The optimal number of concurrent workers will be different for every system and we recommend experimenting with different figures to find the optimal for your system. Note, that the example of 10 threads is conservative and should work even with smaller systems.
|
||||
|
||||
@@ -37,7 +37,7 @@ dependencies = [
|
||||
"celery>=5.2.0,<6",
|
||||
"django-bootstrap-form",
|
||||
"django-celery-beat>=2.3.0",
|
||||
"django-esi>=4.0.1",
|
||||
"django-esi>=5.0.0",
|
||||
"django-redis>=5.2.0",
|
||||
"django-registration>=3.3,<3.4",
|
||||
"django-sortedm2m",
|
||||
|
||||
thirdparty/Supervisor/auth-mumble.conf (vendored, 13 lines)
@@ -1,13 +0,0 @@
|
||||
[program:auth-mumble]
|
||||
command=python authenticator.py
|
||||
directory=/home/allianceserver/allianceauth/thirdparty/Mumble
|
||||
user=allianceserver
|
||||
numprocs=1
|
||||
stdout_logfile=/home/allianceserver/allianceauth/log/authenticator.log
|
||||
stderr_logfile=/home/allianceserver/allianceauth/log/authenticator.log
|
||||
autostart=true
|
||||
autorestart=true
|
||||
startsecs=10
|
||||
stopwaitsecs = 600
|
||||
killasgroup=true
|
||||
priority=500
|
||||
thirdparty/Supervisor/auth.conf (vendored, 28 lines)
@@ -1,28 +0,0 @@
|
||||
[program:celerybeat]
|
||||
command=celery -A alliance_auth beat
|
||||
directory=/home/allianceserver/allianceauth
|
||||
user=allianceserver
|
||||
stdout_logfile=/home/allianceserver/allianceauth/log/beat.log
|
||||
stderr_logfile=/home/allianceserver/allianceauth/log/beat.log
|
||||
autostart=true
|
||||
autorestart=true
|
||||
startsecs=10
|
||||
priority=998
|
||||
|
||||
[program:celeryd]
|
||||
command=celery -A alliance_auth worker
|
||||
directory=/home/allianceserver/allianceauth
|
||||
user=allianceserver
|
||||
numprocs=1
|
||||
stdout_logfile=/home/allianceserver/allianceauth/log/worker.log
|
||||
stderr_logfile=/home/allianceserver/allianceauth/log/worker.log
|
||||
autostart=true
|
||||
autorestart=true
|
||||
startsecs=10
|
||||
stopwaitsecs = 600
|
||||
killasgroup=true
|
||||
priority=998
|
||||
|
||||
[group:auth]
|
||||
programs=celerybeat,celeryd
|
||||
priority=999
|
||||