diff --git a/.github/README.md b/.github/README.md
new file mode 100644
index 0000000..de74fb6
--- /dev/null
+++ b/.github/README.md
@@ -0,0 +1,98 @@
+# GitHub Copilot Agent Configuration
+
+This directory contains configuration files for GitHub Copilot and specialized AI agents.
+
+## Files Overview
+
+### Main Instructions
+- **`copilot-instructions.md`** - Main instructions for GitHub Copilot with repository overview, conventions, and guidelines
+
+### Agent-Specific Instructions (`agents/` directory)
+- **`celery-tasks.md`** - Guidelines for developing and maintaining Celery background tasks
+- **`django-development.md`** - Django application development patterns and best practices
+- **`testing.md`** - Testing framework, patterns, and conventions
+- **`documentation.md`** - Documentation standards and writing guidelines
+
+## Purpose
+
+These files provide:
+
+1. **Context for AI Assistants** - Help GitHub Copilot and other AI tools understand the codebase structure and conventions
+2. **Onboarding Documentation** - Guide new developers on project patterns and practices
+3. **Consistency** - Ensure consistent coding style and patterns across the codebase
+4. **Best Practices** - Document proven patterns for common tasks
+
+## Usage
+
+### For GitHub Copilot
+GitHub Copilot automatically reads `.github/copilot-instructions.md` to understand project conventions.
+
+### For Specialized Agents
+Agent-specific instruction files in `.github/agents/` provide detailed guidance for:
+- Celery task development with job tracking
+- Django models, views, admin interface, and management commands
+- Writing comprehensive tests with proper mocking and assertions
+- Creating and maintaining project documentation
+
+## Repository Overview
+
+**impresso-user-admin** is a Django application that manages user-related information for the Impresso project. Key features:
+
+- **Background Processing**: Celery with Redis for asynchronous tasks
+- **User Management**: Django authentication with custom user plans and permissions
+- **Email Notifications**: Multi-format emails (text + HTML) for user actions
+
+## Technology Stack
+
+- Python 3.12+ with type hints
+- Django web framework
+- Celery task queue with Redis
+- MySQL database
+- Docker for containerization
+- pipenv for dependency management
+- mypy for type checking
+
+## Key Concepts
+
+### Task Organization
+- **`impresso/tasks/`** - Celery task definitions with decorators
+- **`impresso/utils/tasks/`** - Helper functions used by tasks
+- Job progress tracking via database and Redis
+- User-based permissions
+
+### User Permissions
+- User groups for different plans (Basic, Researcher, Educational)
+- UserBitmap for fine-grained access control
+- Profile with user-specific settings
+
+### Development Workflow
+```bash
+# Start services
+docker compose up -d
+
+# Run Django server
+ENV=dev pipenv run ./manage.py runserver
+
+# Run Celery worker (separate terminal)
+ENV=dev pipenv run celery -A impresso worker -l info
+
+# Run tests
+ENV=dev pipenv run ./manage.py test
+
+# Type checking
+pipenv run mypy --config-file ./.mypy.ini impresso
+```
+
+## Contributing
+
+When modifying these instruction files:
+1. Keep examples practical and based on actual code in the repository
+2. Update instructions when significant patterns or conventions change
+3. Ensure consistency across all agent instruction files
+4. Test that instructions are clear and actionable
+
+## Resources
+
+- Repository: https://github.com/impresso/impresso-user-admin
+- Impresso Project: https://impresso-project.ch
+- License: GNU Affero General Public License v3.0
diff --git a/.github/agents/celery-tasks.md b/.github/agents/celery-tasks.md
new file mode 100644
index 0000000..78624c8
--- /dev/null
+++ b/.github/agents/celery-tasks.md
@@ -0,0 +1,231 @@
+# Agent: Celery Tasks Development
+
+This agent specializes in developing and maintaining Celery background tasks for the impresso-user-admin Django application.
+
+## Expertise
+
+- Creating new Celery tasks with proper decorators and configuration
+- Writing helper functions for task operations
+- Implementing job progress tracking
+- Managing user permissions and access control
+- Error handling and retry logic
+- Structured logging
+
+## Task Development Guidelines
+
+### Task Definition Structure
+
+All Celery tasks should follow this pattern:
+
+```python
+from celery import shared_task
+from celery.utils.log import get_task_logger
+
+logger = get_task_logger(__name__)
+
+@shared_task(
+ bind=True,
+ autoretry_for=(Exception,),
+    retry_backoff=2,
+ retry_kwargs={"max_retries": 5},
+ retry_jitter=True,
+)
+def task_name(self, param: type) -> return_type:
+ """
+ Task description.
+
+ Args:
+ param: Description
+
+ Returns:
+ Description
+ """
+ logger.info(f"[context] Starting task with param={param}")
+ # Implementation
+```
+
+### File Organization
+
+- **Task definitions**: Place in `impresso/tasks/`
+ - Use descriptive filenames ending in `_task.py` or `_tasks.py`
+ - Import and use helper functions from utils
+
+- **Helper functions**: Place in `impresso/utils/tasks/`
+ - Reusable logic that can be called by multiple tasks
+ - Database operations, API calls, data processing
+ - Keep helpers stateless and testable
+
+### Job Progress Tracking
+
+For long-running tasks, use the Job model to track progress:
+
+```python
+from impresso.models import Job
+from impresso.utils.tasks import (
+ update_job_progress,
+ update_job_completed,
+ is_task_stopped,
+ TASKSTATE_PROGRESS,
+)
+
+def long_running_task(self, job_id: int):  # NOTE: requires @shared_task(bind=True) so `self` is the task
+ job = Job.objects.get(pk=job_id)
+
+ # Check if user stopped the job
+ if is_task_stopped(task=self, job=job, progress=0.0, logger=logger):
+ return
+
+ # Update progress
+ update_job_progress(
+ task=self,
+ job=job,
+ progress=0.5, # 50%
+ taskstate=TASKSTATE_PROGRESS,
+ extra={"current_step": "processing"},
+ message="Processing data...",
+ logger=logger,
+ )
+
+ # Complete the job
+ update_job_completed(
+ task=self,
+ job=job,
+ extra={"results": "summary"},
+ message="Task completed successfully",
+ logger=logger,
+ )
+```
+
+### Email Operations
+
+Use the email utility functions:
+
+```python
+from impresso.utils.tasks.email import send_templated_email_with_context
+from django.conf import settings
+
+success = send_templated_email_with_context(
+ template='notification_name', # Uses emails/notification_name.txt and .html
+ subject='Email Subject',
+ from_email=f"Impresso Team <{settings.DEFAULT_FROM_EMAIL}>",
+ to=[user.email],
+ cc=[settings.DEFAULT_FROM_EMAIL],
+ reply_to=[settings.DEFAULT_FROM_EMAIL],
+ context={
+ 'user': user,
+ 'custom_data': 'value',
+ },
+ logger=logger,
+ fail_silently=False,
+)
+```
+
+Implement proper error handling with retries:
+
+```python
+from django.db.utils import IntegrityError
+from requests.exceptions import RequestException
+
+@shared_task(
+ bind=True,
+ autoretry_for=(RequestException, IntegrityError),
+    retry_backoff=2,
+ retry_kwargs={"max_retries": 5},
+ retry_jitter=True,
+)
+def resilient_task(self, param: str):
+ try:
+ # Task logic
+ pass
+ except ValueError as e:
+ # Don't retry validation errors
+ logger.error(f"Validation error: {e}")
+ raise
+ except Exception as e:
+ # Log and let Celery handle retry
+ logger.exception(f"Unexpected error: {e}")
+ raise
+```
+
+### Logging Best Practices
+
+Use structured logging with context:
+
+```python
+# Always include relevant IDs
+logger.info(f"[job:{job.pk} user:{user.pk}] Starting operation")
+
+# Include metrics
+logger.info(
+ f"[job:{job.pk}] Processed {count} items in {qtime}ms "
+ f"(page {page}/{loops}, {progress*100:.2f}%)"
+)
+
+# Use appropriate levels
+logger.debug(f"Debug info: {data}")
+logger.info(f"Operation completed successfully")
+logger.warning(f"Potential issue: {warning}")
+logger.error(f"Error occurred: {error}")
+logger.exception(f"Exception with traceback: {e}") # Includes stack trace
+```
+
+## Testing Tasks
+
+Create tests in `impresso/tests/tasks/`:
+
+```python
+from django.test import TestCase, TransactionTestCase
+from django.contrib.auth.models import User
+from impresso.tasks.my_task import my_task
+from django.core import mail
+
+class TestMyTask(TransactionTestCase):
+ """
+ Test my_task functionality.
+
+ Run with:
+ ENV=dev pipenv run ./manage.py test impresso.tests.tasks.TestMyTask
+ """
+
+ def setUp(self):
+ self.user = User.objects.create_user(
+ username="testuser",
+ email="test@example.com",
+ password="password123"
+ )
+ # Create default groups
+ from impresso.signals import create_default_groups
+ create_default_groups(sender="impresso")
+
+ def test_task_execution(self):
+ # Clear mail outbox
+ mail.outbox = []
+
+ # Run task
+ result = my_task(user_id=self.user.id)
+
+ # Assertions
+ self.assertEqual(result, expected_value)
+ self.assertEqual(len(mail.outbox), 1)
+```
+
+## Configuration Settings
+
+Key Celery settings from `settings.py`:
+
+- `CELERY_BROKER_URL` - Redis connection for Celery
+- `IMPRESSO_GROUP_USER_PLAN_*` - User plan group names
+- `DEFAULT_FROM_EMAIL` - Email sender address
+
+## Key Models
+
+- `Job` - Tracks long-running asynchronous tasks
+- `UserBitmap` - User access permissions as bitmap
+- `UserChangePlanRequest` - Plan upgrade/downgrade requests
+- `UserSpecialMembershipRequest` - Special membership requests
+- `Profile` - User profile with uid
+
+## References
+
+- Celery documentation: https://docs.celeryq.dev/
+- Django documentation: https://docs.djangoproject.com/
diff --git a/.github/agents/django-development.md b/.github/agents/django-development.md
new file mode 100644
index 0000000..14744bd
--- /dev/null
+++ b/.github/agents/django-development.md
@@ -0,0 +1,651 @@
+# Agent: Django Development
+
+This agent specializes in Django application development for the impresso-user-admin project.
+
+## Expertise
+
+- Django models, views, and admin interface
+- User authentication and authorization
+- Django signals and middleware
+- URL routing and template rendering
+- Django management commands
+- Database migrations
+- Form handling and validation
+
+## Django Project Structure
+
+### Apps Organization
+
+The project is organized as a single Django app named `impresso` with the following structure:
+
+```
+impresso/
+├── __init__.py
+├── settings.py # Django settings
+├── base.py # Base settings and dotenv loading
+├── urls.py # URL routing
+├── wsgi.py # WSGI application
+├── celery.py # Celery configuration
+├── models/ # Database models
+├── views/ # View functions/classes
+├── admin/ # Admin customizations
+├── signals.py # Django signals
+├── management/
+│ └── commands/ # Custom management commands
+├── templates/ # HTML templates
+│ └── emails/ # Email templates
+├── static/ # Static files (CSS, JS, images)
+└── tests/ # Test suite
+```
+
+## Models
+
+### Model Conventions
+
+- Use `django.db.models.Model` as base class
+- Define `__str__()` method for readable representations
+- Use `Meta` class for model options
+- Add docstrings to models and complex fields
+- Use Django's built-in field types
+- Define proper relationships (ForeignKey, ManyToMany)
+
+### Key Models
+
+- **User** - Django's built-in User model (from `django.contrib.auth.models`)
+- **Profile** - User profile with `uid`
+- **UserBitmap** - User access permissions as bitmap
+- **Job** - Tracks asynchronous background tasks
+- **UserChangePlanRequest** - Plan upgrade/downgrade requests
+- **UserSpecialMembershipRequest** - Special membership requests
+
+### Model Example
+
+```python
+from django.db import models
+from django.contrib.auth.models import User
+from django.utils import timezone
+
+class MyModel(models.Model):
+ """
+ Description of the model.
+ """
+ # Fields
+ name = models.CharField(max_length=255, help_text="Display name")
+ creator = models.ForeignKey(
+ User,
+ on_delete=models.CASCADE,
+ related_name="mymodels"
+ )
+ date_created = models.DateTimeField(default=timezone.now)
+ is_active = models.BooleanField(default=True)
+
+ class Meta:
+ ordering = ['-date_created']
+ verbose_name = "My Model"
+ verbose_name_plural = "My Models"
+ indexes = [
+ models.Index(fields=['creator', '-date_created']),
+ ]
+
+ def __str__(self):
+ return f"{self.name} (by {self.creator.username})"
+
+ def save(self, *args, **kwargs):
+ """Override save to add custom logic."""
+ # Custom logic before save
+ super().save(*args, **kwargs)
+ # Custom logic after save
+```
+
+## Django Admin
+
+### Admin Customization
+
+Customize the admin interface in `impresso/admin/`:
+
+```python
+from django.contrib import admin
+from impresso.models import MyModel
+
+@admin.register(MyModel)
+class MyModelAdmin(admin.ModelAdmin):
+ """Admin interface for MyModel."""
+
+ list_display = ('name', 'creator', 'date_created', 'is_active')
+ list_filter = ('is_active', 'date_created')
+ search_fields = ('name', 'creator__username')
+ readonly_fields = ('date_created',)
+ date_hierarchy = 'date_created'
+
+ fieldsets = (
+ ('Basic Information', {
+ 'fields': ('name', 'creator', 'is_active')
+ }),
+ ('Metadata', {
+ 'fields': ('date_created',),
+ 'classes': ('collapse',)
+ }),
+ )
+
+ def get_queryset(self, request):
+ """Optimize queryset with select_related."""
+ qs = super().get_queryset(request)
+ return qs.select_related('creator')
+```
+
+### Admin Actions
+
+```python
+@admin.register(MyModel)
+class MyModelAdmin(admin.ModelAdmin):
+ actions = ['activate_items', 'deactivate_items']
+
+ def activate_items(self, request, queryset):
+ """Activate selected items."""
+ count = queryset.update(is_active=True)
+ self.message_user(request, f"{count} items activated.")
+ activate_items.short_description = "Activate selected items"
+
+ def deactivate_items(self, request, queryset):
+ """Deactivate selected items."""
+ count = queryset.update(is_active=False)
+ self.message_user(request, f"{count} items deactivated.")
+ deactivate_items.short_description = "Deactivate selected items"
+```
+
+## Management Commands
+
+### Creating Management Commands
+
+Create custom commands in `impresso/management/commands/`:
+
+```python
+from django.core.management.base import BaseCommand, CommandError
+from django.contrib.auth.models import User
+from impresso.models import MyModel
+import logging
+
+logger = logging.getLogger(__name__)
+
+
+class Command(BaseCommand):
+ """
+ Management command description.
+
+ Usage:
+ ENV=dev pipenv run ./manage.py mycommand [options]
+ """
+ help = 'Command description'
+
+ def add_arguments(self, parser):
+ """Add command-line arguments."""
+ parser.add_argument(
+ 'user_id',
+ type=int,
+ help='User ID to process'
+ )
+ parser.add_argument(
+ '--dry-run',
+ action='store_true',
+ help='Run without making changes'
+ )
+ parser.add_argument(
+ '--verbose',
+ action='store_true',
+ help='Verbose output'
+ )
+
+ def handle(self, *args, **options):
+ """Execute command logic."""
+ user_id = options['user_id']
+ dry_run = options['dry_run']
+ verbose = options['verbose']
+
+ # Set logging level
+ if verbose:
+ logger.setLevel(logging.DEBUG)
+
+ try:
+ user = User.objects.get(pk=user_id)
+ logger.info(f"Processing user: {user.username}")
+
+ if dry_run:
+ self.stdout.write(
+ self.style.WARNING('DRY RUN - no changes made')
+ )
+ else:
+ # Do actual work
+ result = self.process_user(user)
+
+ self.stdout.write(
+ self.style.SUCCESS(f'Successfully processed: {result}')
+ )
+
+ except User.DoesNotExist:
+ raise CommandError(f'User with ID {user_id} does not exist')
+
+ except Exception as e:
+ logger.exception(f"Error processing user {user_id}")
+ raise CommandError(f'Error: {e}')
+
+ def process_user(self, user):
+ """Process user logic."""
+ # Implementation
+ return "result"
+```
+
+### Existing Commands
+
+Key management commands in the project:
+
+- `createaccount` - Create user accounts with random passwords
+- `createsuperuser` - Create admin user (built-in Django command)
+- `createcollection` - Create or get a collection
+- `stopjob` - Stop a running job
+- `updateuserbitmap` - Update user bitmap
+- `updatespecialmembership` - Update special membership status
+
+## Settings Management
+
+### Environment-Based Settings
+
+Settings are loaded via dotenv files:
+
+```python
+# impresso/base.py
+import os
+from dotenv import load_dotenv
+
+# Load environment-specific .env file
+env = os.environ.get('ENV', 'dev')
+env_file = f'.{env}.env'  # e.g. .dev.env for ENV=dev, .prod.env for ENV=prod
+load_dotenv(env_file)
+
+# Access settings
+SECRET_KEY = os.environ.get('SECRET_KEY')
+DEBUG = os.environ.get('DEBUG', 'False') == 'True'
+```
+
+### Settings Structure
+
+- `impresso/base.py` - Base settings and dotenv loading
+- `impresso/settings.py` - Main settings file
+- `.example.env` - Template for environment variables
+- `.dev.env` - Development settings
+- `.prod.env` - Production settings
+
+### Key Settings
+
+```python
+# Database
+DATABASES = {
+ 'default': {
+ 'ENGINE': 'django.db.backends.mysql',
+ 'HOST': os.environ.get('IMPRESSO_DB_HOST'),
+ 'PORT': os.environ.get('IMPRESSO_DB_PORT'),
+ 'NAME': os.environ.get('IMPRESSO_DB_NAME'),
+ 'USER': os.environ.get('IMPRESSO_DB_USER'),
+ 'PASSWORD': os.environ.get('IMPRESSO_DB_PASSWORD'),
+ }
+}
+
+# Celery
+CELERY_BROKER_URL = os.environ.get('REDIS_HOST', 'redis://localhost:6379')
+
+# Email
+EMAIL_BACKEND = os.environ.get('EMAIL_BACKEND')
+DEFAULT_FROM_EMAIL = os.environ.get('DEFAULT_FROM_EMAIL')
+
+# Custom settings
+IMPRESSO_BASE_URL = os.environ.get('IMPRESSO_BASE_URL')
+```
+
+## Django Signals
+
+### Signal Definitions
+
+Signals are defined in `impresso/signals.py`:
+
+```python
+from django.db.models.signals import post_save, pre_save
+from django.dispatch import receiver
+from django.contrib.auth.models import User
+from impresso.models import Profile, UserBitmap
+
+@receiver(post_save, sender=User)
+def create_user_profile(sender, instance, created, **kwargs):
+ """
+ Create Profile and UserBitmap when User is created.
+ """
+ if created:
+ Profile.objects.get_or_create(
+ user=instance,
+ defaults={'uid': f"user-{instance.username}"}
+ )
+ UserBitmap.objects.get_or_create(user=instance)
+
+@receiver(pre_save, sender=UserBitmap)
+def update_user_bitmap(sender, instance, **kwargs):
+ """
+ Update bitmap before saving based on user groups.
+ """
+ # Calculate bitmap value from user groups
+ instance.calculate_bitmap()
+```
+
+### Signal Registration
+
+Signals must be imported in `impresso/__init__.py`:
+
+```python
+default_app_config = 'impresso.apps.ImpressoConfig'  # legacy; unnecessary (and removed) in Django 4.1+
+```
+
+And in `impresso/apps.py`:
+
+```python
+from django.apps import AppConfig
+
+class ImpressoConfig(AppConfig):
+ name = 'impresso'
+
+ def ready(self):
+ """Import signals when app is ready."""
+ import impresso.signals
+```
+
+## User Authentication & Authorization
+
+### User Groups
+
+The project uses Django groups for user plans:
+
+- `settings.IMPRESSO_GROUP_USER_PLAN_BASIC` - Basic user plan
+- `settings.IMPRESSO_GROUP_USER_PLAN_RESEARCHER` - Researcher plan
+- `settings.IMPRESSO_GROUP_USER_PLAN_EDUCATIONAL` - Educational plan
+- `settings.IMPRESSO_GROUP_USER_PLAN_NO_REDACTION` - Special privilege
+
+### Checking User Permissions
+
+```python
+from django.conf import settings
+
+def check_user_plan(user):
+ """Check user's plan."""
+ if user.groups.filter(name=settings.IMPRESSO_GROUP_USER_PLAN_RESEARCHER).exists():
+ return 'researcher'
+ elif user.groups.filter(name=settings.IMPRESSO_GROUP_USER_PLAN_EDUCATIONAL).exists():
+ return 'educational'
+ else:
+ return 'basic'
+
+def user_has_no_redaction(user):
+ """Check if user has no-redaction privilege."""
+ return user.groups.filter(
+ name=settings.IMPRESSO_GROUP_USER_PLAN_NO_REDACTION
+ ).exists()
+```
+
+### User Profile Access
+
+```python
+def get_user_limits(user):
+ """Get user's profile information."""
+ profile = user.profile
+ return {
+ 'uid': profile.uid,
+ }
+```
+
+## Database Migrations
+
+### Creating Migrations
+
+```bash
+# Create migrations for changes
+ENV=dev pipenv run ./manage.py makemigrations
+
+# Create named migration
+ENV=dev pipenv run ./manage.py makemigrations --name add_field_to_model
+
+# Show SQL for migrations
+ENV=dev pipenv run ./manage.py sqlmigrate impresso 0001
+
+# Apply migrations
+ENV=dev pipenv run ./manage.py migrate
+
+# Show migration status
+ENV=dev pipenv run ./manage.py showmigrations
+```
+
+### Migration Best Practices
+
+- Keep migrations small and focused
+- Test migrations on copy of production data
+- Never modify applied migrations
+- Use `RunPython` for data migrations
+- Add `reverse_code` for rollback support
+
+### Data Migration Example
+
+```python
+from django.db import migrations
+
+def forwards_func(apps, schema_editor):
+ """Apply data migration."""
+ MyModel = apps.get_model('impresso', 'MyModel')
+ db_alias = schema_editor.connection.alias
+
+ # Update data
+ MyModel.objects.using(db_alias).filter(
+ old_field=True
+ ).update(new_field='value')
+
+def reverse_func(apps, schema_editor):
+ """Reverse data migration."""
+ MyModel = apps.get_model('impresso', 'MyModel')
+ db_alias = schema_editor.connection.alias
+
+ # Reverse changes
+ MyModel.objects.using(db_alias).filter(
+ new_field='value'
+ ).update(old_field=True)
+
+class Migration(migrations.Migration):
+ dependencies = [
+ ('impresso', '0001_initial'),
+ ]
+
+ operations = [
+ migrations.RunPython(forwards_func, reverse_func),
+ ]
+```
+
+## URL Configuration
+
+URLs are defined in `impresso/urls.py`:
+
+```python
+from django.urls import path, include
+from django.contrib import admin
+from impresso import views
+
+urlpatterns = [
+ path('admin/', admin.site.urls),
+ path('api/', include('impresso.api.urls')),
+ path('accounts/', include('django_registration.backends.activation.urls')),
+]
+```
+
+## Templates
+
+### Template Organization
+
+Templates are in `impresso/templates/`:
+
+```
+templates/
+├── base.html # Base template
+├── emails/ # Email templates
+│ ├── notification.txt # Plain text version
+│ └── notification.html # HTML version
+└── admin/ # Admin overrides
+```
+
+### Email Templates
+
+Email templates should have both .txt and .html versions:
+
+```html
+<!-- emails/notification.html -->
+<!DOCTYPE html>
+<html>
+<head>
+  <meta charset="utf-8">
+</head>
+<body>
+  <p>Dear {{ user.first_name }},</p>
+  <p>{{ message }}</p>
+  <p>Best regards,<br>The Impresso Team</p>
+</body>
+</html>
+```
+
+```text
+# emails/notification.txt
+Dear {{ user.first_name }},
+
+{{ message }}
+
+Best regards,
+The Impresso Team
+```
+
+## Middleware
+
+Custom middleware can be added to `impresso/middleware.py`:
+
+```python
+class CustomMiddleware:
+ """Custom middleware description."""
+
+ def __init__(self, get_response):
+ self.get_response = get_response
+
+ def __call__(self, request):
+ # Code executed before view
+
+ response = self.get_response(request)
+
+ # Code executed after view
+
+ return response
+```
+
+Register in settings:
+
+```python
+MIDDLEWARE = [
+ # Django defaults
+ 'django.middleware.security.SecurityMiddleware',
+ 'django.contrib.sessions.middleware.SessionMiddleware',
+ # ...
+ 'impresso.middleware.CustomMiddleware', # Add custom middleware
+]
+```
+
+## Database Optimization
+
+### Query Optimization
+
+```python
+# Use select_related for ForeignKey
+users = User.objects.select_related('profile').all()
+
+# Use prefetch_related for ManyToMany
+users = User.objects.prefetch_related('groups').all()
+
+# Use only() to fetch specific fields
+users = User.objects.only('id', 'username', 'email').all()
+
+# Use defer() to exclude fields
+users = User.objects.defer('password', 'last_login').all()
+
+# Use exists() instead of count() for existence check
+if User.objects.filter(email=email).exists():
+ # ...
+
+# Use values() for dictionary results
+user_data = User.objects.values('id', 'username', 'email')
+```
+
+### Database Transactions
+
+```python
+from django.db import transaction
+
+# Atomic decorator
+@transaction.atomic
+def create_user_with_profile(username, email):
+ """Create user and profile atomically."""
+ user = User.objects.create_user(username=username, email=email)
+ Profile.objects.create(user=user, uid=f"user-{username}")
+ return user
+
+# Context manager
+def update_user_plan(user, plan):
+ """Update user plan atomically."""
+ with transaction.atomic():
+ user.groups.clear()
+ user.groups.add(plan)
+ user.profile.plan_updated = timezone.now()
+ user.profile.save()
+```
+
+## Logging
+
+Configure logging in settings:
+
+```python
+LOGGING = {
+ 'version': 1,
+ 'disable_existing_loggers': False,
+ 'formatters': {
+ 'verbose': {
+ 'format': '{levelname} {asctime} {module} {message}',
+ 'style': '{',
+ },
+ },
+ 'handlers': {
+ 'console': {
+ 'class': 'logging.StreamHandler',
+ 'formatter': 'verbose',
+ },
+ },
+ 'loggers': {
+ 'impresso': {
+ 'handlers': ['console'],
+ 'level': 'INFO',
+ },
+ },
+}
+```
+
+## Security Best Practices
+
+- Use Django's built-in security features
+- Never store plaintext passwords
+- Use CSRF protection for forms
+- Validate and sanitize all user inputs
+- Use Django's ORM to prevent SQL injection
+- Keep SECRET_KEY secret and unique
+- Use HTTPS in production
+- Regularly update dependencies
+
+## References
+
+- Django Documentation: https://docs.djangoproject.com/
+- Django Admin: https://docs.djangoproject.com/en/stable/ref/contrib/admin/
+- Django Management Commands: https://docs.djangoproject.com/en/stable/howto/custom-management-commands/
+- Django Migrations: https://docs.djangoproject.com/en/stable/topics/migrations/
+- Django Signals: https://docs.djangoproject.com/en/stable/topics/signals/
diff --git a/.github/agents/documentation.md b/.github/agents/documentation.md
new file mode 100644
index 0000000..2eb0395
--- /dev/null
+++ b/.github/agents/documentation.md
@@ -0,0 +1,675 @@
+# Agent: Documentation
+
+This agent specializes in creating and maintaining documentation for the impresso-user-admin project.
+
+## Expertise
+
+- Writing clear and comprehensive README files
+- Creating API documentation
+- Documenting code with docstrings
+- Writing setup and deployment guides
+- Creating user guides and tutorials
+- Maintaining changelog
+
+## Documentation Standards
+
+### README Structure
+
+A good README should include:
+
+1. **Project Overview** - Brief description of what the project does
+2. **Features** - Key features and capabilities
+3. **Technology Stack** - Technologies and frameworks used
+4. **Installation** - Step-by-step setup instructions
+5. **Configuration** - Environment variables and settings
+6. **Usage** - How to run and use the application
+7. **Development** - Development setup and workflow
+8. **Testing** - How to run tests
+9. **Deployment** - Production deployment instructions
+10. **Contributing** - Guidelines for contributors
+11. **License** - License information
+12. **Resources** - Links to related resources
+
+### Code Documentation
+
+#### Docstrings
+
+Follow Google-style docstrings for Python:
+
+```python
+def function_name(param1: type1, param2: type2) -> return_type:
+ """
+ Brief description of what the function does.
+
+ Longer description if needed, explaining the function's behavior,
+ edge cases, and any important implementation details.
+
+ Args:
+ param1: Description of param1
+ param2: Description of param2
+
+ Returns:
+ Description of return value
+
+ Raises:
+ ExceptionType: When this exception is raised
+
+ Example:
+ >>> result = function_name(value1, value2)
+ >>> print(result)
+ expected_output
+ """
+ # Implementation
+```
+
+#### Class Documentation
+
+```python
+class ClassName:
+ """
+ Brief description of the class.
+
+ Longer description explaining the class's purpose, relationships
+ with other classes, and usage patterns.
+
+ Attributes:
+ attribute1: Description of attribute1
+ attribute2: Description of attribute2
+
+ Example:
+ >>> obj = ClassName(param)
+ >>> obj.method()
+ expected_output
+ """
+
+ def __init__(self, param: type):
+ """
+ Initialize the class.
+
+ Args:
+ param: Description of initialization parameter
+ """
+ self.attribute1 = param
+```
+
+#### Module Documentation
+
+```python
+"""
+Module Name
+
+Brief description of what this module does.
+
+This module provides functionality for [purpose]. It includes
+classes and functions for [specific capabilities].
+
+Key Components:
+ - ClassName: Description
+ - function_name: Description
+
+Example:
+ Basic usage example:
+
+ >>> from module import ClassName
+ >>> obj = ClassName()
+ >>> result = obj.method()
+"""
+```
+
+## Django Project Documentation
+
+### Settings Documentation
+
+Document important settings in comments:
+
+```python
+# Celery Configuration
+# Redis is used as the message broker for Celery task queue
+CELERY_BROKER_URL = os.environ.get('REDIS_HOST', 'redis://localhost:6379')
+
+# Maximum number of results returned per Solr query
+# This limit prevents excessive resource usage
+IMPRESSO_SOLR_EXEC_LIMIT = 100
+
+# Maximum number of query loops allowed per job
+# This prevents infinite loops and resource exhaustion
+IMPRESSO_SOLR_EXEC_MAX_LOOPS = 100
+```
+
+### Model Documentation
+
+```python
+class Job(models.Model):
+ """
+ Tracks the execution of long-running asynchronous tasks.
+
+ Jobs are created when a user initiates a long-running operation
+ like exporting search results or creating a collection. The job
+ status is updated as the task progresses, allowing users to monitor
+ progress and cancel if needed.
+
+ Status Flow:
+ INIT -> RUN -> DONE (success)
+ INIT -> RUN -> RIP (stopped/failed)
+ """
+
+ # Status constants
+ INIT = 'init' # Job created but not started
+ RUN = 'run' # Job is running
+ DONE = 'done' # Job completed successfully
+ STOP = 'stop' # User requested stop
+ RIP = 'rip' # Job stopped or failed
+
+ STATUS_CHOICES = [
+ (INIT, 'Initialized'),
+ (RUN, 'Running'),
+ (DONE, 'Done'),
+ (STOP, 'Stop Requested'),
+ (RIP, 'Stopped'),
+ ]
+```
+
+### Management Command Documentation
+
+```python
+class Command(BaseCommand):
+ """
+ Create a user account with random password.
+
+ This command creates a new user account with a randomly generated
+ password and assigns them to the default user plan (Basic).
+
+ Usage:
+ ENV=dev pipenv run ./manage.py createaccount EMAIL [EMAIL ...]
+
+ Examples:
+ # Create a single account
+ ENV=dev pipenv run ./manage.py createaccount user@example.com
+
+ # Create multiple accounts at once
+ ENV=dev pipenv run ./manage.py createaccount user1@example.com user2@example.com
+
+ Output:
+ Prints the generated password for each created account.
+ """
+```
+
+## API Documentation
+
+### REST API Endpoints
+
+Document API endpoints with:
+
+- **Method** - HTTP method (GET, POST, PUT, DELETE)
+- **URL** - Endpoint URL with parameters
+- **Auth** - Authentication requirements
+- **Parameters** - Request parameters
+- **Response** - Response format and status codes
+- **Examples** - Request/response examples
+
+For example, in your API documentation:
+### Create Collection
+
+Create a new collection for the authenticated user.
+
+**URL**: `/api/collections/`
+
+**Method**: `POST`
+
+**Auth Required**: Yes
+
+**Permissions**: Authenticated users
+
+**Request Body**:
+```json
+{
+ "name": "My Collection",
+ "description": "Collection description"
+}
+```
+
+**Success Response**:
+- **Code**: 201 CREATED
+- **Content**:
+```json
+{
+ "id": "user-john-my-collection",
+ "name": "My Collection",
+ "description": "Collection description",
+ "date_created": "2024-01-15T10:30:00Z",
+ "creator": {
+ "id": 1,
+ "username": "john"
+ }
+}
+```
+
+**Error Responses**:
+- **Code**: 400 BAD REQUEST
+ - **Content**: `{"name": ["This field is required."]}`
+- **Code**: 401 UNAUTHORIZED
+ - **Content**: `{"detail": "Authentication credentials were not provided."}`
+
+**Example**:
+```bash
+curl -X POST https://api.example.com/api/collections/ \
+ -H "Authorization: Bearer TOKEN" \
+ -H "Content-Type: application/json" \
+ -d '{"name": "My Collection", "description": "Test collection"}'
+```
+```
+
+## Changelog
+
+Maintain a CHANGELOG.md following [Keep a Changelog](https://keepachangelog.com/) format:
+
+```markdown
+# Changelog
+
+All notable changes to this project will be documented in this file.
+
+The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
+and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
+
+## [Unreleased]
+
+### Added
+- New feature description
+
+### Changed
+- Changed feature description
+
+### Deprecated
+- Soon-to-be removed feature
+
+### Removed
+- Removed feature
+
+### Fixed
+- Bug fix description
+
+### Security
+- Security fix description
+
+## [1.0.0] - 2024-01-15
+
+### Added
+- Initial release with core features
+- User authentication and authorization
+- Celery task processing
+- Email notification system
+- User bitmap permissions
+
+[Unreleased]: https://github.com/impresso/impresso-user-admin/compare/v1.0.0...HEAD
+[1.0.0]: https://github.com/impresso/impresso-user-admin/releases/tag/v1.0.0
+```
+
+## Setup Documentation
+
+### Installation Guide
+
+```markdown
+## Installation
+
+### Prerequisites
+
+- Python 3.12+
+- pipenv
+- Docker and docker-compose
+- MySQL 8.0+
+- Redis 6.0+
+
+### Step 1: Clone Repository
+
+```bash
+git clone https://github.com/impresso/impresso-user-admin.git
+cd impresso-user-admin
+```
+
+### Step 2: Install Dependencies
+
+```bash
+# Install pyenv if not already installed
+curl https://pyenv.run | bash
+
+# Install Python version
+pyenv install 3.12.4
+
+# Install pipenv
+python -m pip install pipenv
+
+# Install project dependencies
+pipenv install
+```
+
+### Step 3: Configure Environment
+
+```bash
+# Copy example environment file
+cp .example.env .dev.env
+
+# Edit .dev.env with your settings
+nano .dev.env
+```
+
+### Step 4: Start Services
+
+```bash
+# Start Redis and MySQL
+docker compose up -d
+
+# Run migrations
+ENV=dev pipenv run ./manage.py migrate
+
+# Create superuser
+ENV=dev pipenv run ./manage.py createsuperuser
+```
+
+### Step 5: Run Application
+
+```bash
+# Terminal 1: Start Django server
+ENV=dev pipenv run ./manage.py runserver
+
+# Terminal 2: Start Celery worker
+ENV=dev pipenv run celery -A impresso worker -l info
+```
+
+### Step 6: Access Application
+
+- Admin interface: http://localhost:8000/admin/
+- Log in with your superuser credentials
+```
+
+## Configuration Documentation
+
+### Environment Variables
+
+Document all environment variables:
+
+```markdown
+## Environment Variables
+
+### Required Variables
+
+| Variable | Description | Example |
+|----------|-------------|---------|
+| `SECRET_KEY` | Django secret key (keep secret!) | `django-insecure-key123...` |
+| `DEBUG` | Enable debug mode (only in dev) | `True` |
+| `IMPRESSO_DB_HOST` | MySQL database host | `localhost` |
+| `IMPRESSO_DB_PORT` | MySQL database port | `3306` |
+| `IMPRESSO_DB_NAME` | Database name | `impresso` |
+| `IMPRESSO_DB_USER` | Database username | `impresso_user` |
+| `IMPRESSO_DB_PASSWORD` | Database password | `secure_password` |
+| `REDIS_HOST` | Redis connection URL | `redis://localhost:6379` |
+
+### Solr Configuration
+
+| Variable | Description | Example |
+|----------|-------------|---------|
+| `IMPRESSO_SOLR_URL` | Main Solr index URL | `http://localhost:8983/solr/impresso` |
+| `IMPRESSO_SOLR_USER` | Solr read-only user | `reader` |
+| `IMPRESSO_SOLR_PASSWORD` | Solr read-only password | `read_password` |
+| `IMPRESSO_SOLR_USER_WRITE` | Solr write user | `writer` |
+| `IMPRESSO_SOLR_PASSWORD_WRITE` | Solr write password | `write_password` |
+| `IMPRESSO_SOLR_PASSAGES_URL` | Text reuse passages index | `http://localhost:8983/solr/passages` |
+
+### Email Configuration
+
+| Variable | Description | Example |
+|----------|-------------|---------|
+| `EMAIL_BACKEND` | Django email backend | `django.core.mail.backends.smtp.EmailBackend` |
+| `EMAIL_HOST` | SMTP server host | `smtp.gmail.com` |
+| `EMAIL_PORT` | SMTP server port | `587` |
+| `EMAIL_USE_TLS` | Use TLS encryption | `True` |
+| `EMAIL_HOST_USER` | SMTP username | `user@example.com` |
+| `EMAIL_HOST_PASSWORD` | SMTP password | `app_password` |
+| `DEFAULT_FROM_EMAIL` | Default sender email | `noreply@impresso-project.ch` |
+
+### Optional Variables
+
+| Variable | Description | Default | Example |
+|----------|-------------|---------|---------|
+| `IMPRESSO_SOLR_EXEC_LIMIT` | Max rows per Solr query | `100` | `200` |
+| `IMPRESSO_SOLR_EXEC_MAX_LOOPS` | Max query loops | `100` | `200` |
+| `IMPRESSO_BASE_URL` | Base URL for links | - | `https://impresso-project.ch` |
+```
+
+## Troubleshooting Documentation
+
+```markdown
+## Troubleshooting
+
+### Common Issues
+
+#### Database Connection Errors
+
+**Problem**: `django.db.utils.OperationalError: (2003, "Can't connect to MySQL server")`
+
+**Solution**:
+1. Check MySQL is running: `docker ps`
+2. Verify connection settings in `.dev.env`
+3. Test connection: `mysql -h localhost -u user -p`
+
+#### Redis Connection Errors
+
+**Problem**: `redis.exceptions.ConnectionError: Error connecting to Redis`
+
+**Solution**:
+1. Check Redis is running: `docker ps`
+2. Test connection: `redis-cli ping`
+3. Verify `REDIS_HOST` in `.dev.env`
+
+#### Celery Tasks Not Processing
+
+**Problem**: Tasks are queued but not executed
+
+**Solution**:
+1. Check Celery worker is running
+2. Check Redis connection
+3. Verify task is registered: `pipenv run celery -A impresso inspect registered`
+4. Check worker logs for errors
+
+#### Import Errors
+
+**Problem**: `ModuleNotFoundError: No module named 'xyz'`
+
+**Solution**:
+1. Ensure you're in pipenv shell: `pipenv shell`
+2. Install dependencies: `pipenv install`
+3. Check Python version: `python --version`
+
+### Debug Mode
+
+Enable verbose logging:
+
+```python
+# settings.py
+LOGGING = {
+ 'version': 1,
+ 'disable_existing_loggers': False,
+ 'handlers': {
+ 'console': {
+ 'class': 'logging.StreamHandler',
+ },
+ },
+ 'root': {
+ 'handlers': ['console'],
+ 'level': 'DEBUG',
+ },
+}
+```
+```
+
+## Testing Documentation
+
+Document how to run and write tests:
+
+```markdown
+## Testing
+
+### Running Tests
+
+```bash
+# Run all tests
+ENV=dev pipenv run ./manage.py test
+
+# Run specific test module
+ENV=dev pipenv run ./manage.py test impresso.tests.utils.tasks.test_account
+
+# Run with coverage
+ENV=dev pipenv run coverage run --source='impresso' manage.py test
+ENV=dev pipenv run coverage report
+ENV=dev pipenv run coverage html
+
+# Run with verbose output
+ENV=dev pipenv run ./manage.py test --verbosity=2
+```
+
+### Writing Tests
+
+See `.github/agents/testing.md` for comprehensive testing guidelines.
+
+### Test Structure
+
+Tests are organized to mirror the application structure:
+
+```
+impresso/tests/
+├── models/ # Model tests
+├── tasks/ # Task tests
+├── utils/
+│ └── tasks/ # Task utility tests
+└── views/ # View tests
+```
+```
+
+## Deployment Documentation
+
+```markdown
+## Deployment
+
+### Production Setup
+
+#### Prerequisites
+
+- Docker installed on production server
+- SSL certificate configured
+- Domain name configured
+- Firewall rules configured
+
+#### Step 1: Prepare Environment
+
+```bash
+# Create production environment file
+cp .example.env .prod.env
+
+# Edit with production values
+nano .prod.env
+
+# Important: Set DEBUG=False
+# Important: Set strong SECRET_KEY
+# Important: Configure ALLOWED_HOSTS
+```
+
+#### Step 2: Build Docker Image
+
+```bash
+# Build image
+make build BUILD_TAG=v1.0.0
+
+# Test image locally
+make run BUILD_TAG=v1.0.0
+```
+
+#### Step 3: Deploy
+
+```bash
+# Push image to registry
+docker tag impresso/impresso-user-admin:v1.0.0 registry.example.com/impresso-user-admin:v1.0.0
+docker push registry.example.com/impresso-user-admin:v1.0.0
+
+# On production server
+docker pull registry.example.com/impresso-user-admin:v1.0.0
+docker-compose up -d
+```
+
+#### Step 4: Run Migrations
+
+```bash
+docker-compose exec web python manage.py migrate
+```
+
+#### Step 5: Collect Static Files
+
+```bash
+docker-compose exec web python manage.py collectstatic --noinput
+```
+
+### Monitoring
+
+- Check logs: `docker-compose logs -f web`
+- Check Celery: `docker-compose logs -f celery`
+- Monitor Redis: `redis-cli info`
+- Monitor MySQL: Check database connections
+
+### Backup
+
+```bash
+# Backup database
+docker-compose exec db mysqldump -u user -p database > backup.sql
+
+# Backup media files
+tar -czf media_backup.tar.gz media/
+```
+```
+
+## Contributing Guidelines
+
+```markdown
+## Contributing
+
+We welcome contributions! Please follow these guidelines:
+
+### Development Workflow
+
+1. Fork the repository
+2. Create a feature branch: `git checkout -b feature/my-feature`
+3. Make your changes
+4. Write or update tests
+5. Run tests: `ENV=dev pipenv run ./manage.py test`
+6. Run type checking: `pipenv run mypy impresso`
+7. Commit changes: `git commit -m "Add my feature"`
+8. Push to branch: `git push origin feature/my-feature`
+9. Create Pull Request
+
+### Code Style
+
+- Follow PEP 8 style guide
+- Use type hints for all functions
+- Write docstrings for all public functions/classes
+- Keep functions small and focused
+- Write descriptive commit messages
+
+### Testing
+
+- Write tests for all new features
+- Maintain test coverage above 80%
+- Test both success and error cases
+- Use meaningful test names
+
+### Documentation
+
+- Update README for new features
+- Add docstrings to new code
+- Update API documentation if applicable
+- Update CHANGELOG.md
+```
+
+## References
+
+- [Write the Docs](https://www.writethedocs.org/)
+- [Google Style Guide](https://google.github.io/styleguide/pyguide.html)
+- [Keep a Changelog](https://keepachangelog.com/)
+- [Semantic Versioning](https://semver.org/)
+- [Django Documentation](https://docs.djangoproject.com/)
diff --git a/.github/agents/testing.md b/.github/agents/testing.md
new file mode 100644
index 0000000..3776378
--- /dev/null
+++ b/.github/agents/testing.md
@@ -0,0 +1,542 @@
+# Agent: Testing
+
+This agent specializes in writing and maintaining tests for the impresso-user-admin Django application.
+
+## Expertise
+
+- Writing Django unit tests and integration tests
+- Testing Celery tasks and async operations
+- Mocking external services (SMTP)
+- Testing email functionality
+- Database transaction testing
+- User permission and access control testing
+
+## Test Framework
+
+The project uses Django's built-in testing framework based on unittest.
+
+### Test Types
+
+1. **TestCase** - Standard test case with database rollback
+ - Use for most tests
+ - Database changes are rolled back after each test
+ - Faster than TransactionTestCase
+
+2. **TransactionTestCase** - Test case with transaction support
+ - Use when testing transaction behavior
+ - Use when testing signals that depend on commits
+ - Database is flushed between tests (slower)
+
+## Test Organization
+
+### Directory Structure
+
+```
+impresso/tests/
+├── __init__.py
+├── test_runner.py # Custom test runner
+├── models/ # Model tests
+├── tasks/ # Task tests
+│ ├── __init__.py
+│ └── test_*.py
+└── utils/
+ └── tasks/ # Task utility tests
+ ├── __init__.py
+ ├── test_account.py
+ ├── test_userBitmap.py
+ └── email.py
+```
+
+### Test File Naming
+
+- Prefix test files with `test_`: `test_account.py`
+- Mirror the structure of the code being tested
+- Group related tests in the same file
+
+### Test Class Naming
+
+```python
+class TestFeatureName(TestCase):
+ """
+ Test feature description.
+
+ Run with:
+ ENV=dev pipenv run ./manage.py test impresso.tests.path.TestFeatureName
+ """
+```
+
+## Running Tests
+
+```bash
+# Run all tests
+ENV=dev pipenv run ./manage.py test
+
+# Run specific app tests
+ENV=dev pipenv run ./manage.py test impresso
+
+# Run specific test file
+ENV=dev pipenv run ./manage.py test impresso.tests.utils.tasks.test_account
+
+# Run specific test class
+ENV=dev pipenv run ./manage.py test impresso.tests.utils.tasks.test_account.TestAccountPlanChange
+
+# Run specific test method
+ENV=dev pipenv run ./manage.py test impresso.tests.utils.tasks.test_account.TestAccountPlanChange.test_send_email_plan_change
+
+# With console email backend (to see email output)
+EMAIL_BACKEND=django.core.mail.backends.console.EmailBackend ENV=dev pipenv run ./manage.py test
+
+# With verbose output
+ENV=dev pipenv run ./manage.py test --verbosity=2
+```
+
+## Test Structure
+
+### Basic Test Template
+
+```python
+import logging
+from django.test import TestCase, TransactionTestCase
+from django.contrib.auth.models import User, Group
+from django.core import mail
+from impresso.models import ModelName
+from impresso.utils.tasks.module import function_to_test
+
+logger = logging.getLogger("console")
+
+
+class TestFeature(TestCase):
+ """
+ Test feature functionality.
+
+ ENV=dev pipenv run ./manage.py test impresso.tests.module.TestFeature
+ """
+
+ def setUp(self):
+ """Set up test fixtures before each test method."""
+ # Create test user
+ self.user = User.objects.create_user(
+ username="testuser",
+ first_name="Jane",
+ last_name="Doe",
+ password="testpass123",
+ email="test@example.com",
+ )
+
+ # Create default groups (required for many tests)
+ from impresso.signals import create_default_groups
+ create_default_groups(sender="impresso")
+
+ # Clear mail outbox
+ mail.outbox = []
+
+ def tearDown(self):
+ """Clean up after each test method."""
+ pass
+
+ def test_feature_success(self):
+ """Test successful feature execution."""
+ # Arrange
+ expected_result = "expected"
+
+ # Act
+ result = function_to_test(self.user.id)
+
+ # Assert
+ self.assertEqual(result, expected_result)
+ self.assertEqual(len(mail.outbox), 1)
+ self.assertEqual(mail.outbox[0].subject, "Expected Subject")
+```
+
+### Testing with Transactions
+
+```python
+class TestFeatureWithTransaction(TransactionTestCase):
+ """
+ Test feature requiring transaction support.
+
+ ENV=dev pipenv run ./manage.py test impresso.tests.module.TestFeatureWithTransaction
+ """
+
+ def setUp(self):
+ """Set up test fixtures."""
+ self.user = User.objects.create_user(
+ username="testuser",
+ email="test@example.com",
+ password="testpass123"
+ )
+ from impresso.signals import create_default_groups
+ create_default_groups(sender="impresso")
+
+ def test_with_commit(self):
+ """Test behavior after transaction commit."""
+ # Your test code
+ pass
+```
+
+## Testing Email Functionality
+
+### Email Testing Pattern
+
+```python
+from django.core import mail
+from django.conf import settings
+
+def test_send_email(self):
+ """Test email sending functionality."""
+ # Clear outbox before test
+ mail.outbox = []
+
+ # Call function that sends email
+ send_email_function(user_id=self.user.id)
+
+ # Check email was sent
+ self.assertEqual(len(mail.outbox), 1)
+
+ # Check email properties
+ email = mail.outbox[0]
+ self.assertEqual(email.subject, "Expected Subject")
+ self.assertEqual(email.to, [self.user.email])
+ self.assertEqual(email.from_email, f"Impresso Team <{settings.DEFAULT_FROM_EMAIL}>")
+
+ # Check email content
+ self.assertIn("Dear Jane,", email.body)
+ self.assertIn("expected text", email.body)
+
+ # Check HTML alternative exists
+ self.assertEqual(len(email.alternatives), 1)
+ html_content, content_type = email.alternatives[0]
+ self.assertEqual(content_type, "text/html")
+    self.assertIn("<html>", html_content)
+
+def test_multiple_emails(self):
+ """Test when multiple emails are sent."""
+ mail.outbox = []
+
+ # Function sends email to user and staff
+ send_emails_after_user_registration(self.user.id)
+
+ # Check both emails sent
+ self.assertEqual(len(mail.outbox), 2, "Should send email to user and staff")
+
+ # Check first email (to user)
+ self.assertEqual(mail.outbox[0].to, [self.user.email])
+
+ # Check second email (to staff)
+ self.assertEqual(mail.outbox[1].to, [settings.DEFAULT_FROM_EMAIL])
+```
+
+## Testing User Groups and Permissions
+
+### Group Setup
+
+```python
+def setUp(self):
+ """Set up user with specific plan."""
+ self.user = User.objects.create_user(
+ username="testuser",
+ email="test@example.com",
+ password="testpass123"
+ )
+
+ # Create default groups
+ from impresso.signals import create_default_groups
+ create_default_groups(sender="impresso")
+
+ # Add user to specific plan
+ group = Group.objects.get(name=settings.IMPRESSO_GROUP_USER_PLAN_RESEARCHER)
+ self.user.groups.add(group)
+ self.user.is_active = True
+ self.user.save()
+
+def test_user_permissions(self):
+ """Test user has correct permissions."""
+ # Check user is in group
+ group_names = list(self.user.groups.values_list("name", flat=True))
+ self.assertIn(settings.IMPRESSO_GROUP_USER_PLAN_RESEARCHER, group_names)
+
+ # Check user bitmap
+ from impresso.models import UserBitmap
+ user_bitmap = UserBitmap.objects.get(user=self.user)
+ self.assertEqual(
+ user_bitmap.get_bitmap_as_int(),
+ UserBitmap.USER_PLAN_RESEARCHER
+ )
+```
+
+## Testing Celery Tasks
+
+### Testing Task Execution
+
+```python
+from impresso.tasks.my_tasks import my_task
+from impresso.models import Job
+
+class TestCeleryTask(TransactionTestCase):
+ """Test Celery task functionality."""
+
+ def setUp(self):
+ self.user = User.objects.create_user(
+ username="testuser",
+ email="test@example.com"
+ )
+ from impresso.signals import create_default_groups
+ create_default_groups(sender="impresso")
+
+ def test_task_execution(self):
+ """Test task executes successfully."""
+ # Create job for tracking
+ job = Job.objects.create(
+ creator=self.user,
+ type=Job.EXP,
+ status=Job.RUN,
+ )
+
+ # Execute task (runs synchronously in tests)
+ result = my_task.apply(args=[job.id])
+
+ # Check result
+ self.assertTrue(result.successful())
+
+ # Refresh job from database
+ job.refresh_from_db()
+ self.assertEqual(job.status, Job.DONE)
+```
+
+### Testing Task Helpers
+
+```python
+from impresso.utils.tasks import get_pagination
+from impresso.models import Job, Profile
+
+def test_pagination(self):
+ """Test pagination calculation."""
+ # Create user with profile
+ profile = Profile.objects.create(
+ user=self.user,
+ uid="test-user",
+ max_loops_allowed=50
+ )
+
+ # Create job
+ job = Job.objects.create(
+ creator=self.user,
+ type=Job.EXP,
+ )
+
+ # Test pagination
+ page, loops, progress, max_loops = get_pagination(
+ skip=0,
+ limit=100,
+ total=1000,
+ job=job
+ )
+
+ self.assertEqual(page, 1)
+ self.assertEqual(loops, 10)
+ self.assertEqual(progress, 0.1)
+```
+
+## Testing Exceptions
+
+### Exception Testing Pattern
+
+```python
+def test_exception_raised(self):
+ """Test function raises appropriate exception."""
+ with self.assertRaises(ValueError, msg="Should raise ValueError"):
+ function_that_should_fail(invalid_param="bad")
+
+def test_user_not_found(self):
+ """Test handling of non-existent user."""
+ with self.assertRaises(User.DoesNotExist):
+ function_requiring_user(user_id=99999)
+
+def test_validation_error(self):
+ """Test validation error handling."""
+ from django.core.exceptions import ValidationError
+
+ with self.assertRaises(ValidationError):
+ function_with_validation(invalid_data)
+```
+
+## Mocking External Services
+
+### Mocking SMTP
+
+```python
+from unittest.mock import patch
+import smtplib
+
+@patch('smtplib.SMTP')
+def test_email_smtp_error(self, mock_smtp):
+ """Test handling of SMTP errors."""
+ # Setup mock to raise exception
+ mock_smtp.side_effect = smtplib.SMTPException("Connection failed")
+
+ # Call function that sends email
+ with self.assertRaises(smtplib.SMTPException):
+ send_email_function(user_id=self.user.id)
+```
+
+## Testing Database Models
+
+```python
+from impresso.models import UserBitmap
+
+def test_model_creation(self):
+ """Test model instance creation."""
+ user_bitmap = UserBitmap.objects.create(
+ user=self.user
+ )
+
+ self.assertEqual(user_bitmap.user, self.user)
+ self.assertIsNotNone(user_bitmap.date_created)
+
+def test_model_relationships(self):
+ """Test model relationships."""
+ user_bitmap = UserBitmap.objects.get(user=self.user)
+
+ # Test relationship
+ self.assertEqual(user_bitmap.user, self.user)
+```
+
+## Common Assertions
+
+```python
+# Equality
+self.assertEqual(actual, expected)
+self.assertNotEqual(actual, unexpected)
+
+# Truth
+self.assertTrue(condition)
+self.assertFalse(condition)
+
+# Existence
+self.assertIsNone(value)
+self.assertIsNotNone(value)
+
+# Collections (lists, sets, etc.)
+self.assertIn(item, list_or_set)
+self.assertNotIn(item, list_or_set)
+self.assertEqual(len(list_or_set), expected_length)
+
+# Strings
+self.assertIn("substring", text)
+self.assertTrue(text.startswith("prefix"))
+
+# Exceptions
+with self.assertRaises(ExceptionType):
+ function_that_raises()
+
+# Database queries
+self.assertEqual(Model.objects.count(), expected_count)
+self.assertTrue(Model.objects.filter(field=value).exists())
+```
+
+## Test Data Best Practices
+
+### Creating Test Users
+
+```python
+def setUp(self):
+ """Create test users with different roles."""
+ # Basic user
+ self.basic_user = User.objects.create_user(
+ username="basic",
+ email="basic@example.com",
+ password="testpass123"
+ )
+
+ # Staff user
+ self.staff_user = User.objects.create_user(
+ username="staff",
+ email="staff@example.com",
+ password="testpass123",
+ is_staff=True
+ )
+
+ # Superuser
+ self.admin_user = User.objects.create_superuser(
+ username="admin",
+ email="admin@example.com",
+ password="testpass123"
+ )
+```
+
+### Creating Test Data
+
+```python
+def setUp(self):
+ """Create test data."""
+ # Create groups
+ from impresso.signals import create_default_groups
+ create_default_groups(sender="impresso")
+
+ # Create profile
+ from impresso.models import Profile
+ self.profile = Profile.objects.create(
+ user=self.user,
+ uid=f"test-{self.user.username}",
+ max_loops_allowed=100
+ )
+
+ # Create user bitmap
+ from impresso.models import UserBitmap
+ self.user_bitmap = UserBitmap.objects.create(
+ user=self.user
+ )
+```
+
+## Debugging Tests
+
+### Print Debug Information
+
+```python
+def test_with_debug_output(self):
+ """Test with debug output."""
+ result = function_to_test()
+
+ # Print to console for debugging
+ print(f"Result: {result}")
+ print(f"Mail outbox: {mail.outbox}")
+ if mail.outbox:
+ print(f"Email body: {mail.outbox[0].body}")
+
+ # Your assertions
+ self.assertEqual(result, expected)
+```
+
+### Inspecting SQL Queries
+
+The test runner can be configured to show SQL queries:
+
+```python
+# In test method
+from django.test.utils import override_settings
+from django.db import connection
+
+@override_settings(DEBUG=True)
+def test_with_query_debugging(self):
+ """Test with SQL query debugging."""
+ with self.assertNumQueries(expected_query_count):
+ function_to_test()
+
+ # Print queries
+ for query in connection.queries:
+ print(query['sql'])
+```
+
+## Test Coverage
+
+While not currently enforced, aim for:
+- 80%+ code coverage for critical paths
+- 100% coverage for security-sensitive code
+- Test both success and failure scenarios
+- Test edge cases and boundary conditions
+
+## References
+
+- Django Testing Documentation: https://docs.djangoproject.com/en/stable/topics/testing/
+- unittest Documentation: https://docs.python.org/3/library/unittest.html
+- Django Mail Testing: https://docs.djangoproject.com/en/stable/topics/testing/tools/#email-services
diff --git a/.github/copilot-instructions.md b/.github/copilot-instructions.md
new file mode 100644
index 0000000..4e6bdb8
--- /dev/null
+++ b/.github/copilot-instructions.md
@@ -0,0 +1,252 @@
+# GitHub Copilot Instructions for impresso-user-admin
+
+## Repository Overview
+
+This is a Django application that manages user-related information for the Impresso project's Master DB. The application uses **Celery** as the background task processing system for handling asynchronous operations like email sending and user account management.
+
+## Technology Stack
+
+- **Framework**: Django (Python 3.12+)
+- **Task Queue**: Celery with Redis as the broker
+- **Database**: MySQL (managed via pymysql)
+- **Dependency Management**: pipenv
+- **Type Checking**: mypy
+- **Containerization**: Docker & docker-compose
+
+## Project Structure
+
+```
+impresso-user-admin/
+├── impresso/
+│ ├── celery.py # Celery application configuration
+│ ├── settings.py # Django settings
+│ ├── models/ # Django models
+│ ├── tasks/ # Celery task definitions
+│ ├── utils/
+│ │ └── tasks/ # Task helper functions and utilities
+│ └── tests/ # Test suite
+├── .github/
+│ ├── agents/ # Agent-specific instructions
+│ └── copilot-instructions.md
+└── manage.py
+```
+
+## Celery Task Organization
+
+### Task Modules
+
+The application organizes Celery tasks into two main directories:
+
+1. **`impresso/tasks/`** - Contains Celery task decorators and task definitions
+ - `userChangePlanRequest_task.py` - Plan change request tasks
+ - `userSpecialMembershipRequest_tasks.py` - Special membership tasks
+
+2. **`impresso/utils/tasks/`** - Contains helper functions used by tasks
+ - `__init__.py` - Common utilities (job progress tracking)
+ - `account.py` - User account and email operations
+ - `userBitmap.py` - User permission bitmap updates
+ - `email.py` - Email rendering and sending utilities
+ - `userSpecialMembershipRequest.py` - Special membership operations
+
+### Task Helper Functions
+
+Common task utilities are provided in `impresso/utils/tasks/__init__.py`:
+
+- `update_job_progress()` - Update job status and progress in DB and Redis
+- `update_job_completed()` - Mark a job as completed
+- `is_task_stopped()` - Check if user has stopped a job
+
+Task states:
+- `TASKSTATE_INIT` - Task initialization
+- `TASKSTATE_PROGRESS` - Task in progress
+- `TASKSTATE_SUCCESS` - Task completed successfully
+- `TASKSTATE_STOPPED` - Task stopped by user
+
+## Coding Conventions
+
+### General Python
+
+- Use Python 3.12+ type hints for all function signatures
+- Follow PEP 8 style guidelines
+- Use descriptive variable names
+- Include docstrings for all public functions and classes
+- Use f-strings for string formatting
+
+### Django Specific
+
+- Use Django ORM for all database operations
+- Follow Django naming conventions for models, views, and managers
+- Use Django's transaction management for atomic operations
+- Settings should be accessed via `django.conf.settings`
+
+### Celery Tasks
+
+- Define tasks in `impresso/tasks/` directory
+- Place helper functions in `impresso/utils/tasks/` directory
+- Use `@shared_task` or `@app.task` decorators with appropriate configuration
+- Always bind tasks when using `self` (e.g., for updating state)
+- Include retry logic with exponential backoff for resilient tasks
+- Use structured logging with task context (job_id, user_id)
+
+Example task pattern:
+```python
+from celery import shared_task
+from celery.utils.log import get_task_logger
+
+logger = get_task_logger(__name__)
+
+@shared_task(
+ bind=True,
+ autoretry_for=(Exception,),
+    retry_backoff=2,
+ retry_kwargs={"max_retries": 5},
+ retry_jitter=True,
+)
+def my_task(self, user_id: int) -> None:
+ logger.info(f"[user:{user_id}] Starting task...")
+ # Task implementation
+```
+
+### Logging
+
+- Use structured logging with context: `logger.info(f"[job:{job.pk} user:{user.pk}] message")`
+- Include relevant IDs in log messages (job, user, etc.)
+- Use appropriate log levels: DEBUG, INFO, WARNING, ERROR (and `logger.exception()` to log an error with its traceback)
+- Get logger via `get_task_logger(__name__)` in task files
+- Use default_logger pattern: `default_logger = logging.getLogger(__name__)` in utility files
+
+### Error Handling
+
+- Catch specific exceptions rather than generic Exception
+- Log exceptions with appropriate context
+- Use exponential backoff for retries
+- Handle database IntegrityErrors appropriately
+- Validate user input before processing
+
+### Email Operations
+
+- Use `send_templated_email_with_context()` from `impresso/utils/tasks/email.py`
+- Email templates are in `impresso/templates/emails/` (both .txt and .html)
+- Always include both text and HTML versions
+- Handle SMTP exceptions gracefully
+- Log email sending status
+
+
+
+### Job Management
+
+- Jobs track long-running asynchronous tasks
+- Update job progress using `update_job_progress()`
+- Check for user-initiated stops with `is_task_stopped()`
+- Store task metadata in job.extra field as JSON
+
+## Testing
+
+### Running Tests
+
+```bash
+# Run all tests
+ENV=dev pipenv run ./manage.py test
+
+# Run specific test module
+ENV=dev pipenv run ./manage.py test impresso.tests.utils.tasks.test_account
+
+# Run with email backend visible
+EMAIL_BACKEND=django.core.mail.backends.console.EmailBackend ENV=dev pipenv run ./manage.py test
+```
+
+### Test Organization
+
+- Tests are in `impresso/tests/` directory
+- Mirror the structure of the main codebase
+- Use `TestCase` for standard tests
+- Use `TransactionTestCase` for tests requiring DB transactions
+- Clear `mail.outbox` between test cases
+- Create default groups in setUp using `create_default_groups()`
+
+### Test Conventions
+
+- Name test methods descriptively: `test_send_email_plan_change`
+- Use assertions that provide clear failure messages
+- Test both success and error cases
+- Mock external services (SMTP) when appropriate
+- Test with different user plans and permissions
+
+## Development Workflow
+
+### Setting Up Environment
+
+```bash
+# Install dependencies
+pipenv install
+
+# Start Redis and MySQL
+docker compose up -d
+
+# Run migrations
+ENV=dev pipenv run ./manage.py migrate
+
+# Create superuser
+ENV=dev pipenv run ./manage.py createsuperuser
+
+# Run development server
+ENV=dev pipenv run ./manage.py runserver
+
+# Run Celery worker (in separate terminal)
+ENV=dev pipenv run celery -A impresso worker -l info
+```
+
+### Type Checking
+
+```bash
+# Run mypy
+pipenv run mypy --config-file ./.mypy.ini impresso
+```
+
+### Common Commands
+
+```bash
+# Create accounts
+ENV=dev pipenv run ./manage.py createaccount user@example.com
+
+# Stop a job
+ENV=dev pipenv run ./manage.py stopjob
+
+# Update user bitmap
+ENV=dev pipenv run ./manage.py updateuserbitmap
+```
+
+## Security Considerations
+
+- Never commit secrets to source code
+- Use environment variables for sensitive configuration
+- Validate and sanitize user inputs
+- Use Django's built-in security features
+- Respect user permissions and bitmap access controls
+
+## Configuration
+
+- Environment-specific settings via `.env` files (`.dev.env`, `.prod.env`)
+- Use `ENV` environment variable to select configuration: `ENV=dev`
+- See `.example.env` for available configuration options
+- Settings loaded via `dotenv` in `impresso/base.py`
+
+## Adding New Tasks
+
+When adding new Celery tasks:
+
+1. Create task definition in `impresso/tasks/` with proper decorators
+2. Create helper functions in `impresso/utils/tasks/` if needed
+3. Use structured logging with context
+4. Implement retry logic with exponential backoff
+5. Update job progress for long-running tasks
+6. Check for user-initiated stops in loops
+7. Handle errors gracefully
+8. Add tests in `impresso/tests/tasks/`
+9. Document the task purpose and parameters
+
+## Resources
+
+- Main repository: https://github.com/impresso/impresso-user-admin
+- Impresso project: https://impresso-project.ch
+- License: GNU Affero General Public License v3.0
diff --git a/.github/workflows/docker-build-publish.yml b/.github/workflows/docker-build-publish.yml
index 785a302..14e828b 100644
--- a/.github/workflows/docker-build-publish.yml
+++ b/.github/workflows/docker-build-publish.yml
@@ -5,6 +5,13 @@ on:
push:
branches:
- develop
+ paths-ignore:
+ - 'README.md'
+ - 'LICENSE'
+ - '**/*.md'
+ - '.github/**'
+ - '.cursorrules'
+ workflow_dispatch:
jobs:
build:
runs-on: ubuntu-latest
diff --git a/AGENTS.md b/AGENTS.md
new file mode 100644
index 0000000..e005cb0
--- /dev/null
+++ b/AGENTS.md
@@ -0,0 +1,25 @@
+## Repository Overview
+
+This is a Django application that manages user-related information for the Impresso project's Master DB. The application uses **Celery** as the background task processing system for handling asynchronous operations like email sending and user account management.
+
+## Technology Stack
+
+- **Framework**: Django (Python 3.12+)
+- **Task Queue**: Celery with Redis as the broker
+- **Database**: MySQL (managed via pymysql)
+- **Dependency Management**: pipenv
+- **Type Checking**: mypy
+- **Containerization**: Docker & docker-compose
+
+## Project Structure, AI & Agent Instructions
+
+This repository contains AI coding instructions and architectural guidelines in:
+
+- `.github/copilot-instructions.md`
+
+Those instructions define:
+- Coding style
+- Task conventions
+- Architectural constraints
+
+All agents and contributors MUST follow those rules when adding or modifying tasks.
diff --git a/Dockerfile b/Dockerfile
index 851c54e..e5b2fcf 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,4 +1,4 @@
-FROM python:3.12.2-alpine
+FROM python:3.12.4-alpine
# RUN set -ex \
# # Create a non-root user
diff --git a/Pipfile b/Pipfile
index 43ab838..c6edf6c 100644
--- a/Pipfile
+++ b/Pipfile
@@ -5,7 +5,7 @@ verify_ssl = true
[packages]
celery = "*"
-django = "==5.2.8"
+django = "==5.2.10"
django-registration = "*"
django-unfold = "==0.72.0"
gunicorn = "*"
diff --git a/Pipfile.lock b/Pipfile.lock
index aca6134..d16c458 100644
--- a/Pipfile.lock
+++ b/Pipfile.lock
@@ -1,7 +1,7 @@
{
"_meta": {
"hash": {
- "sha256": "b609b9e5502692e9d1ca83ef611885c5425ba950ec19e2fb1034f9dddb94919d"
+ "sha256": "1ebd1ee3b6cfacae3276dcea2c338e79f7efce4f101fe317d1dde5f712709275"
},
"pipfile-spec": 6,
"requires": {
@@ -42,20 +42,20 @@
},
"celery": {
"hashes": [
- "sha256:33cf01477b175017fc8f22c5ee8a65157591043ba8ca78a443fe703aa910f581",
- "sha256:641405206042d52ae460e4e9751a2e31b06cf80ab836fcf92e0b9311d7ea8113"
+ "sha256:3ffafacbe056951b629c7abcf9064c4a2366de0bdfc9fdba421b97ebb68619a5",
+ "sha256:4a8921c3fcf2ad76317d3b29020772103581ed2454c4c042cc55dcc43585009b"
],
"index": "pypi",
"markers": "python_version >= '3.9'",
- "version": "==5.6.0"
+ "version": "==5.6.2"
},
"certifi": {
"hashes": [
- "sha256:97de8790030bbd5c2d96b7ec782fc2f7820ef8dba6db909ccf95449f2d062d4b",
- "sha256:d8ab5478f2ecd78af242878415affce761ca6bc54a22a27e026d7c25357c3316"
+ "sha256:9943707519e4add1115f44c2bc244f782c0249876bf51b6599fee1ffbedd685c",
+ "sha256:ac726dd470482006e014ad384921ed6438c457018f4b3d204aea4281258b2120"
],
"markers": "python_version >= '3.7'",
- "version": "==2025.11.12"
+ "version": "==2026.1.4"
},
"charset-normalizer": {
"hashes": [
@@ -216,12 +216,12 @@
},
"django": {
"hashes": [
- "sha256:23254866a5bb9a2cfa6004e8b809ec6246eba4b58a7589bc2772f1bcc8456c7f",
- "sha256:37e687f7bd73ddf043e2b6b97cfe02fcbb11f2dbb3adccc6a2b18c6daa054d7f"
+ "sha256:74df100784c288c50a2b5cad59631d71214f40f72051d5af3fdf220c20bdbbbe",
+ "sha256:cf85067a64250c95d5f9067b056c5eaa80591929f7e16fbcd997746e40d6c45c"
],
"index": "pypi",
"markers": "python_version >= '3.10'",
- "version": "==5.2.8"
+ "version": "==5.2.10"
},
"django-registration": {
"hashes": [
@@ -241,22 +241,14 @@
"markers": "python_version >= '3.10' and python_version < '4.0'",
"version": "==0.72.0"
},
- "exceptiongroup": {
- "hashes": [
- "sha256:8b412432c6055b0b7d14c310000ae93352ed6754f70fa8f7c34141f91c4e3219",
- "sha256:a7a39a3bd276781e98394987d3a5701d0c4edffb633bb7a5144577f82c773598"
- ],
- "markers": "python_version >= '3.7'",
- "version": "==1.3.1"
- },
"fakeredis": {
"hashes": [
- "sha256:dd8246db159f0b66a1ced7800c9d5ef07769e3d2fde44b389a57f2ce2834e444",
- "sha256:e80c8886db2e47ba784f7dfe66aad6cd2eab76093c6bfda50041e5bc890d46cf"
+ "sha256:d7bc9a69d21df108a6451bbffee23b3eba432c21a654afc7ff2d295428ec5770",
+ "sha256:de535f3f9ccde1c56672ab2fdd6a8efbc4f2619fc2f1acc87b8737177d71c965"
],
"index": "pypi",
"markers": "python_version >= '3.7'",
- "version": "==2.32.1"
+ "version": "==2.33.0"
},
"gunicorn": {
"hashes": [
@@ -277,138 +269,138 @@
},
"kombu": {
"hashes": [
- "sha256:90f1febb57ad4f53ca327a87598191b2520e0c793c75ea3b88d98e3b111282e4",
- "sha256:b69e3f5527ec32fc5196028a36376501682973e9620d6175d1c3d4eaf7e95409"
+ "sha256:8060497058066c6f5aed7c26d7cd0d3b574990b09de842a8c5aaed0b92cc5a55",
+ "sha256:efcfc559da324d41d61ca311b0c64965ea35b4c55cc04ee36e55386145dace93"
],
"markers": "python_version >= '3.9'",
- "version": "==5.6.1"
+ "version": "==5.6.2"
},
"librt": {
"hashes": [
- "sha256:04f8ce401d4f6380cfc42af0f4e67342bf34c820dae01343f58f472dbac75dcf",
- "sha256:05f385a414de3f950886ea0aad8f109650d4b712cf9cc14cc17f5f62a9ab240b",
- "sha256:0765b0fe0927d189ee14b087cd595ae636bef04992e03fe6dfdaa383866c8a46",
- "sha256:078cd77064d1640cb7b0650871a772956066174d92c8aeda188a489b58495179",
- "sha256:09262cb2445b6f15d09141af20b95bb7030c6f13b00e876ad8fdd1a9045d6aa5",
- "sha256:0c74c26736008481c9f6d0adf1aedb5a52aff7361fea98276d1f965c0256ee70",
- "sha256:0e0f2b79993fec23a685b3e8107ba5f8675eeae286675a216da0b09574fa1e47",
- "sha256:10a95ad074e2a98c9e4abc7f5b7d40e5ecbfa84c04c6ab8a70fabf59bd429b88",
- "sha256:14b345eb7afb61b9fdcdfda6738946bd11b8e0f6be258666b0646af3b9bb5916",
- "sha256:17000df14f552e86877d67e4ab7966912224efc9368e998c96a6974a8d609bf9",
- "sha256:1b51ba7d9d5d9001494769eca8c0988adce25d0a970c3ba3f2eb9df9d08036fc",
- "sha256:1ef42ff4edd369e84433ce9b188a64df0837f4f69e3d34d3b34d4955c599d03f",
- "sha256:25b1b60cb059471c0c0c803e07d0dfdc79e41a0a122f288b819219ed162672a3",
- "sha256:26b8026393920320bb9a811b691d73c5981385d537ffc5b6e22e53f7b65d4122",
- "sha256:324462fe7e3896d592b967196512491ec60ca6e49c446fe59f40743d08c97917",
- "sha256:349b6873ebccfc24c9efd244e49da9f8a5c10f60f07575e248921aae2123fc42",
- "sha256:36a8e337461150b05ca2c7bdedb9e591dfc262c5230422cea398e89d0c746cdc",
- "sha256:36b2ec8c15030002c7f688b4863e7be42820d7c62d9c6eece3db54a2400f0530",
- "sha256:38320386a48a15033da295df276aea93a92dfa94a862e06893f75ea1d8bbe89d",
- "sha256:3ac2a7835434b31def8ed5355dd9b895bbf41642d61967522646d1d8b9681106",
- "sha256:3caa0634c02d5ff0b2ae4a28052e0d8c5f20d497623dc13f629bd4a9e2a6efad",
- "sha256:3e84a4121a7ae360ca4da436548a9c1ca8ca134a5ced76c893cc5944426164bd",
- "sha256:3f0e4bd9bcb0ee34fa3dbedb05570da50b285f49e52c07a241da967840432513",
- "sha256:4018904c83eab49c814e2494b4e22501a93cdb6c9f9425533fe693c3117126f9",
- "sha256:408a36ddc75e91918cb15b03460bdc8a015885025d67e68c6f78f08c3a88f522",
- "sha256:45660d26569cc22ed30adf583389d8a0d1b468f8b5e518fcf9bfe2cd298f9dd1",
- "sha256:4aa4a93a353ccff20df6e34fa855ae8fd788832c88f40a9070e3ddd3356a9f0e",
- "sha256:4bca9e4c260233fba37b15c4ec2f78aa99c1a79fbf902d19dd4a763c5c3fb751",
- "sha256:514f3f363d1ebc423357d36222c37e5c8e6674b6eae8d7195ac9a64903722057",
- "sha256:54f3b2177fb892d47f8016f1087d21654b44f7fc4cf6571c1c6b3ea531ab0fcf",
- "sha256:57705e8eec76c5b77130d729c0f70190a9773366c555c5457c51eace80afd873",
- "sha256:5cc22f7f5c0cc50ed69f4b15b9c51d602aabc4500b433aaa2ddd29e578f452f7",
- "sha256:61348cc488b18d1b1ff9f3e5fcd5ac43ed22d3e13e862489d2267c2337285c08",
- "sha256:64645b757d617ad5f98c08e07620bc488d4bced9ced91c6279cec418f16056fa",
- "sha256:669ff2495728009a96339c5ad2612569c6d8be4474e68f3f3ac85d7c3261f5f5",
- "sha256:6bac97e51f66da2ca012adddbe9fd656b17f7368d439de30898f24b39512f40f",
- "sha256:6d46aa46aa29b067f0b8b84f448fd9719aaf5f4c621cc279164d76a9dc9ab3e8",
- "sha256:71f0a5918aebbea1e7db2179a8fe87e8a8732340d9e8b8107401fb407eda446e",
- "sha256:74418f718083009108dc9a42c21bf2e4802d49638a1249e13677585fcc9ca176",
- "sha256:760c25ed6ac968e24803eb5f7deb17ce026902d39865e83036bacbf5cf242aa8",
- "sha256:822ca79e28720a76a935c228d37da6579edef048a17cd98d406a2484d10eda78",
- "sha256:86605d5bac340beb030cbc35859325982a79047ebdfba1e553719c7126a2389d",
- "sha256:87597e3d57ec0120a3e1d857a708f80c02c42ea6b00227c728efbc860f067c45",
- "sha256:8983c5c06ac9c990eac5eb97a9f03fe41dc7e9d7993df74d9e8682a1056f596c",
- "sha256:8c659f9fb8a2f16dc4131b803fa0144c1dadcb3ab24bb7914d01a6da58ae2457",
- "sha256:8e695f25d1a425ad7a272902af8ab8c8d66c1998b177e4b5f5e7b4e215d0c88a",
- "sha256:8f8ed5053ef9fb08d34f1fd80ff093ccbd1f67f147633a84cf4a7d9b09c0f089",
- "sha256:92267f865c7bbd12327a0d394666948b9bf4b51308b52947c0cc453bfa812f5d",
- "sha256:98e4bbecbef8d2a60ecf731d735602feee5ac0b32117dbbc765e28b054bac912",
- "sha256:9e716f9012148a81f02f46a04fc4c663420c6fbfeacfac0b5e128cf43b4413d3",
- "sha256:9f2a6623057989ebc469cd9cc8fe436c40117a0147627568d03f84aef7854c55",
- "sha256:a218f85081fc3f70cddaed694323a1ad7db5ca028c379c214e3a7c11c0850523",
- "sha256:aa346e202e6e1ebc01fe1c69509cffe486425884b96cb9ce155c99da1ecbe0e9",
- "sha256:ad8ba80cdcea04bea7b78fcd4925bfbf408961e9d8397d2ee5d3ec121e20c08c",
- "sha256:afb39550205cc5e5c935762c6bf6a2bb34f7d21a68eadb25e2db7bf3593fecc0",
- "sha256:b2922a0e8fa97395553c304edc3bd36168d8eeec26b92478e292e5d4445c1ef0",
- "sha256:b47395091e7e0ece1e6ebac9b98bf0c9084d1e3d3b2739aa566be7e56e3f7bf2",
- "sha256:c0ecf4786ad0404b072196b5df774b1bb23c8aacdcacb6c10b4128bc7b00bd01",
- "sha256:c5b31bed2c2f2fa1fcb4815b75f931121ae210dc89a3d607fb1725f5907f1437",
- "sha256:c724a884e642aa2bbad52bb0203ea40406ad742368a5f90da1b220e970384aae",
- "sha256:cb92741c2b4ea63c09609b064b26f7f5d9032b61ae222558c55832ec3ad0bcaf",
- "sha256:ced0925a18fddcff289ef54386b2fc230c5af3c83b11558571124bfc485b8c07",
- "sha256:cf1115207a5049d1f4b7b4b72de0e52f228d6c696803d94843907111cbf80610",
- "sha256:d3c9a07eafdc70556f8c220da4a538e715668c0c63cabcc436a026e4e89950bf",
- "sha256:d7769c579663a6f8dbf34878969ac71befa42067ce6bf78e6370bf0d1194997c",
- "sha256:d8f89c8d20dfa648a3f0a56861946eb00e5b00d6b00eea14bc5532b2fcfa8ef1",
- "sha256:d998b432ed9ffccc49b820e913c8f327a82026349e9c34fa3690116f6b70770f",
- "sha256:dcbe48f6a03979384f27086484dc2a14959be1613cb173458bd58f714f2c48f3",
- "sha256:e17b5b42c8045867ca9d1f54af00cc2275198d38de18545edaa7833d7e9e4ac8",
- "sha256:e18875e17ef69ba7dfa9623f2f95f3eda6f70b536079ee6d5763ecdfe6cc9040",
- "sha256:e61ab234624c9ffca0248a707feffe6fac2343758a36725d8eb8a6efef0f8c30",
- "sha256:ecc2c526547eacd20cb9fbba19a5268611dbc70c346499656d6cf30fae328977",
- "sha256:f33462b19503ba68d80dac8a1354402675849259fb3ebf53b67de86421735a3a",
- "sha256:fbedeb9b48614d662822ee514567d2d49a8012037fc7b4cd63f282642c2f4b7d",
- "sha256:fd98cacf4e0fabcd4005c452cb8a31750258a85cab9a59fb3559e8078da408d7",
- "sha256:fdcd095b1b812d756fa5452aca93b962cf620694c0cadb192cec2bb77dcca9a2"
+ "sha256:00105e7d541a8f2ee5be52caacea98a005e0478cfe78c8080fbb7b5d2b340c63",
+ "sha256:0241a6ed65e6666236ea78203a73d800dbed896cf12ae25d026d75dc1fcd1dac",
+ "sha256:03679b9856932b8c8f674e87aa3c55ea11c9274301f76ae8dc4d281bda55cf62",
+ "sha256:047164e5f68b7a8ebdf9fae91a3c2161d3192418aadd61ddd3a86a56cbe3dc85",
+ "sha256:171ca3a0a06c643bd0a2f62a8944e1902c94aa8e5da4db1ea9a8daf872685365",
+ "sha256:1a4ede613941d9c3470b0368be851df6bb78ab218635512d0370b27a277a0862",
+ "sha256:20e3946863d872f7cabf7f77c6c9d370b8b3d74333d3a32471c50d3a86c0a232",
+ "sha256:2991b6c3775383752b3ca0204842743256f3ad3deeb1d0adc227d56b78a9a850",
+ "sha256:31724b93baa91512bd0a376e7cf0b59d8b631ee17923b1218a65456fa9bda2e7",
+ "sha256:3469e1af9f1380e093ae06bedcbdd11e407ac0b303a56bbe9afb1d6824d4982d",
+ "sha256:389bd25a0db916e1d6bcb014f11aa9676cedaa485e9ec3752dfe19f196fd377b",
+ "sha256:3968762fec1b2ad34ce57458b6de25dbb4142713e9ca6279a0d352fa4e9f452b",
+ "sha256:39a4c76fee41007070f872b648cc2f711f9abf9a13d0c7162478043377b52c8e",
+ "sha256:3d1322800771bee4a91f3b4bd4e49abc7d35e65166821086e5afd1e6c0d9be44",
+ "sha256:41d7bb1e07916aeb12ae4a44e3025db3691c4149ab788d0315781b4d29b86afb",
+ "sha256:43d4e71b50763fcdcf64725ac680d8cfa1706c928b844794a7aa0fa9ac8e5f09",
+ "sha256:445b7304145e24c60288a2f172b5ce2ca35c0f81605f5299f3fa567e189d2e32",
+ "sha256:44e0c2cbc9bebd074cf2cdbe472ca185e824be4e74b1c63a8e934cea674bebf2",
+ "sha256:451e7ffcef8f785831fdb791bd69211f47e95dc4c6ddff68e589058806f044c6",
+ "sha256:46ef1f4b9b6cc364b11eea0ecc0897314447a66029ee1e55859acb3dd8757c93",
+ "sha256:4864045f49dc9c974dadb942ac56a74cd0479a2aafa51ce272c490a82322ea3c",
+ "sha256:4adc73614f0d3c97874f02f2c7fd2a27854e7e24ad532ea6b965459c5b757eca",
+ "sha256:4c3995abbbb60b3c129490fa985dfe6cac11d88fc3c36eeb4fb1449efbbb04fc",
+ "sha256:4d2f1e492cae964b3463a03dc77a7fe8742f7855d7258c7643f0ee32b6651dd3",
+ "sha256:535929b6eff670c593c34ff435d5440c3096f20fa72d63444608a5aef64dd581",
+ "sha256:5363427bc6a8c3b1719f8f3845ea53553d301382928a86e8fab7984426949bce",
+ "sha256:54feb7b4f2f6706bb82325e836a01be805770443e2400f706e824e91f6441dde",
+ "sha256:57175aa93f804d2c08d2edb7213e09276bd49097611aefc37e3fa38d1fb99ad0",
+ "sha256:5bcaaf624fd24e6a0cb14beac37677f90793a96864c67c064a91458611446e83",
+ "sha256:60c299e555f87e4c01b2eca085dfccda1dde87f5a604bb45c2906b8305819a93",
+ "sha256:631599598e2c76ded400c0a8722dec09217c89ff64dc54b060f598ed68e7d2a8",
+ "sha256:63937bd0f4d1cb56653dc7ae900d6c52c41f0015e25aaf9902481ee79943b33a",
+ "sha256:66daa6ac5de4288a5bbfbe55b4caa7bf0cd26b3269c7a476ffe8ce45f837f87d",
+ "sha256:6938cc2de153bc927ed8d71c7d2f2ae01b4e96359126c602721340eb7ce1a92d",
+ "sha256:6d772edc6a5f7835635c7562f6688e031f0b97e31d538412a852c49c9a6c92d5",
+ "sha256:6db5faf064b5bab9675c32a873436b31e01d66ca6984c6f7f92621656033a708",
+ "sha256:73fd300f501a052f2ba52ede721232212f3b06503fa12665408ecfc9d8fd149c",
+ "sha256:79feb4d00b2a4e0e05c9c56df707934f41fcb5fe53fd9efb7549068d0495b758",
+ "sha256:7aa7d5457b6c542ecaed79cec4ad98534373c9757383973e638ccced0f11f46d",
+ "sha256:7b0803e9008c62a7ef79058233db7ff6f37a9933b8f2573c05b07ddafa226611",
+ "sha256:7e03bea66af33c95ce3addf87a9bf1fcad8d33e757bc479957ddbc0e4f7207ac",
+ "sha256:864c4b7083eeee250ed55135d2127b260d7eb4b5e953a9e5df09c852e327961b",
+ "sha256:8766ece9de08527deabcd7cb1b4f1a967a385d26e33e536d6d8913db6ef74f06",
+ "sha256:87808a8d1e0bd62a01cafc41f0fd6818b5a5d0ca0d8a55326a81643cdda8f873",
+ "sha256:907ad09cfab21e3c86e8f1f87858f7049d1097f77196959c033612f532b4e592",
+ "sha256:95b67aa7eff150f075fda09d11f6bfb26edffd300f6ab1666759547581e8f666",
+ "sha256:978e8b5f13e52cf23a9e80f3286d7546baa70bc4ef35b51d97a709d0b28e537c",
+ "sha256:9b6943885b2d49c48d0cff23b16be830ba46b0152d98f62de49e735c6e655a63",
+ "sha256:9c1ba843ae20db09b9d5c80475376168feb2640ce91cd9906414f23cc267a1ff",
+ "sha256:a14229ac62adcf1b90a15992f1ab9c69ae8b99ffb23cb64a90878a6e8a2f5b81",
+ "sha256:a36515b1328dc5b3ffce79fe204985ca8572525452eacabee2166f44bb387b2c",
+ "sha256:ac9c8a458245c7de80bc1b9765b177055efff5803f08e548dd4bb9ab9a8d789b",
+ "sha256:ad64a14b1e56e702e19b24aae108f18ad1bf7777f3af5fcd39f87d0c5a814449",
+ "sha256:b09c52ed43a461994716082ee7d87618096851319bf695d57ec123f2ab708951",
+ "sha256:b45306a1fc5f53c9330fbee134d8b3227fe5da2ab09813b892790400aa49352d",
+ "sha256:b5b007bb22ea4b255d3ee39dfd06d12534de2fcc3438567d9f48cdaf67ae1ae3",
+ "sha256:b7e7f140c5169798f90b80d6e607ed2ba5059784968a004107c88ad61fb3641d",
+ "sha256:b9122094e3f24aa759c38f46bd8863433820654927370250f460ae75488b66ea",
+ "sha256:bb7a7807523a31f03061288cc4ffc065d684c39db7644c676b47d89553c0d714",
+ "sha256:be927c3c94c74b05128089a955fba86501c3b544d1d300282cc1b4bd370cb418",
+ "sha256:bfde8a130bd0f239e45503ab39fab239ace094d63ee1d6b67c25a63d741c0f71",
+ "sha256:c6f8947d3dfd7f91066c5b4385812c18be26c9d5a99ca56667547f2c39149d94",
+ "sha256:c7e8f88f79308d86d8f39c491773cbb533d6cb7fa6476f35d711076ee04fceb6",
+ "sha256:ca916919793a77e4a98d4a1701e345d337ce53be4a16620f063191f7322ac80f",
+ "sha256:cf243da9e42d914036fd362ac3fa77d80a41cadcd11ad789b1b5eec4daaf67ca",
+ "sha256:d6f254d096d84156a46a84861183c183d30734e52383602443292644d895047c",
+ "sha256:dbd79caaf77a3f590cbe32dc2447f718772d6eea59656a7dcb9311161b10fa75",
+ "sha256:ddb52499d0b3ed4aa88746aaf6f36a08314677d5c346234c3987ddc506404eac",
+ "sha256:e90a8e237753c83b8e484d478d9a996dc5e39fd5bd4c6ce32563bc8123f132be",
+ "sha256:e9c0afebbe6ce177ae8edba0c7c4d626f2a0fc12c33bb993d163817c41a7a05c",
+ "sha256:f11b300027ce19a34f6d24ebb0a25fd0e24a9d53353225a5c1e6cadbf2916b2e",
+ "sha256:f1ade7f31675db00b514b98f9ab9a7698c7282dad4be7492589109471852d398",
+ "sha256:f8f4a901a3fa28969d6e4519deceab56c55a09d691ea7b12ca830e2fa3461e34",
+ "sha256:fdec6e2368ae4f796fc72fad7fd4bd1753715187e6d870932b0904609e7c878e",
+ "sha256:ff3e9c11aa260c31493d4b3197d1e28dd07768594a4f92bec4506849d736248f",
+ "sha256:ff71447cb778a4f772ddc4ce360e6ba9c95527ed84a52096bd1bbf9fee2ec7c0"
],
"markers": "python_version >= '3.9'",
- "version": "==0.6.3"
+ "version": "==0.7.8"
},
"mypy": {
"hashes": [
- "sha256:0c01c99d626380752e527d5ce8e69ffbba2046eb8a060db0329690849cf9b6f9",
- "sha256:0dde5cb375cb94deff0d4b548b993bec52859d1651e073d63a1386d392a95495",
- "sha256:0e3c3d1e1d62e678c339e7ade72746a9e0325de42cd2cccc51616c7b2ed1a018",
- "sha256:0ea4fd21bb48f0da49e6d3b37ef6bd7e8228b9fe41bbf4d80d9364d11adbd43c",
- "sha256:0fb3115cb8fa7c5f887c8a8d81ccdcb94cff334684980d847e5a62e926910e1d",
- "sha256:11f7254c15ab3f8ed68f8e8f5cbe88757848df793e31c36aaa4d4f9783fd08ab",
- "sha256:120cffe120cca5c23c03c77f84abc0c14c5d2e03736f6c312480020082f1994b",
- "sha256:16f76ff3f3fd8137aadf593cb4607d82634fca675e8211ad75c43d86033ee6c6",
- "sha256:1cf9c59398db1c68a134b0b5354a09a1e124523f00bacd68e553b8bd16ff3299",
- "sha256:318ba74f75899b0e78b847d8c50821e4c9637c79d9a59680fc1259f29338cb3e",
- "sha256:3210d87b30e6af9c8faed61be2642fcbe60ef77cec64fa1ef810a630a4cf671c",
- "sha256:34ec1ac66d31644f194b7c163d7f8b8434f1b49719d403a5d26c87fff7e913f7",
- "sha256:37af5166f9475872034b56c5efdcf65ee25394e9e1d172907b84577120714364",
- "sha256:3ad925b14a0bb99821ff6f734553294aa6a3440a8cb082fe1f5b84dfb662afb1",
- "sha256:510c014b722308c9bd377993bcbf9a07d7e0692e5fa8fc70e639c1eb19fc6bee",
- "sha256:6016c52ab209919b46169651b362068f632efcd5eb8ef9d1735f6f86da7853b2",
- "sha256:6148ede033982a8c5ca1143de34c71836a09f105068aaa8b7d5edab2b053e6c8",
- "sha256:63ea6a00e4bd6822adbfc75b02ab3653a17c02c4347f5bb0cf1d5b9df3a05835",
- "sha256:7686ed65dbabd24d20066f3115018d2dce030d8fa9db01aa9f0a59b6813e9f9e",
- "sha256:7a500ab5c444268a70565e374fc803972bfd1f09545b13418a5174e29883dab7",
- "sha256:8f44f2ae3c58421ee05fe609160343c25f70e3967f6e32792b5a78006a9d850f",
- "sha256:a18d8abdda14035c5718acb748faec09571432811af129bf0d9e7b2d6699bf18",
- "sha256:a31e4c28e8ddb042c84c5e977e28a21195d086aaffaf08b016b78e19c9ef8106",
- "sha256:a9ac09e52bb0f7fb912f5d2a783345c72441a08ef56ce3e17c1752af36340a39",
- "sha256:b9d491295825182fba01b6ffe2c6fe4e5a49dbf4e2bb4d1217b6ced3b4797bc6",
- "sha256:c14a98bc63fd867530e8ec82f217dae29d0550c86e70debc9667fff1ec83284e",
- "sha256:c3385246593ac2b97f155a0e9639be906e73534630f663747c71908dfbf26134",
- "sha256:cabbee74f29aa9cd3b444ec2f1e4fa5a9d0d746ce7567a6a609e224429781f53",
- "sha256:cb64b0ba5980466a0f3f9990d1c582bcab8db12e29815ecb57f1408d99b4bff7",
- "sha256:cf7d84f497f78b682edd407f14a7b6e1a2212b433eedb054e2081380b7395aa3",
- "sha256:e2c1101ab41d01303103ab6ef82cbbfedb81c1a060c868fa7cc013d573d37ab5",
- "sha256:f188dcf16483b3e59f9278c4ed939ec0254aa8a60e8fc100648d9ab5ee95a431",
- "sha256:f2e36bed3c6d9b5f35d28b63ca4b727cb0228e480826ffc8953d1892ddc8999d",
- "sha256:f3e19e3b897562276bb331074d64c076dbdd3e79213f36eed4e592272dabd760",
- "sha256:f6b874ca77f733222641e5c46e4711648c4037ea13646fd0cdc814c2eaec2528",
- "sha256:f75e60aca3723a23511948539b0d7ed514dda194bc3755eae0bfc7a6b4887aa7",
- "sha256:fc51a5b864f73a3a182584b1ac75c404396a17eced54341629d8bdcb644a5bba",
- "sha256:fd4a985b2e32f23bead72e2fb4bbe5d6aceee176be471243bd831d5b2644672d"
+ "sha256:016f2246209095e8eda7538944daa1d60e1e8134d98983b9fc1e92c1fc0cb8dd",
+ "sha256:022ea7279374af1a5d78dfcab853fe6a536eebfda4b59deab53cd21f6cd9f00b",
+ "sha256:06e6170bd5836770e8104c8fdd58e5e725cfeb309f0a6c681a811f557e97eac1",
+ "sha256:19d88bb05303fe63f71dd2c6270daca27cb9401c4ca8255fe50d1d920e0eb9ba",
+ "sha256:21761006a7f497cb0d4de3d8ef4ca70532256688b0523eee02baf9eec895e27b",
+ "sha256:28902ee51f12e0f19e1e16fbe2f8f06b6637f482c459dd393efddd0ec7f82045",
+ "sha256:2899753e2f61e571b3971747e302d5f420c3fd09650e1951e99f823bc3089dac",
+ "sha256:2abb24cf3f17864770d18d673c85235ba52456b36a06b6afc1e07c1fdcd3d0e6",
+ "sha256:34c81968774648ab5ac09c29a375fdede03ba253f8f8287847bd480782f73a6a",
+ "sha256:409088884802d511ee52ca067707b90c883426bd95514e8cfda8281dc2effe24",
+ "sha256:481daf36a4c443332e2ae9c137dfee878fcea781a2e3f895d54bd3002a900957",
+ "sha256:4b84a7a18f41e167f7995200a1d07a4a6810e89d29859df936f1c3923d263042",
+ "sha256:4f28f99c824ecebcdaa2e55d82953e38ff60ee5ec938476796636b86afa3956e",
+ "sha256:5f05aa3d375b385734388e844bc01733bd33c644ab48e9684faa54e5389775ec",
+ "sha256:7bcfc336a03a1aaa26dfce9fff3e287a3ba99872a157561cbfcebe67c13308e3",
+ "sha256:804bd67b8054a85447c8954215a906d6eff9cabeabe493fb6334b24f4bfff718",
+ "sha256:8bb5c6f6d043655e055be9b542aa5f3bdd30e4f3589163e85f93f3640060509f",
+ "sha256:a009ffa5a621762d0c926a078c2d639104becab69e79538a494bcccb62cc0331",
+ "sha256:a8174a03289288c1f6c46d55cef02379b478bfbc8e358e02047487cad44c6ca1",
+ "sha256:ab43590f9cd5108f41aacf9fca31841142c786827a74ab7cc8a2eacb634e09a1",
+ "sha256:b10e7c2cd7870ba4ad9b2d8a6102eb5ffc1f16ca35e3de6bfa390c1113029d13",
+ "sha256:b13cfdd6c87fc3efb69ea4ec18ef79c74c3f98b4e5498ca9b85ab3b2c2329a67",
+ "sha256:b64d987153888790bcdb03a6473d321820597ab8dd9243b27a92153c4fa50fd2",
+ "sha256:b7951a701c07ea584c4fe327834b92a30825514c868b1f69c30445093fdd9d5a",
+ "sha256:bdb12f69bcc02700c2b47e070238f42cb87f18c0bc1fc4cdb4fb2bc5fd7a3b8b",
+ "sha256:c35d298c2c4bba75feb2195655dfea8124d855dfd7343bf8b8c055421eaf0cf8",
+ "sha256:c608937067d2fc5a4dd1a5ce92fd9e1398691b8c5d012d66e1ddd430e9244376",
+ "sha256:c9a6538e0415310aad77cb94004ca6482330fece18036b5f360b62c45814c4ef",
+ "sha256:d8dfc6ab58ca7dda47d9237349157500468e404b17213d44fc1cb77bce532288",
+ "sha256:da4869fc5e7f62a88f3fe0b5c919d1d9f7ea3cef92d3689de2823fd27e40aa75",
+ "sha256:de759aafbae8763283b2ee5869c7255391fbc4de3ff171f8f030b5ec48381b74",
+ "sha256:e3157c7594ff2ef1634ee058aafc56a82db665c9438fd41b390f3bde1ab12250",
+ "sha256:e3f276d8493c3c97930e354b2595a44a21348b320d859fb4a2b9f66da9ed27ab",
+ "sha256:ee4c11e460685c3e0c64a4c5de82ae143622410950d6be863303a1c4ba0e36d6",
+ "sha256:f1235f5ea01b7db5468d53ece6aaddf1ad0b88d9e7462b86ef96fe04995d7247",
+ "sha256:f7cee03c9a2e2ee26ec07479f38ea9c884e301d42c6d43a19d20fb014e3ba925",
+ "sha256:f859fb09d9583a985be9a493d5cfc5515b56b08f7447759a0c5deaf68d80506e",
+ "sha256:ffcebe56eb09ff0c0885e750036a095e23793ba6c2e894e7e63f6d89ad51f22e"
],
"index": "pypi",
"markers": "python_version >= '3.9'",
- "version": "==1.19.0"
+ "version": "==1.19.1"
},
"mypy-extensions": {
"hashes": [
@@ -428,11 +420,11 @@
},
"pathspec": {
"hashes": [
- "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08",
- "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"
+ "sha256:bac5cf97ae2c2876e2d25ebb15078eb04d76e4b98921ee31c6f85ade8b59444d",
+ "sha256:e80767021c1cc524aa3fb14bedda9c34406591343cc42797b386ce7b9354fb6c"
],
- "markers": "python_version >= '3.8'",
- "version": "==0.12.1"
+ "markers": "python_version >= '3.9'",
+ "version": "==1.0.3"
},
"pip": {
"hashes": [
@@ -530,11 +522,11 @@
},
"sqlparse": {
"hashes": [
- "sha256:4396a7d3cf1cd679c1be976cf3dc6e0a51d0111e87787e7a8d780e7d5a998f9e",
- "sha256:99a9f0314977b76d776a0fcb8554de91b9bb8a18560631d6bc48721d07023dcb"
+ "sha256:12a08b3bf3eec877c519589833aed092e2444e68240a3577e8e26148acc7b1ba",
+ "sha256:e20d4a9b0b8585fdf63b10d30066c7c94c5d7a7ec47c889a2d83a3caa93ff28e"
],
"markers": "python_version >= '3.8'",
- "version": "==0.5.4"
+ "version": "==0.5.5"
},
"typing-extensions": {
"hashes": [
@@ -546,11 +538,11 @@
},
"tzdata": {
"hashes": [
- "sha256:1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8",
- "sha256:b60a638fcc0daffadf82fe0f57e53d06bdec2f36c4df66280ae79bce6bd6f2b9"
+ "sha256:06a47e5700f3081aab02b2e513160914ff0694bce9947d6b76ebd6bf57cfc5d1",
+ "sha256:de39c2ca5dc7b0344f2eba86f49d614019d29f060fc4ebc8a417896a620b56a7"
],
"markers": "python_version >= '2'",
- "version": "==2025.2"
+ "version": "==2025.3"
},
"tzlocal": {
"hashes": [
@@ -562,11 +554,11 @@
},
"urllib3": {
"hashes": [
- "sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760",
- "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc"
+ "sha256:1b62b6884944a57dbe321509ab94fd4d3b307075e0c2eae991ac71ee15ad38ed",
+ "sha256:bf272323e553dfb2e87d9bfd225ca7b0f467b919d7bbd355436d3fd37cb0acd4"
],
"markers": "python_version >= '3.9'",
- "version": "==2.5.0"
+ "version": "==2.6.3"
},
"vine": {
"hashes": [
@@ -605,48 +597,48 @@
},
"mypy": {
"hashes": [
- "sha256:0c01c99d626380752e527d5ce8e69ffbba2046eb8a060db0329690849cf9b6f9",
- "sha256:0dde5cb375cb94deff0d4b548b993bec52859d1651e073d63a1386d392a95495",
- "sha256:0e3c3d1e1d62e678c339e7ade72746a9e0325de42cd2cccc51616c7b2ed1a018",
- "sha256:0ea4fd21bb48f0da49e6d3b37ef6bd7e8228b9fe41bbf4d80d9364d11adbd43c",
- "sha256:0fb3115cb8fa7c5f887c8a8d81ccdcb94cff334684980d847e5a62e926910e1d",
- "sha256:11f7254c15ab3f8ed68f8e8f5cbe88757848df793e31c36aaa4d4f9783fd08ab",
- "sha256:120cffe120cca5c23c03c77f84abc0c14c5d2e03736f6c312480020082f1994b",
- "sha256:16f76ff3f3fd8137aadf593cb4607d82634fca675e8211ad75c43d86033ee6c6",
- "sha256:1cf9c59398db1c68a134b0b5354a09a1e124523f00bacd68e553b8bd16ff3299",
- "sha256:318ba74f75899b0e78b847d8c50821e4c9637c79d9a59680fc1259f29338cb3e",
- "sha256:3210d87b30e6af9c8faed61be2642fcbe60ef77cec64fa1ef810a630a4cf671c",
- "sha256:34ec1ac66d31644f194b7c163d7f8b8434f1b49719d403a5d26c87fff7e913f7",
- "sha256:37af5166f9475872034b56c5efdcf65ee25394e9e1d172907b84577120714364",
- "sha256:3ad925b14a0bb99821ff6f734553294aa6a3440a8cb082fe1f5b84dfb662afb1",
- "sha256:510c014b722308c9bd377993bcbf9a07d7e0692e5fa8fc70e639c1eb19fc6bee",
- "sha256:6016c52ab209919b46169651b362068f632efcd5eb8ef9d1735f6f86da7853b2",
- "sha256:6148ede033982a8c5ca1143de34c71836a09f105068aaa8b7d5edab2b053e6c8",
- "sha256:63ea6a00e4bd6822adbfc75b02ab3653a17c02c4347f5bb0cf1d5b9df3a05835",
- "sha256:7686ed65dbabd24d20066f3115018d2dce030d8fa9db01aa9f0a59b6813e9f9e",
- "sha256:7a500ab5c444268a70565e374fc803972bfd1f09545b13418a5174e29883dab7",
- "sha256:8f44f2ae3c58421ee05fe609160343c25f70e3967f6e32792b5a78006a9d850f",
- "sha256:a18d8abdda14035c5718acb748faec09571432811af129bf0d9e7b2d6699bf18",
- "sha256:a31e4c28e8ddb042c84c5e977e28a21195d086aaffaf08b016b78e19c9ef8106",
- "sha256:a9ac09e52bb0f7fb912f5d2a783345c72441a08ef56ce3e17c1752af36340a39",
- "sha256:b9d491295825182fba01b6ffe2c6fe4e5a49dbf4e2bb4d1217b6ced3b4797bc6",
- "sha256:c14a98bc63fd867530e8ec82f217dae29d0550c86e70debc9667fff1ec83284e",
- "sha256:c3385246593ac2b97f155a0e9639be906e73534630f663747c71908dfbf26134",
- "sha256:cabbee74f29aa9cd3b444ec2f1e4fa5a9d0d746ce7567a6a609e224429781f53",
- "sha256:cb64b0ba5980466a0f3f9990d1c582bcab8db12e29815ecb57f1408d99b4bff7",
- "sha256:cf7d84f497f78b682edd407f14a7b6e1a2212b433eedb054e2081380b7395aa3",
- "sha256:e2c1101ab41d01303103ab6ef82cbbfedb81c1a060c868fa7cc013d573d37ab5",
- "sha256:f188dcf16483b3e59f9278c4ed939ec0254aa8a60e8fc100648d9ab5ee95a431",
- "sha256:f2e36bed3c6d9b5f35d28b63ca4b727cb0228e480826ffc8953d1892ddc8999d",
- "sha256:f3e19e3b897562276bb331074d64c076dbdd3e79213f36eed4e592272dabd760",
- "sha256:f6b874ca77f733222641e5c46e4711648c4037ea13646fd0cdc814c2eaec2528",
- "sha256:f75e60aca3723a23511948539b0d7ed514dda194bc3755eae0bfc7a6b4887aa7",
- "sha256:fc51a5b864f73a3a182584b1ac75c404396a17eced54341629d8bdcb644a5bba",
- "sha256:fd4a985b2e32f23bead72e2fb4bbe5d6aceee176be471243bd831d5b2644672d"
+ "sha256:016f2246209095e8eda7538944daa1d60e1e8134d98983b9fc1e92c1fc0cb8dd",
+ "sha256:022ea7279374af1a5d78dfcab853fe6a536eebfda4b59deab53cd21f6cd9f00b",
+ "sha256:06e6170bd5836770e8104c8fdd58e5e725cfeb309f0a6c681a811f557e97eac1",
+ "sha256:19d88bb05303fe63f71dd2c6270daca27cb9401c4ca8255fe50d1d920e0eb9ba",
+ "sha256:21761006a7f497cb0d4de3d8ef4ca70532256688b0523eee02baf9eec895e27b",
+ "sha256:28902ee51f12e0f19e1e16fbe2f8f06b6637f482c459dd393efddd0ec7f82045",
+ "sha256:2899753e2f61e571b3971747e302d5f420c3fd09650e1951e99f823bc3089dac",
+ "sha256:2abb24cf3f17864770d18d673c85235ba52456b36a06b6afc1e07c1fdcd3d0e6",
+ "sha256:34c81968774648ab5ac09c29a375fdede03ba253f8f8287847bd480782f73a6a",
+ "sha256:409088884802d511ee52ca067707b90c883426bd95514e8cfda8281dc2effe24",
+ "sha256:481daf36a4c443332e2ae9c137dfee878fcea781a2e3f895d54bd3002a900957",
+ "sha256:4b84a7a18f41e167f7995200a1d07a4a6810e89d29859df936f1c3923d263042",
+ "sha256:4f28f99c824ecebcdaa2e55d82953e38ff60ee5ec938476796636b86afa3956e",
+ "sha256:5f05aa3d375b385734388e844bc01733bd33c644ab48e9684faa54e5389775ec",
+ "sha256:7bcfc336a03a1aaa26dfce9fff3e287a3ba99872a157561cbfcebe67c13308e3",
+ "sha256:804bd67b8054a85447c8954215a906d6eff9cabeabe493fb6334b24f4bfff718",
+ "sha256:8bb5c6f6d043655e055be9b542aa5f3bdd30e4f3589163e85f93f3640060509f",
+ "sha256:a009ffa5a621762d0c926a078c2d639104becab69e79538a494bcccb62cc0331",
+ "sha256:a8174a03289288c1f6c46d55cef02379b478bfbc8e358e02047487cad44c6ca1",
+ "sha256:ab43590f9cd5108f41aacf9fca31841142c786827a74ab7cc8a2eacb634e09a1",
+ "sha256:b10e7c2cd7870ba4ad9b2d8a6102eb5ffc1f16ca35e3de6bfa390c1113029d13",
+ "sha256:b13cfdd6c87fc3efb69ea4ec18ef79c74c3f98b4e5498ca9b85ab3b2c2329a67",
+ "sha256:b64d987153888790bcdb03a6473d321820597ab8dd9243b27a92153c4fa50fd2",
+ "sha256:b7951a701c07ea584c4fe327834b92a30825514c868b1f69c30445093fdd9d5a",
+ "sha256:bdb12f69bcc02700c2b47e070238f42cb87f18c0bc1fc4cdb4fb2bc5fd7a3b8b",
+ "sha256:c35d298c2c4bba75feb2195655dfea8124d855dfd7343bf8b8c055421eaf0cf8",
+ "sha256:c608937067d2fc5a4dd1a5ce92fd9e1398691b8c5d012d66e1ddd430e9244376",
+ "sha256:c9a6538e0415310aad77cb94004ca6482330fece18036b5f360b62c45814c4ef",
+ "sha256:d8dfc6ab58ca7dda47d9237349157500468e404b17213d44fc1cb77bce532288",
+ "sha256:da4869fc5e7f62a88f3fe0b5c919d1d9f7ea3cef92d3689de2823fd27e40aa75",
+ "sha256:de759aafbae8763283b2ee5869c7255391fbc4de3ff171f8f030b5ec48381b74",
+ "sha256:e3157c7594ff2ef1634ee058aafc56a82db665c9438fd41b390f3bde1ab12250",
+ "sha256:e3f276d8493c3c97930e354b2595a44a21348b320d859fb4a2b9f66da9ed27ab",
+ "sha256:ee4c11e460685c3e0c64a4c5de82ae143622410950d6be863303a1c4ba0e36d6",
+ "sha256:f1235f5ea01b7db5468d53ece6aaddf1ad0b88d9e7462b86ef96fe04995d7247",
+ "sha256:f7cee03c9a2e2ee26ec07479f38ea9c884e301d42c6d43a19d20fb014e3ba925",
+ "sha256:f859fb09d9583a985be9a493d5cfc5515b56b08f7447759a0c5deaf68d80506e",
+ "sha256:ffcebe56eb09ff0c0885e750036a095e23793ba6c2e894e7e63f6d89ad51f22e"
],
"index": "pypi",
"markers": "python_version >= '3.9'",
- "version": "==1.19.0"
+ "version": "==1.19.1"
},
"mypy-extensions": {
"hashes": [
diff --git a/README.md b/README.md
index 6734d90..df30c91 100644
--- a/README.md
+++ b/README.md
@@ -87,7 +87,7 @@ python -m pip install pipenv
Then run
```sh
-pipenv --python 3.6.9 install
+pipenv --python 3.12 install
```
To create and activate the virtualenv. Once in the shell, you can go back with the `exit` command and reactivate the virtualenv simply `pipenv shell`
@@ -124,36 +124,6 @@ Create multiple users at once, with randomly generated password.
ENV=dev pipenv run ./manage.py createaccount guestA@uni.lu guestB@uni.lu
```
-Index a collection stored in the db using its :
-
-```sh
-ENV=dev ./manage.py synccollection test-abcd
-```
-
-Export query as csv using (first argument being `user_id` then the solr query):
-
-```sh
-ENV=dev ./manage.py exportqueryascsv 1 "content_txt_fr:\"premier ministre portugais\""
-```
-
-Create (or get) a collection:
-
-```sh
-ENV=dev pipenv run ./manage.py createcollection "name of the collection" my-username
-```
-
-Then once you get the collection id, usually a concatenation of the creator profile uid and of the slugified version of the desired name, you can add query results to the collection:
-
-```sh
-ENV=dev pipenv run python ./manage.py addtocollectionfromquery local-user_name-of-the-collection "content_txt_fr:\"premier ministre portugais\""
-```
-
-Index a collection from a list of tr-passages ids resulting from a solr query:
-
-```sh
-ENV=dev pipenv run python ./manage.py addtocollectionfromtrpassagesquery local-dg-abcde "cluster_id_s:tr-nobp-all-v01-c8590083914"
-```
-
Stop a specific job from command line:
```sh
@@ -165,7 +135,7 @@ ENV=dev pipenv run python ./manage.py stopjob 1234
Specify the environment variable `ENV=test` to run the tests with the `console` email backend:
```sh
-EMAIL_BACKEND=django.core.mail.backends.console.EmailBackend ENV=dev pipenv run ./manage.py test
+EMAIL_BACKEND=django.core.mail.backends.console.EmailBackend ENV=test pipenv run ./manage.py test
```
## Use in production
@@ -178,13 +148,6 @@ Test image locally:
make run
```
-### Note on collection syncronisation between indices. (now deprecated, see impresso-middle-layer for more details)
-
-Collections are simple identifiers assigned to a set of newspaper articles and stored in the `search` index. However, other indices (e.g. `tr_passages`) can be linked to a collection to allow cross-indices search.
-The task of creating a collection is a long running one because it uses a solr search query to filter the `content items` and a solr update request to add the collection tag to the various indices. Every search request is limited to `settings.IMPRESSO_SOLR_EXEC_LIMIT` rows (100 by default) and the number of loops is limited to the user `max_allowed_loops` parameter in the database and in general cannot be higher of `settings.IMPRESSO_SOLR_MAX_LOOPS` (100 recommended for a total of 100\*100 rows default max). Set both parameters in the `.env` file accordingly.
-
-The task of creating a collection is delegated to the _Celery_ task manager and a `Job` instance stored in the database is assigned to the task to allow the follow-up of the task progress. The task is executed asynchronously. In the future releases, the user will be notified via email when the task is completed (still todo).
-
### Using a proxy
If the database is only accessible via a socks proxy, add the following to your `.env` file:
diff --git a/impresso/management/commands/additemstocollection.py b/impresso/management/commands/additemstocollection.py
deleted file mode 100644
index 0bd9a72..0000000
--- a/impresso/management/commands/additemstocollection.py
+++ /dev/null
@@ -1,21 +0,0 @@
-import requests, json
-from django.core.management.base import BaseCommand, CommandError
-from impresso.models import Collection, CollectableItem
-from impresso.tasks import store_collection, count_items_in_collection
-from django.conf import settings
-
-
-class Command(BaseCommand):
- help = 'add specific articles id to a specific collection. Usage: ENV=prod pipenv run ./manage.py additemstocollection local-dg-AN5AoosL IMP-2005-04-28-a-i0365 LSE-1924-03-07-a-i0026'
-
- def add_arguments(self, parser):
- parser.add_argument('collection_id', type=str)
- parser.add_argument('items_ids', nargs='+', type=str)
-
- def handle(self, *args, collection_id, items_ids, **options):
- self.stdout.write('sync: %s' % collection_id)
- collection = Collection.objects.get(pk=collection_id)
- self.stdout.write('start syncing collection "%s"(pk=%s)...' % (collection.name, collection.pk))
- self.stdout.write('items: %s' % items_ids)
- result = collection.add_items_to_index(items_ids=items_ids)
- self.stdout.write('result: %s' % result)
diff --git a/impresso/management/commands/addtocollectionfromquery.py b/impresso/management/commands/addtocollectionfromquery.py
deleted file mode 100644
index cab7403..0000000
--- a/impresso/management/commands/addtocollectionfromquery.py
+++ /dev/null
@@ -1,24 +0,0 @@
-from django.core.management.base import BaseCommand
-from impresso.models import Collection, CollectableItem
-from impresso.tasks import add_to_collection_from_query
-
-
-class Command(BaseCommand):
- help = "Add content items from a solr query"
-
- def add_arguments(self, parser):
- parser.add_argument("collection_id", type=str)
- parser.add_argument("q", type=str)
-
- def handle(self, collection_id, q, *args, **options):
- collection = Collection.objects.get(pk=collection_id)
- self.stdout.write('collection to fill: "%s"' % collection.pk)
- self.stdout.write('query: "%s"' % q)
-
- add_to_collection_from_query.delay(
- collection_id=collection.pk,
- user_id=collection.creator.pk,
- query=q,
- content_type=CollectableItem.ARTICLE,
- )
- self.stdout.write('"add_to_collection_from_query" task launched.')
diff --git a/impresso/management/commands/addtocollectionfromtrpassagesquery.py b/impresso/management/commands/addtocollectionfromtrpassagesquery.py
deleted file mode 100644
index 8b70500..0000000
--- a/impresso/management/commands/addtocollectionfromtrpassagesquery.py
+++ /dev/null
@@ -1,88 +0,0 @@
-import logging
-from django.core.management.base import BaseCommand
-from impresso.models import Collection
-from impresso.tasks import add_to_collection_from_tr_passages_query
-from impresso.utils.tasks.textreuse import add_tr_passages_query_results_to_collection
-
-logger = logging.getLogger(__name__)
-
-
-class Command(BaseCommand):
- """
- usage ENV=dev pipenv run python ./manage.py addtocollectionfromtrpassagesquery
-
- Example: add to a specific collection all content items where passages belongs to a cluster_id_s
- e.g. ENV=dev pipenv run python ./manage.py addtocollectionfromtrpassagesquery local-dg-C7aXRWeC "cluster_id_s:tr-nobp-all-v01-c8590083914"
- """
-
- help = "Add content_items to one collection from a solr query on TR_PASSAGES index, collapsed by content_items"
-
- def add_arguments(self, parser):
- parser.add_argument("collection_id", type=str)
- parser.add_argument("q", type=str)
-
- parser.add_argument(
- "--immediate",
- action="store_true",
- help="avoid delay tasks using celery (do not use in production)",
- )
- parser.add_argument(
- "--skip",
- type=int,
- default=0,
- action="store",
- help="skip n content items",
- )
- parser.add_argument(
- "--limit",
- type=int,
- default=10,
- action="store",
- help="skip n content items",
- )
-
- def handle(
- self, collection_id, q, immediate=False, skip=0, limit=10, *args, **options
- ):
- self.stdout.write(
- self.style.SUCCESS(
- "addtocollectionfromtrpassagesquery collection_id={collection_id}"
- )
- )
- self.stdout.write(
- f"Command launched with args: \n"
- f" query={q} immediate={immediate} skip={skip} limit={limit}"
- )
-
- collection = Collection.objects.get(pk=collection_id)
- self.stdout.write(f"Collection found: collection.pk={collection.pk}")
- if immediate:
- loop_skip = skip
- page, loops, progress = add_tr_passages_query_results_to_collection(
- collection_id=collection.pk,
- query=q,
- skip=loop_skip,
- limit=limit,
- # logger=logger,
- )
- self.stdout.write(f"progress={progress} page={page} loops={loops}")
- while page < loops:
- loop_skip = loop_skip + limit
- page, loops, progress = add_tr_passages_query_results_to_collection(
- collection_id=collection.pk,
- query=q,
- skip=loop_skip,
- limit=limit,
- # logger=logger,
- )
- self.stdout.write(f"progress={progress} page={page} loops={loops}")
- # update collection count_items
- collection.update_count_items()
- else:
- add_to_collection_from_tr_passages_query.delay(
- collection_id=collection.pk,
- user_id=collection.creator.pk,
- query=q,
- skip=skip,
- limit=limit,
- )
diff --git a/impresso/management/commands/deletecollection.py b/impresso/management/commands/deletecollection.py
deleted file mode 100644
index 9c9eae8..0000000
--- a/impresso/management/commands/deletecollection.py
+++ /dev/null
@@ -1,42 +0,0 @@
-import sys
-from django.core.management.base import BaseCommand
-from impresso.models import Collection
-from impresso.tasks import remove_collection
-
-
-class Command(BaseCommand):
- help = "count collected items in a collection and syncronize with SOLR"
-
- def add_arguments(self, parser):
- parser.add_argument("collection_ids", nargs="+", type=str)
- parser.add_argument("user_id", type=str, help="user id (int) or 'admin'")
-
- def handle(self, user_id, collection_ids, *args, **options):
- self.stdout.write(self.style.HTTP_INFO(f"\n\ndeletecollection:"))
- self.stdout.write(
- f" - collection_ids={collection_ids} \n - user_id={user_id}\n\n",
- )
- collections = Collection.objects.filter(pk__in=collection_ids)
- self.stdout.write(f"n. collection to delete: {collections.count()}")
-
- # if user_id is not a number string:
- if not user_id.isdigit() and not user_id == "admin":
- self.stderr.write(
- f"invalid user_id: {user_id}, should be a digit or 'admin'"
- )
- sys.exit(1)
-
- for collection_id in collection_ids:
- self.stdout.write(f"delay task: remove_collection (pk={collection_id})...")
- try:
- collection = Collection.objects.get(pk=collection_id)
- collection.status = Collection.DELETED
- collection.save()
- except Collection.DoesNotExist:
- self.stdout.write(
- f"collection {collection_id} does not exist, but let's try to delete it from solr..."
- )
- remove_collection.delay(
- collection_id=collection_id,
- user_id=collection.creator.pk if user_id == "admin" else user_id,
- )
diff --git a/impresso/management/commands/exportqueryascsv.py b/impresso/management/commands/exportqueryascsv.py
deleted file mode 100644
index dbfbc3a..0000000
--- a/impresso/management/commands/exportqueryascsv.py
+++ /dev/null
@@ -1,182 +0,0 @@
-from django.contrib.auth.models import User
-from django.core.management.base import BaseCommand
-from impresso.models import UserBitmap, Job, Attachment
-from impresso.utils.bitmap import check_bitmap_keys_overlap
-from impresso.tasks import export_query_as_csv
-from django.conf import settings
-from impresso.solr import find_all
-from impresso.utils.bitmask import BitMask64, is_access_allowed
-from impresso.utils.solr import serialize_solr_doc_content_item_to_plain_dict
-from impresso.utils.tasks.export import helper_export_query_as_csv_progress
-
-
-class Command(BaseCommand):
- help = "Export a SOLR query as a CSV file. Query format is the string for the q parameter in the SOLR query."
-
- def add_arguments(self, parser):
- parser.add_argument("user_id", type=str)
- parser.add_argument("q", type=str)
- parser.add_argument(
- "--no_prompt",
- action="store_true",
- help="Do not prompt for confirmation before running the task",
- )
- parser.add_argument(
- "--immediate",
- action="store_true",
- help="Run the function behind the task immediately instead of delaying it with Celery",
- )
- parser.add_argument(
- "--query_hash",
- type=str,
- help="The hash of the query string, if any, used to identify the query in the database",
- )
-
- def handle(
- self,
- user_id,
- q,
- no_prompt=False,
- immediate=False,
- query_hash="",
- *args,
- **options,
- ):
- self.stdout.write("\n\n--- Export Solr Query as CSV file ---")
- self.stdout.write("Params \033[34m❤️\033[0m")
- self.stdout.write(f" user_id: {user_id}")
- self.stdout.write(f" q: {q}")
- self.stdout.write(f" --no_prompt: {no_prompt}")
- self.stdout.write(f" --immediate: {immediate}\n\n")
-
- user = User.objects.get(pk=user_id)
-
- self.stdout.write('user id: "%s"' % user.pk)
- self.stdout.write('user uid: "%s"' % user.profile.uid)
- self.stdout.write(f"query q: {q}")
- self.stdout.write(
- f"settings.IMPRESSO_SOLR_FIELDS used in `fl` field: {settings.IMPRESSO_SOLR_FIELDS}"
- )
- # print out IMPRESSO_SOLR_URL
- self.stdout.write(
- f"settings.IMPRESSO_SOLR_URL_SELECT: \033[34m{settings.IMPRESSO_SOLR_URL_SELECT}\033[0m"
- )
- # print out user settings from profile
- self.stdout.write("\nuser profile settings:")
- self.stdout.write(
- f" max_loops_allowed: \033[34m{user.profile.max_loops_allowed}\033[0m"
- )
- self.stdout.write(
- f" max_parallel_jobs: \033[34m{user.profile.max_parallel_jobs}\033[0m"
- )
- # bitmap
- try:
- user_bitmap_as_int = user.bitmap.get_bitmap_as_int()
-
- except User.bitmap.RelatedObjectDoesNotExist:
- user_bitmap_as_int = UserBitmap.USER_PLAN_GUEST
- self.stdout.write(
- self.style.WARNING(
- f" no bitmap found for user, using default bitmap: {bin(user_bitmap_as_int)}"
- )
- )
-
- self.stdout.write(
- f" user_current_bitmap: \033[34m{bin(user_bitmap_as_int)}\033[0m"
- )
- user_bitmap_as_str = BitMask64(user_bitmap_as_int)
- self.stdout.write(f" user bitmap as str: \033[34m{user_bitmap_as_str}\033[0m")
-
- # bitmap print out as base64
-
- # test query
- results = find_all(q=q, fl=settings.IMPRESSO_SOLR_FIELDS)
- self.stdout.write(
- f"\ntotal documents found: {self.style.SUCCESS(results['response']['numFound'])}\n\n"
- )
-
- if not results["response"]["numFound"]:
- self.stdout.write(" no results found, aborting.")
- return
-
- # print out first Solr document as content item properties
- self.stdout.write(f"First document found as example:")
-
- first_doc = results["response"]["docs"][0]
- first_content_item = serialize_solr_doc_content_item_to_plain_dict(first_doc)
- for k, v in first_content_item.items():
- self.stdout.write(f" {k}: \033[34m{v}\033[0m")
-
- # check that user has right to export using the bitmaps
- if "_bm_get_tr_i" in first_doc.keys():
- self.stdout.write(
- "\n\nCheck if user has right to export the first result Transcript using the bitmap"
- )
- # if bitmap is a string of 0 and 1, convert it to int first
- first_content_item_bitmap = first_content_item["_bm_get_tr_i"]
- self.stdout.write(
- f" content bitmap: \033[34m{first_content_item_bitmap}\033[0m"
- )
- overlap = is_access_allowed(
- accessor=user_bitmap_as_str,
- content=BitMask64(first_content_item_bitmap),
- )
- if overlap:
- self.stdout.write(
- self.style.SUCCESS(" user can get the transcript of this document")
- )
- else:
- self.stdout.write(
- self.style.WARNING(
- " user has no right to get the transcript this document"
- )
- )
- else:
- self.stdout.write(
- self.style.WARNING(
- " no field `bm_get_tr` found in the first document, user has no right to export the transcript this document"
- )
- )
- if not no_prompt:
- confirm = input(
- self.style.NOTICE(
- "\n\nDo you want to proceed with exporting the query as CSV? (type 'y' for yes): "
- )
- )
- if confirm.lower() != "y":
- self.stdout.write(
- self.style.WARNING(
- "Export cancelled by user. Use --no_prompt optional arg to avoid the confirmation.\n\n"
- )
- )
- return
- if not immediate:
- export_query_as_csv.delay(
- query=q,
- user_id=user_id,
- description="from command management",
- query_hash=query_hash,
- )
- self.stdout.write('"test" task launched, check celery.')
- self.stdout.write("\n\n---- end ----\n\n")
- return
- # run the function immediately,
- # save current job then start export_query_as_csv task.
- job = Job.objects.create(
- type=Job.EXPORT_QUERY_AS_CSV,
- creator_id=user_id,
- description="from command management",
- )
- attachment = Attachment.create_from_job(job, extension="csv")
- self.stdout.write(f"job created: {job}")
- self.stdout.write(
- f"attachment created: {self.style.SUCCESS(attachment.upload.path)}"
- )
- page, loops, progress = helper_export_query_as_csv_progress(
- job=job,
- query=q,
- query_hash=query_hash,
- user_bitmap_key=user_bitmap_as_str,
- )
- self.stdout.write(f"page: {page}, loops: {loops}, progress: {progress}")
- self.stdout.write("\n\n---- end ----\n\n")
diff --git a/impresso/management/commands/synccollectableitems.py b/impresso/management/commands/synccollectableitems.py
deleted file mode 100644
index af12f2d..0000000
--- a/impresso/management/commands/synccollectableitems.py
+++ /dev/null
@@ -1,128 +0,0 @@
-import logging, timeit, requests, itertools, datetime, json
-
-from django.conf import settings
-from django.core.management.base import BaseCommand
-from django.core.paginator import Paginator
-from impresso.models import CollectableItem
-from impresso.solr import find_collections_by_ids
-
-# choose the right logger
-logger = logging.getLogger(__name__)
-
-class Command(BaseCommand):
- help = 'store all collections for each collected items'
-
- def add_arguments(self, parser):
- parser.add_argument('--skip', nargs='?', type=int, default=0)
- parser.add_argument('--newspaper', nargs='?', type=str, default=None)
- parser.add_argument('--collection_id', nargs='?', type=str, default=None)
- parser.add_argument('--user_id', nargs='?', type=int, default=0)
-
- def handle(self, skip, newspaper, collection_id, user_id, *args, **options):
- self.stdout.write('sync all items! SKIP=%s'% skip)
- self.stdout.write('solr target: %s' % settings.IMPRESSO_SOLR_URL_SELECT)
- self.stdout.write('mysql port: %s' % settings.DATABASES['default']['PORT'])
- self.stdout.write('mysql name: %s' % settings.DATABASES['default']['NAME'])
- self.stdout.write('-- opt arg SKIP=%s'% skip)
- self.stdout.write('-- opt arg newspaper=%s' % newspaper)
- self.stdout.write('-- opt arg collection_id=%s' % collection_id)
- self.stdout.write('-- opt arg user_id=%s' % user_id)
- items_queryset = CollectableItem.objects.filter()
- if newspaper:
- items_queryset = items_queryset.filter(item_id__startswith=newspaper)
- if collection_id:
- items_queryset = items_queryset.filter(collection_id=collection_id)
- if user_id:
- items_queryset = items_queryset.filter(collection__creator_id=user_id)
- self.stdout.write('-- query %s' % items_queryset.query)
- items = items_queryset.values_list('item_id', flat=True).order_by('item_id').distinct()
- total = items.count()
- self.stdout.write('total items %s' % total)
- logger.debug('starting sync %s items' % total)
- logger.debug('main SQL query: "%s"' % items.query)
- c = 0
- runtime = 0.0
- chunksize = 50
- init = timeit.default_timer()
-
- paginator = Paginator(items, chunksize)
- self.stdout.write('total loops %s' % paginator.num_pages)
- logger.debug('loops needed: %s (%s per loop)' % (paginator.num_pages, chunksize))
- # for page in pages
- for page in range(skip + 1, paginator.num_pages + 1):
- if c == 0:
- start = timeit.default_timer()
- # add initial skipped elements
- c = skip * chunksize
-
- self.stdout.write('\nloop n. %s of %s\n---' % (page, paginator.num_pages))
- # get object list
- uids = [uid for uid in paginator.page(page).object_list]
-
- # get ALL the collections for those objects
- colls = CollectableItem.objects.filter(item_id__in=uids).values(
- 'item_id',
- 'collection__pk',
- 'collection__status'
- )
-
- docs = { x['id'] : x for x in find_collections_by_ids(ids=uids) }
- ucolls = [];
-
- for uid, group in itertools.groupby(colls, key=lambda x:x.get('item_id')):
- ucoll = list(filter(lambda x: x.get('collection__status') != 'DEL', list(group)))
- # filter by collection status
- ucoll_ss = [x.get('collection__pk') for x in ucoll]
- # calculate diff between mysql and solr docs
- doc = docs.get(uid, None)
-
- if doc is None:
- logger.error('AttributeError: unknown uid "%s" in SOLR %s' % (uid, settings.IMPRESSO_SOLR_URL_SELECT))
- continue
-
- diffs = set(ucoll_ss).symmetric_difference(set(doc.get('ucoll_ss', [])))
-
- if len(diffs) > 0:
- ucolls.append({
- 'id': uid,
- 'ucoll_ss': {
- 'set': ucoll_ss
- },
- '_version_': doc.get('_version_'),
- })
-
- if len(ucolls):
- self.stdout.write('n. atomic updates todo: %s / %s' % (len(ucolls), len(uids)))
-
- # print(ucolls)
- res = requests.post(settings.IMPRESSO_SOLR_URL_UPDATE,
- auth = settings.IMPRESSO_SOLR_AUTH_WRITE,
- params = {
- 'commit': 'true',
- 'versions': 'true',
- },
- data = json.dumps(ucolls),
- json=True,
- headers = {
- 'content-type': 'application/json; charset=UTF-8'
- },
- )
-
- if res.status_code == 409:
- print(res.json())
-
- res.raise_for_status()
- else:
- self.stdout.write('no atomic updates needed, collections are synced!')
-
- # updata completion count
- c = c + len(uids)
- stop = timeit.default_timer()
- runtime = stop - start
- completion = float(c) / total
-
- self.stdout.write('runtime: %s s' % runtime)
- self.stdout.write('completion: %s %%' % (completion * 100))
- self.stdout.write('ETA: %s s.' % datetime.timedelta(seconds=(runtime * 100 / completion)))
- # group by uid
- logger.debug('syncing completed on %s items in %s s' % (total, runtime))
diff --git a/impresso/management/commands/synccollection.py b/impresso/management/commands/synccollection.py
deleted file mode 100644
index b687aeb..0000000
--- a/impresso/management/commands/synccollection.py
+++ /dev/null
@@ -1,29 +0,0 @@
-import requests, json
-from django.core.management.base import BaseCommand, CommandError
-from impresso.models import Collection, CollectableItem
-from impresso.tasks import store_collection, count_items_in_collection
-from django.conf import settings
-
-
-class Command(BaseCommand):
- help = 'count collected items in a collection and syncronize with SOLR'
-
- def add_arguments(self, parser):
- parser.add_argument('collection_id', nargs='+', type=str)
-
- def handle(self, *args, **options):
- self.stdout.write('sync: %s' % options['collection_id'])
- collections = Collection.objects.filter(pk__in=options['collection_id'])
- self.stdout.write('n. collection to sync: %s' % collections.count())
-
- for collection in collections:
- self.stdout.write('start syncing collection "%s"(pk=%s)...' % (collection.name, collection.pk))
-
- try:
- count_items_in_collection.delay(collection_id=collection.pk)
- except Exception as e:
- self.stderr.write(e)
- try:
- store_collection.delay(collection_id=collection.pk)
- except Exception as e:
- self.stderr.write(e)
diff --git a/impresso/management/commands/updatecollection.py b/impresso/management/commands/updatecollection.py
deleted file mode 100644
index 29d575b..0000000
--- a/impresso/management/commands/updatecollection.py
+++ /dev/null
@@ -1,66 +0,0 @@
-from django.core.management.base import BaseCommand
-from impresso.models import Collection
-from django.contrib.auth.models import User
-from impresso.tasks import update_collection
-
-
-class Command(BaseCommand):
- help = "Manage articles in a user's collection"
-
- def add_arguments(self, parser):
- parser.add_argument("user_id", type=str)
- parser.add_argument("collection_id", type=str)
- parser.add_argument(
- "--add", nargs="+", type=str, help="List of article IDs to add"
- )
- parser.add_argument(
- "--remove", nargs="+", type=str, help="List of article IDs to remove"
- )
-
- def handle(self, user_id, collection_id, *args, **options):
- items_to_add = options["add"]
- items_to_remove = options["remove"]
- # Print items to add
- if items_to_add:
- self.stdout.write("\n1. Check items to add:")
- for item in items_to_add:
- self.stdout.write(f" - \033[32m{item}\033[0m")
-
- # Print items to remove
- if items_to_remove:
- self.stdout.write("\n2. Check items to remove:")
- for item in items_to_remove:
- self.stdout.write(f" - \033[33m{item}\033[0m")
-
- if not items_to_add and not items_to_remove:
- self.stderr.write(self.style.ERROR("No items to add or remove"))
- return
-
- self.stdout.write(f"\n3. Get user having user_id: {user_id}")
- try:
- user = User.objects.get(pk=user_id)
- except User.DoesNotExist:
- self.stderr.write(self.style.ERROR("User not found"))
- return
- self.stdout.write(f" - user found: \033[34m{user.username}\033[0m")
-
- self.stdout.write(
- f"\n4. Get collection with collection_id: {collection_id} and user_id: {user_id}"
- )
- try:
- collection = Collection.objects.get(id=collection_id, creator=user)
- except Collection.DoesNotExist:
- self.stderr.write(self.style.ERROR("User or collection not found"))
- return
- self.stdout.write(f" - collection found: \033[34m{collection.name}\033[0m")
-
- # collection_id, user_id, items_ids_to_add=[], items_ids_to_remove=[]
- message = update_collection.delay(
- collection_id=collection.id,
- user_id=user.id,
- items_ids_to_add=items_to_add,
- items_ids_to_remove=items_to_remove,
- )
- self.stdout.write(
- f"\n5. Task \033[36m{message.id}\033[0m launched, check celery."
- )
diff --git a/impresso/management/commands/updatecollectioncount.py b/impresso/management/commands/updatecollectioncount.py
deleted file mode 100644
index 121713f..0000000
--- a/impresso/management/commands/updatecollectioncount.py
+++ /dev/null
@@ -1,38 +0,0 @@
-import requests, json
-from django.core.management.base import BaseCommand, CommandError
-from impresso.models import Collection, CollectableItem
-from impresso.tasks import store_collection, count_items_in_collection
-from django.conf import settings
-
-
-class Command(BaseCommand):
- help = "update count for collected items in a collection"
-
- def add_arguments(self, parser):
- parser.add_argument("collection_ids", nargs="+", type=str)
-
- def handle(self, collection_ids, *args, **options):
- self.stdout.write(self.style.HTTP_INFO(f"\n\nUpdate collection count_item"))
- self.stdout.write(
- f" - collection_idss={collection_ids}",
- )
- self.stdout.write("sync: %s" % collection_ids)
- collections = Collection.objects.filter(pk__in=collection_ids)
- self.stdout.write("n. collection to sync: %s" % collections.count())
-
- for collection in collections:
- self.stdout.write(
- 'start updating collection "{}"(pk={})...'.format(
- collection.name, collection.pk
- )
- )
- count_in_solr = collection.update_count_items()
- count_in_db = CollectableItem.objects.filter(collection=collection).count()
- self.stdout.write(
- 'done, collection "{}"(pk={}) updated, count_items: {}, in db: {}'.format(
- collection.name,
- collection.pk,
- count_in_solr,
- count_in_db,
- )
- )
diff --git a/impresso/settings.py b/impresso/settings.py
index eea46ca..4c8aee5 100644
--- a/impresso/settings.py
+++ b/impresso/settings.py
@@ -407,7 +407,9 @@
IMPRESSO_EMAIL_SUBJECT_AFTER_USER_SPECIAL_MEMBERSHIP_REQUEST_CREATED_TO_INSTITUTION = (
"A Special Membership Request has been submitted"
)
-
+IMPRESSO_EMAIL_SUBJECT_AFTER_USER_ACTIVATION_PLAN_REJECTED_TO_USER = (
+ "Access granted to Impresso (Basic User Access)"
+)
# Logging
LOGGING = {
"version": 1,
diff --git a/impresso/tasks/__init__.py b/impresso/tasks/__init__.py
index 09c76ba..fbf572d 100644
--- a/impresso/tasks/__init__.py
+++ b/impresso/tasks/__init__.py
@@ -4,8 +4,7 @@
from celery.utils.log import get_task_logger
from django.contrib.auth.models import User, Group
from ..celery import app
-from ..models import Job, Collection, CollectableItem, Attachment
-from ..models import UserBitmap
+from ..models import Job
from ..models import UserChangePlanRequest
from ..utils.tasks import (
TASKSTATE_INIT,
@@ -13,23 +12,16 @@
update_job_completed,
is_task_stopped,
)
-from ..utils.tasks.collection import helper_update_collections_in_tr_passages_progress
-from ..utils.tasks.textreuse import remove_collection_from_tr_passages
-from ..utils.tasks.textreuse import add_tr_passages_query_results_to_collection
-from ..utils.tasks.collection import (
- helper_remove_collection_progress,
- helper_store_collection_progress,
-)
-from ..utils.tasks.collection import METHOD_ADD_TO_INDEX, METHOD_DEL_FROM_INDEX
+
from ..utils.tasks.account import (
send_emails_after_user_registration,
+ send_emails_after_user_activation_plan_rejected,
send_emails_after_user_activation,
send_email_password_reset,
send_email_plan_change,
send_email_plan_change_rejected,
)
from ..utils.tasks.userBitmap import helper_update_user_bitmap
-from ..utils.tasks.export import helper_export_query_as_csv_progress
from .userSpecialMembershipRequest_tasks import *
from .userChangePlanRequest_task import *
@@ -37,6 +29,17 @@
logger = get_task_logger(__name__)
+# Reusable decorator applying the project's default Celery retry policy.
+def default_task_config(func):
+    return app.task(
+        bind=True,
+        autoretry_for=(Exception,),
+        retry_backoff=2,  # Celery's option is retry_backoff, not exponential_backoff
+        retry_kwargs={"max_retries": 5},
+        retry_jitter=True,
+    )(func)
+
+
def get_collection_as_obj(collection):
return {
"id": collection.pk,
@@ -47,14 +50,14 @@ def get_collection_as_obj(collection):
}
-@app.task(bind=True)
+@default_task_config
def echo(self, message):
logger.info(f"Echo: {message}")
response = f"Hello world. This is your message: {message}"
return response
-@app.task(bind=True)
+@default_task_config
def test_progress(self, job_id, sleep=100, pace=0.01, progress=0.0):
# get the job so that we can update its status
job = Job.objects.get(pk=job_id)
@@ -84,7 +87,7 @@ def test_progress(self, job_id, sleep=100, pace=0.01, progress=0.0):
update_job_completed(task=self, job=job, extra=extra, logger=logger)
-@app.task(bind=True)
+@default_task_config
def test(self, user_id: int, sleep: int = 1, pace: float = 0.05):
"""
Initiates a test job and starts the test_progress task.
@@ -106,846 +109,78 @@ def test(self, user_id: int, sleep: int = 1, pace: float = 0.05):
test_progress.delay(job_id=job.pk, sleep=sleep, pace=pace)
-@app.task(
- bind=True,
- autoretry_for=(Exception,),
- exponential_backoff=2,
- retry_kwargs={"max_retries": 5},
- retry_jitter=True,
-)
-def export_query_as_csv_progress(
- self,
- job_id: int,
- query: str,
- user_bitmap_key: int,
- query_hash: str = "",
- progress: float = 0.0,
- skip: int = 0,
- limit: int = 100,
-) -> None:
- """
- Export query results as a CSV file with progress tracking.
-
- This task retrieves query results, writes them to a CSV file, and updates the job's progress.
- If the query has multiple pages of results, the task will recursively call itself to process
- the next page. Once all pages are processed, the CSV file is compressed into a ZIP file.
-
- Args:
- self: The task instance.
- job_id (int): The ID of the job to update.
- query (str): The query string to execute.
- user_bitmap_key (int): The user bitmap key, as int.
- query_hash (str, optional): The hash of the query. Defaults to an empty string.
- skip (int, optional): The number of records to skip. Defaults to 0.
- limit (int, optional): The maximum number of records to retrieve per page. Defaults to 100.
-
- Returns:
- None
- """
- # get the job so that we can update its status
- job = Job.objects.get(pk=job_id)
- if is_task_stopped(task=self, job=job, progress=progress, logger=logger):
- return
-
- page, loops, progress = helper_export_query_as_csv_progress(
- job=job,
- query=query,
- query_hash=query_hash,
- user_bitmap_key=user_bitmap_key,
- skip=skip,
- limit=limit,
- logger=logger,
- )
-
- if page < loops:
- job.status = Job.RUN
- update_job_progress(task=self, job=job, progress=progress, logger=logger)
- export_query_as_csv_progress.delay(
- job_id=job.pk,
- query=query,
- query_hash=query_hash,
- user_bitmap_key=user_bitmap_key,
- skip=page * limit,
- limit=limit,
- )
- else:
- update_job_completed(task=self, job=job, logger=logger)
-
-
-@app.task(bind=True)
-def export_query_as_csv(
- self,
- user_id: int,
- query: str,
- description: str,
- query_hash: str,
-) -> None:
- """
- Initiates a job to export a query as a CSV file and starts the export_query_as_csv_progress task.
-
- Args:
- self: The instance of the class.
- user_id (int): The ID of the user initiating the export.
- query (str): The query string to be exported.
- description (str, optional): A description for the job. Defaults to an empty string.
- query_hash (str, optional): A hash of the query string. Defaults to an empty string.
-
- Returns:
- None
- """
- # save current job then start export_query_as_csv task.
- job = Job.objects.create(
- type=Job.EXPORT_QUERY_AS_CSV,
- creator_id=user_id,
- description=description,
- )
- attachment = Attachment.create_from_job(job, extension="csv")
- # if decri
- # get user bitmap, if any
- user_bitmap, created = UserBitmap.objects.get_or_create(user_id=user_id)
- logger.info(
- f"[job:{job.pk} user:{user_id}] launched! "
- f"- Using bitmap {user_bitmap.get_bitmap_as_int()} (created:{created}) "
- f"- attachment:{attachment.pk}"
- )
-
- update_job_progress(
- task=self,
- job=job,
- taskstate=TASKSTATE_INIT,
- progress=0.0,
- logger=logger,
- extra={"query": query, "query_hash": query_hash},
- )
-
- export_query_as_csv_progress.delay(
- job_id=job.pk,
- query=query,
- query_hash=query_hash,
- user_bitmap_key=user_bitmap.get_bitmap_as_int(),
- )
-
-
-@app.task(bind=True)
-def export_collection_as_csv(
- self,
- user_id: int,
- collection_id: int,
- query: str,
- query_hash: str = "",
-) -> None:
- """
- Initiates a job to export a collection as a CSV file and starts the export_query_as_csv_progress task
- like export_query_as_csv.
-
- Args:
- self: The instance of the class.
- user_id (int): The ID of the user initiating the export.
- collection_id (int): The ID of the collection to be exported.
- query (str): The query string to be exported.
- query_hash (str, optional): A hash of the query string. Defaults to an empty string.
-
- Returns:
- None
-
- """
- user_bitmap, created = UserBitmap.objects.get_or_create(user_id=user_id)
- try:
- collection = Collection.objects.get(pk=collection_id, creator__id=user_id)
- except Collection.DoesNotExist:
- logger.error(f"[user:{user_id}] no collection {collection_id} found for user!")
- return
- # save current job then start export_query_as_csv task.
- job = Job.objects.create(
- type=Job.EXPORT_QUERY_AS_CSV,
- creator_id=user_id,
- description=collection.name,
- extra={
- "collection": get_collection_as_obj(collection),
- "query": query,
- "query_hash": query_hash,
- },
- )
- # create empty attachment and attach automatically to the job
- attachment = Attachment.create_from_job(job, extension="csv")
- logger.info(
- f"[job:{job.pk} user:{user_id}] launched! "
- f"- Using bitmap {user_bitmap.get_bitmap_as_int()} (created:{created}) "
- f"- attachment:{attachment.pk} "
- f"- query:{query_hash} description:{job.description}"
- )
-
- # add query to extra. Job status should be INIT
- update_job_progress(
- task=self,
- job=job,
- taskstate=TASKSTATE_INIT,
- progress=0.0,
- logger=logger,
- )
-
- export_query_as_csv_progress.delay(
- job_id=job.pk,
- query=query,
- query_hash=query_hash,
- user_bitmap_key=user_bitmap.get_bitmap_as_int(),
- )
-
-
-@app.task(
- bind=True,
- autoretry_for=(Exception,),
- retry_backoff=True,
- retry_kwargs={"max_retries": 5},
- retry_jitter=True,
-)
-def store_collection_progress(
- self,
- job_id: int,
- collection_id: str,
- items_ids: list[int],
- skip: int,
- limit: int,
- progress: float = 0.0,
- content_type: str = "A",
- method: str = METHOD_ADD_TO_INDEX,
-) -> None:
- """
- Store the progress of a collection processing job.
-
- This function updates the progress of a job that processes a collection of items.
- It constructs a query based on the provided item IDs and synchronizes the query
- results to the collection. If the collection is marked as deleted, it logs the
- status and updates the job as completed. Otherwise, it continues to update the
- job progress and recursively calls itself until the processing is complete.
-
- Args:
- self: The task instance.
- job_id (int): The ID of the job.
- collection_id (int): The ID of the collection.
- items_ids (list[int]): A list of item IDs to be processed.
- skip (int): The number of items to skip in the query.
- limit (int): The maximum number of items to process in one batch.
- content_type (str): The type of content being processed.
- method (str): The method used for processing.
-
- Returns:
- None
- """
- extra: dict = {}
-
- job = Job.objects.get(pk=job_id)
- try:
- collection = Collection.objects.get(pk=collection_id)
- except Collection.DoesNotExist:
- logger.warning(f"Collection.DoesNotExist in DB with pk={collection_id}, skip.")
- update_job_completed(
- task=self,
- job=job,
- extra=extra,
- logger=logger,
- message="Collection doesn't exist!",
- )
- return
-
- if collection.status == Collection.DELETED:
- logger.info(f"Collection {collection_id} status is DEL, exit!")
- extra.update({"cleared": True, "reason": "Collection has status:DEL"})
- update_job_completed(
- task=self,
- job=job,
- extra=extra,
- logger=logger,
- message="Collection is marked for deletion!",
- )
- return
-
- if is_task_stopped(task=self, job=job, progress=progress, logger=logger):
- count_items_in_collection.delay(collection_id=collection_id)
- update_collections_in_tr_passages.delay(
- collection_prefix=collection_id, user_id=collection.creator.pk
- )
- return
+@default_task_config
+def after_user_registered(self, user_id):
+ logger.info(f"[user:{user_id}] just registered")
+ # send confirmation email to the registered user
+ # and send email to impresso admins
+ send_emails_after_user_registration(user_id=user_id, logger=logger)
- query = " OR ".join(map(lambda id: f"id:{id}", items_ids))
- extra = {
- "collection_id": collection_id,
- "collection": get_collection_as_obj(collection),
- "items_ids": items_ids,
- "query": query,
- "method": method,
- }
- page, loops, progress = helper_store_collection_progress(
- job=job,
- collection_id=collection_id,
- query=query,
- content_type=content_type,
- method=method,
- skip=skip,
- limit=limit,
- logger=logger,
- )
- if page <= loops:
- job.status = Job.RUN
- update_job_progress(
- task=self, job=job, progress=progress, extra=extra, logger=logger
- )
- store_collection_progress.delay(
- job_id=job.pk,
- collection_id=collection_id,
- items_ids=items_ids,
- skip=page * limit,
- limit=limit,
- progress=progress,
- content_type=content_type,
- method=method,
- )
- else:
- update_job_completed(task=self, job=job, extra=extra, logger=logger)
- count_items_in_collection.delay(collection_id=collection_id)
- update_collections_in_tr_passages.delay(
- collection_prefix=collection_id, user_id=collection.creator.pk
- )
+@default_task_config
+def after_user_activation(self, user_id):
+ logger.info(f"[user:{user_id}] is now active")
+ # send confirmation email to the registered user
+ # and send email to impresso admins
+ send_emails_after_user_activation(user_id=user_id, logger=logger)
-@app.task(
- bind=True,
- autoretry_for=(Exception,),
- exponential_backoff=2,
- retry_kwargs={"max_retries": 5},
- retry_jitter=True,
-)
-def store_collection(
- self,
- collection_id: int,
- items_ids: list = [],
- method: str = METHOD_ADD_TO_INDEX,
- content_type: str = "A",
-) -> None:
+@default_task_config
+def after_user_activation_plan_rejected(self, user_id: int) -> None:
"""
- Add items_ids to an existing collection.
+ Sends an email notification after a user is activated but only on the basic plan,
+ thus rejecting the initially requested plan.
Args:
self: The task instance.
- collection_id (int): The ID of the collection to update.
- items_ids (list, optional): The list of item IDs to add or remove. Defaults to an empty list.
- method (str, optional): The method to use for updating the collection. Defaults to METHOD_ADD_TO_INDEX.
- content_type (str, optional): The content type of the items. Defaults to "A".
+ user_id (int): The ID of the user who was activated.
Returns:
None
"""
+ logger.info(f"[user:{user_id}] is now active, but on BASIC PLAN")
- # @todo check if the collection is not deleted
- try:
- collection = Collection.objects.get(pk=collection_id)
- collection_to_update = collection.status != Collection.DELETED
-
- if collection.status == Collection.DELETED:
- logger.info(
- f"Collection found with pk={collection_id}, "
- f"status={collection_to_update}"
- )
- logger.info(
- f"Collection found with pk={collection_id}, "
- f"status={collection_to_update}"
- )
- except Collection.DoesNotExist:
- logger.warning(f"Collection.DoesNotExist in DB with pk={collection_id}, skip.")
- return
-
- if method == METHOD_DEL_FROM_INDEX:
- job_type = Job.REMOVE_FROM_SOLR
- else:
- job_type = Job.SYNC_COLLECTION_TO_SOLR
- job = Job.objects.create(type=job_type, creator=collection.creator, status=Job.RUN)
-
- logger.info(
- f"[job:{job.pk} user:{collection.creator.pk}] started for collection:{collection.pk}!"
- )
- update_job_progress(
- task=self,
- job=job,
- taskstate=TASKSTATE_INIT,
- progress=0.0,
- extra={
- "collection": get_collection_as_obj(collection),
- "items": items_ids,
- "method": method,
- },
- logger=logger,
- )
- # start update chain
- store_collection_progress.delay(
- job_id=job.pk,
- collection_id=collection.pk,
- items_ids=items_ids,
- method=method,
- content_type=content_type,
- skip=0,
- limit=100,
- )
-
-
-@app.task(bind=True)
-def count_items_in_collection(self, collection_id):
- # get the collection
- collection = Collection.objects.get(pk=collection_id)
- count_in_solr = collection.update_count_items(logger=logger)
- count_in_db = CollectableItem.objects.filter(collection=collection).count()
- logger.info(
- "Collection(pk:{}) received {} in solr, {} in db.".format(
- collection.pk,
- count_in_solr,
- count_in_db,
- )
- )
+ send_emails_after_user_activation_plan_rejected(user_id=user_id, logger=logger)
-@app.task(
- bind=True,
- autoretry_for=(Exception,),
- exponential_backoff=2,
- retry_kwargs={"max_retries": 5},
- retry_jitter=True,
-)
-def add_to_collection_from_query(
- self, collection_id, user_id, query, content_type, fq=None, serialized_query=None
-):
- # check that the collection exists and user has access.
- collection = Collection.objects.get(pk=collection_id, creator__id=user_id)
- # save current job!
- job = Job.objects.create(
- type=Job.BULK_COLLECTION_FROM_QUERY, creator=collection.creator, status=Job.RUN
- )
- # add collection to extra.
- update_job_progress(
- task=self,
- job=job,
- taskstate=TASKSTATE_INIT,
- progress=0.0,
- extra={
- "collection": get_collection_as_obj(collection),
- "query": query,
- "serializedQuery": serialized_query,
- },
- )
- collection.serialized_search_query = serialized_query
- collection.save(update_fields=["serialized_search_query"])
- # execute premiminary query
- add_to_collection_from_query_progress.delay(
- query=query,
- fq=fq,
- job_id=job.pk,
- collection_id=collection_id,
- content_type=content_type,
- serialized_query=serialized_query,
- )
-
-
-@app.task(
- bind=True,
- autoretry_for=(Exception,),
- exponential_backoff=2,
- retry_kwargs={"max_retries": 5},
- retry_jitter=True,
-)
-def add_to_collection_from_query_progress(
- self,
- query,
- fq,
- job_id,
- collection_id,
- content_type,
- skip=0,
- limit=100,
- progress=0.0,
- serialized_query=None,
-):
- job = Job.objects.get(pk=job_id)
- if is_task_stopped(task=self, job=job, progress=progress, logger=logger):
- return
-
- # get the collection so that we can see its status
- try:
- collection = Collection.objects.get(pk=collection_id)
- except Collection.DoesNotExist:
- update_job_completed(
- task=self,
- job=job,
- extra={
- "collection": {"pk": collection_id},
- "query": query,
- "serializedQuery": serialized_query,
- },
- message=f"Collection doesn't exist tith pk={collection_id}",
- logger=logger,
- )
- return
- if collection.status == Collection.DELETED:
- update_job_completed(
- task=self,
- job=job,
- extra={
- "collection": get_collection_as_obj(collection),
- "query": query,
- "serializedQuery": serialized_query,
- "cleared": True,
- "reason": "Collection has status:DEL",
- },
- message="Collection is marked for deletion...",
- logger=logger,
- )
- return
- logger.info(
- f"[job:{job.pk} user:{job.creator.pk}] "
- f"Collection {collection_id}(status:{collection.status})"
- f"saving query hash = {serialized_query}"
- )
- page, loops, progress = helper_store_collection_progress(
- job=job,
- collection_id=collection_id,
- query=query,
- content_type=content_type,
- skip=skip,
- limit=limit,
- logger=logger,
- )
-
- if page < loops:
- job.status = Job.RUN
- update_job_progress(task=self, job=job, progress=progress, logger=logger)
-
- add_to_collection_from_query_progress.delay(
- query=query,
- fq=fq,
- job_id=job_id,
- collection_id=collection_id,
- content_type=content_type,
- skip=skip + limit,
- limit=limit,
- serialized_query=serialized_query,
- progress=progress,
- )
- else:
- count_items_in_collection.delay(collection_id=collection_id)
- update_collections_in_tr_passages.delay(
- collection_prefix=collection_id, user_id=collection.creator.pk
- )
- update_job_completed(task=self, job=job, logger=logger)
-
-
-@app.task(
- bind=True,
- autoretry_for=(Exception,),
- exponential_backoff=2,
- retry_kwargs={"max_retries": 5},
- retry_jitter=True,
-)
-def remove_collection(self, collection_id, user_id):
- """
- Remove a collection only if its status is DEL
- """
- job = Job.objects.create(
- type=Job.DELETE_COLLECTION, creator_id=user_id, status=Job.RUN
- )
- # check that the collection (still) exists!
- try:
- collection = Collection.objects.get(pk=collection_id)
- # only if the creator is the owner and status is DEL
- is_collection_to_delete = (
- collection.status == Collection.DELETED and collection.creator.pk == user_id
- )
- collection_seralized = get_collection_as_obj(collection)
- logger.info(
- f"[job:{job.pk} user:{user_id}]"
- f" Collection found with pk={collection_id},"
- f" status={is_collection_to_delete}"
- )
- except Collection.DoesNotExist:
- collection_seralized = {"pk": collection_id}
- is_collection_to_delete = True
- logger.info(
- f"[job:{job.pk} user:{user_id}] "
- f"Collection.DoesNotExist in DB with pk={collection_id}, removing on SOLR..."
- )
- if not is_collection_to_delete:
- logger.info(
- f"[job:{job.pk} user:{user_id}] "
- f"Cannot delete collection pk={collection_id}, please set it status=DEL!"
- )
- update_job_completed(task=self, job=job, logger=logger)
- return
- # stat loop
- update_job_progress(
- task=self,
- job=job,
- taskstate=TASKSTATE_INIT,
- progress=0.0,
- extra={"collection": collection_seralized},
- logger=logger,
- )
- remove_collection_progress.delay(
- job_id=job.pk, collection_id=collection_id, user_id=user_id
- )
-
-
-@app.task(
- bind=True,
- autoretry_for=(Exception,),
- exponential_backoff=2,
- retry_kwargs={"max_retries": 5},
- retry_jitter=True,
-)
-def remove_collection_progress(
+@default_task_config
+def email_password_reset(
self,
- job_id: int,
- collection_id: int,
user_id: int,
- limit: int = 100,
- progress: float = 0.0,
+ token: str = "nonce",
+ callback_url: str = "https://impresso-project.ch/app/reset-password",
) -> None:
"""
- This task attempts to remove a collection in a paginated manner, updating the job progress
- accordingly. If the task is stopped, it will return early. Otherwise, it will continue to
- delete the collection in chunks, updating the progress and retrying if necessary until the
- collection is fully removed.
+ Send a password reset email to the user containing a reset link.
Args:
- self (Task): The current task instance.
- job_id (int): The ID of the job associated with this task.
- collection_id (int): The ID of the collection to be removed.
- user_id (int): The ID of the user requesting the removal.
- limit (int, optional): The maximum number of items to process in one go. Defaults to 100.
- progress (float, optional): The current progress of the task. Defaults to 0.0.
+ self: Celery task instance (automatically provided by Celery).
+ user_id (int): The unique identifier of the user requesting password reset.
+ token (str, optional): The password reset token/nonce to include in the reset link.
+ Defaults to "nonce".
+ callback_url (str, optional): The base URL for the password reset callback.
+ Defaults to "https://impresso-project.ch/app/reset-password".
Returns:
None
- """
- job = Job.objects.get(pk=job_id)
- if is_task_stopped(task=self, job=job, progress=progress, logger=logger):
- return
- page, loops, progress = helper_remove_collection_progress(
- collection_id=collection_id, limit=limit, job=job
- )
- update_job_progress(task=self, job=job, progress=progress, extra={}, logger=logger)
- if progress < 1.0:
- remove_collection_progress.delay(
- job_id=job.pk, collection_id=collection_id, user_id=user_id
- )
- else:
- logger.info(f"remove_collection_progress completed page={page} loops={loops}")
- try:
- removed = Collection.objects.get(pk=collection_id).delete()
- logger.info(f"Collection removed: {removed}")
- except Collection.DoesNotExist:
- logger.info("Collection has already been deleted from db. Bye!")
- remove_collection_in_tr.delay(collection_id=collection_id, user_id=user_id)
- update_job_completed(task=self, job=job)
+ Raises:
+ Handled internally by the underlying `send_email_password_reset` function.
+ Example:
+ >>> email_password_reset.delay(user_id=12345, token="abc123xyz")
-@app.task(
- bind=True,
- autoretry_for=(Exception,),
- exponential_backoff=2,
- retry_kwargs={"max_retries": 5},
- retry_jitter=True,
-)
-def update_collections_in_tr_passages_progress(
- self,
- job_id: int,
- collection_prefix: str,
- progress: float = 0.0,
- skip: int = 0,
- limit: int = 100,
-):
- """
- Updates the progress of collections in TR passages for a given job.
- This function retrieves the job by its ID, checks if the task should be stopped,
- synchronizes the collections in TR passages, updates the job progress, and
- recursively calls itself if the job is not yet complete.
- Args:
- self: The task instance.
- job_id (int): The ID of the job to update.
- collection_prefix (str): The prefix of the collection to update.
- progress (float, optional): The current progress of the job. Defaults to 0.0.
- skip (int, optional): The number of items to skip. Defaults to 0.
- limit (int, optional): The maximum number of items to process in one call. Defaults to 100.
- Returns:
- None
+ Note:
+ This task is decorated with @default_task_config which applies default
+ Celery configuration settings (bind, autoretry with backoff and jitter).
"""
- # get the job so that we can update its status
- job = Job.objects.get(pk=job_id)
- extra: dict = {}
- if is_task_stopped(task=self, job=job, progress=progress, extra=extra):
- return
- page, loops, progress = helper_update_collections_in_tr_passages_progress(
- collection_id=collection_prefix, job=job, skip=skip, limit=limit, logger=logger
- )
-
- update_job_progress(task=self, job=job, progress=progress, extra=extra)
-
- if page < loops:
- logger.info(f"[job:{job.pk} user:{job.creator.pk}] task still running!")
- update_collections_in_tr_passages_progress.delay(
- job_id=job_id,
- collection_prefix=collection_prefix,
- progress=progress,
- skip=skip + limit,
- limit=limit,
- )
- else:
- update_job_completed(task=self, job=job, extra=extra)
-
-
-@app.task(
- bind=True,
- autoretry_for=(Exception,),
- exponential_backoff=2,
- retry_kwargs={"max_retries": 5},
- retry_jitter=True,
-)
-def update_collections_in_tr_passages(self, collection_prefix, user_id=None):
- collections = Collection.objects.filter(
- pk__startswith=collection_prefix.replace("*", "")
- )
- total = collections.count()
- logger.info(f"Collections pk__startswith={collection_prefix}" f"count={total}")
- # save current job!
- job = Job.objects.create(
- type=Job.SYNC_COLLECTIONS_TO_SOLR_TR,
- creator_id=user_id if user_id else collections.first().creator.pk,
- status=Job.RUN,
- )
- # initialize job
- update_job_progress(task=self, job=job, taskstate=TASKSTATE_INIT, progress=0.0)
- update_collections_in_tr_passages_progress.delay(
- collection_prefix=collection_prefix,
- job_id=job.pk,
- )
-
-
-@app.task(
- bind=True,
- autoretry_for=(Exception,),
- exponential_backoff=2,
- retry_kwargs={"max_retries": 5},
- retry_jitter=True,
-)
-def remove_collection_in_tr(self, collection_id, user_id):
- try:
- collection = Collection.objects.get(pk=collection_id)
- collection_to_delete = collection.status == Collection.DELETED
- logger.info(
- f"Collection found with pk={collection_id}, "
- f"status={collection_to_delete}"
- )
- except Collection.DoesNotExist:
- collection_to_delete = True
- logger.info(f"Collection.DoesNotExist in DB with pk={collection_id}")
- # save current job!
- job = Job.objects.create(
- type=Job.REMOVE_COLLECTIONS_FROM_SOLR_TR, creator_id=user_id, status=Job.RUN
- )
- if not collection_to_delete:
- logger.info(f"Collection pk={collection_id} not marked as DEL!!")
- update_job_completed(task=self, job=job)
- return
- logger.info(f"Delete collection pk={collection_id} from TR index...")
- # initialize job
- update_job_progress(task=self, job=job, taskstate=TASKSTATE_INIT, progress=0.0)
- remove_collection_in_tr_progress.delay(
- collection_id=collection_id, job_id=job.pk, skip=0, limit=100
- )
-
-
-@app.task(
- bind=True,
- autoretry_for=(Exception,),
- exponential_backoff=2,
- retry_kwargs={"max_retries": 5},
- retry_jitter=True,
-)
-def remove_collection_in_tr_progress(self, collection_id, job_id, skip=0, limit=100):
- # get the job so that we can update its status
- job = Job.objects.get(pk=job_id)
- if is_task_stopped(task=self, job=job, logger=logger):
- return
- page, loops, progress = remove_collection_from_tr_passages(
- collection_id=collection_id, job=job, skip=skip, limit=limit, logger=logger
- )
- logger.info(
- f"[job:{job.pk} user:{job.creator.pk}] running for collection={collection_id}"
- f"{page}/{loops} {progress}%"
- )
- update_job_progress(task=self, job=job, progress=progress, logger=logger)
-
- if progress < 1.0:
- remove_collection_in_tr_progress.delay(
- collection_id=collection_id, job_id=job.pk, skip=skip + limit, limit=limit
- )
- else:
- update_job_completed(task=self, job=job, logger=logger)
-
-
-@app.task(
- bind=True,
- autoretry_for=(Exception,),
- exponential_backoff=2,
- retry_kwargs={"max_retries": 5},
- retry_jitter=True,
-)
-def after_user_registered(self, user_id):
- logger.info(f"[user:{user_id}] just registered")
- # send confirmation email to the registered user
- # and send email to impresso admins
- send_emails_after_user_registration(user_id=user_id, logger=logger)
-
-
-@app.task(
- bind=True,
- autoretry_for=(Exception,),
- exponential_backoff=2,
- retry_kwargs={"max_retries": 5},
- retry_jitter=True,
-)
-def after_user_activation(self, user_id):
- logger.info(f"[user:{user_id}] is now active")
- # send confirmation email to the registered user
- # and send email to impresso admins
- send_emails_after_user_activation(user_id=user_id, logger=logger)
-
-
-@app.task(
- bind=True,
- autoretry_for=(Exception,),
- exponential_backoff=2,
- retry_kwargs={"max_retries": 5},
- retry_jitter=True,
-)
-def email_password_reset(
- self,
- user_id,
- token="nonce",
- callback_url="https://impresso-project.ch/app/reset-password",
-):
logger.info(f"[user:{user_id}] requested password reset!")
- # send confirmation email to the registered user
- # and send email to impresso admins
send_email_password_reset(
user_id=user_id, token=token, callback_url=callback_url, logger=logger
- ),
+ )
-@app.task(
- bind=True,
- autoretry_for=(Exception,),
- exponential_backoff=2,
- retry_kwargs={"max_retries": 5},
- retry_jitter=True,
-)
+@default_task_config
def email_plan_change(self, user_id: int, plan: str) -> None:
"""
Sends an email notification for a user's plan change request.
@@ -959,12 +194,10 @@ def email_plan_change(self, user_id: int, plan: str) -> None:
None
"""
logger.info(f"[user:{user_id}] requested plan change to {plan}!")
- # send confirmation email to the registered user
- # and send email to impresso admins
send_email_plan_change(user_id=user_id, plan=plan, logger=logger)
-@app.task(bind=True)
+@default_task_config
def add_user_to_group_task(self, user_id: int, group_name: str) -> None:
"""
Task to add a user to a group.
@@ -982,7 +215,7 @@ def add_user_to_group_task(self, user_id: int, group_name: str) -> None:
user.groups.add(group)
-@app.task(bind=True)
+@default_task_config
def remove_user_from_group_task(self, user_id: int, group_name: str) -> None:
"""
Task to remove a user from a group.
@@ -1000,13 +233,7 @@ def remove_user_from_group_task(self, user_id: int, group_name: str) -> None:
user.groups.remove(group)
-@app.task(
- bind=True,
- autoretry_for=(Exception,),
- exponential_backoff=2,
- retry_kwargs={"max_retries": 5},
- retry_jitter=True,
-)
+@default_task_config
def after_plan_change_rejected(self, user_id: int) -> None:
"""
Rejects user request (if it is not already accepted!) then
@@ -1040,155 +267,7 @@ def after_plan_change_rejected(self, user_id: int) -> None:
send_email_plan_change_rejected(user_id=user_id, plan=req.plan.name, logger=logger)
-@app.task(
- bind=True,
- autoretry_for=(Exception,),
- exponential_backoff=2,
- retry_kwargs={"max_retries": 5},
- retry_jitter=True,
-)
-def add_to_collection_from_tr_passages_query(
- self,
- collection_id,
- user_id,
- query,
- content_type="A",
- fq=None,
- serialized_query=None,
- skip=0,
- limit=100,
-):
- # check that the collection exists and user has access.
- collection = Collection.objects.get(pk=collection_id, creator__id=user_id)
- # save current job!
- job = Job.objects.create(
- type=Job.BULK_COLLECTION_FROM_QUERY_TR,
- creator=collection.creator,
- status=Job.RUN,
- )
- # add current collection to extra.
- update_job_progress(
- task=self,
- job=job,
- taskstate=TASKSTATE_INIT,
- progress=0.0,
- extra={
- "collection": get_collection_as_obj(collection),
- "query": query,
- "serializedQuery": serialized_query,
- },
- message=f"Add to collection {collection_id} from tr_passages query {query}",
- )
- # execute premiminary query
- add_to_collection_from_tr_passages_query_progress.delay(
- query=query, job_id=job.pk, collection_id=collection_id, skip=skip, limit=limit
- )
-
-
-@app.task(
- bind=True,
- autoretry_for=(Exception,),
- exponential_backoff=2,
- retry_kwargs={"max_retries": 5},
- retry_jitter=True,
-)
-def add_to_collection_from_tr_passages_query_progress(
- self,
- query: str,
- job_id: int,
- collection_id: str,
- skip: int = 0,
- limit: int = 100,
-) -> None:
- """
- Add the content item id resulting from given solr search query on tr_passages index to a collection.
-
- Args:
- query: The query string to execute on tr_passages index.
- job_id: The job id to update.
- collection_id: The collection id to add the content items to.
- skip: The number of results to skip.
- limit: The number of results to return.
- prev_progress: The previous progress value.
- """
- # get the job so that we can update its status
- job = Job.objects.get(pk=job_id)
- if is_task_stopped(task=self, job=job):
- return
- page, loops, progress = add_tr_passages_query_results_to_collection(
- collection_id=collection_id,
- job=job,
- query=query,
- skip=skip,
- limit=limit,
- logger=logger,
- )
- update_job_progress(
- task=self,
- job=job,
- progress=progress,
- message=f"loop {page} of {loops} collection={collection_id}",
- logger=logger,
- )
-
- if progress < 1.0:
- # call the task again, updating the skip and limit
- add_to_collection_from_tr_passages_query_progress.delay(
- query=query,
- job_id=job_id,
- collection_id=collection_id,
- skip=skip + limit,
- limit=limit,
- )
- else:
- # save number of item added to collection
- collection = Collection.objects.get(
- pk=collection_id, creator__id=job.creator.pk
- )
- # update collection count_items manually from main index.
- total = collection.update_count_items()
- # done!
- update_job_completed(
- task=self,
- job=job,
- message=f"loop {page} of {loops} collection={collection_id} items={total}",
- )
-
-
-@app.task(
- bind=True,
- autoretry_for=(Exception,),
- exponential_backoff=2,
- retry_kwargs={"max_retries": 5},
- retry_jitter=True,
-)
-def update_collection(
- self, collection_id, user_id, items_ids_to_add=[], items_ids_to_remove=[]
-):
- # verify that the collection belong to the user
- try:
- Collection.objects.get(pk=collection_id, creator__id=user_id)
- except Collection.DoesNotExist:
- logger.info(f"Collection {collection_id} not found for user {user_id}")
- return
-
- if items_ids_to_add:
- store_collection.delay(
- collection_id=collection_id,
- items_ids=items_ids_to_add,
- method=METHOD_ADD_TO_INDEX,
- )
- if items_ids_to_remove:
- store_collection.delay(
- collection_id=collection_id,
- items_ids=items_ids_to_remove,
- method=METHOD_DEL_FROM_INDEX,
- )
- # update count items in collection (db)
- count_items_in_collection.delay(collection_id=collection_id)
-
-
-@app.task(bind=True)
+@default_task_config
def update_user_bitmap_task(self, user_id):
"""
Update the user bitmap for the given user.
diff --git a/impresso/templates/admin/users/toggle_status.html b/impresso/templates/admin/users/toggle_status.html
index c9b2337..53c3839 100644
--- a/impresso/templates/admin/users/toggle_status.html
+++ b/impresso/templates/admin/users/toggle_status.html
@@ -172,6 +172,21 @@
This will activate the user without notifying them.
+
+
+ Activate BUT ON BASIC PLAN ONLY (Send Email)
+
+
+ An explanatory email will be sent specifying why the user has been activated
+ only on the basic plan.
+
{% else %}
diff --git a/impresso/templates/emails/account_activated_plan_rejected_to_user.html b/impresso/templates/emails/account_activated_plan_rejected_to_user.html
new file mode 100644
index 0000000..3107354
--- /dev/null
+++ b/impresso/templates/emails/account_activated_plan_rejected_to_user.html
@@ -0,0 +1,47 @@
+Dear {{user.first_name}},
+
+
+ Your account has been activated as a {{plan_label}} because
+ the information provided during registration did not include sufficient
+ evidence of student enrolment (for Student User plan) or an academic
+ affiliation (for Academic User plan).
+
+
+
+ If you would like to upgrade your account to a Student or Academic plan,
+ please log in and click on "Request a change of plan" (top-right menu in your
+ profile).
+
+
+
+ For Student access: You must provide a valid student email
+ address from your university (personal providers such as Gmail or Outlook are
+ not accepted).
+
+
+
+ For Academic access: Please provide a link in the institution
+ URL field to your researcher profile page on your university's website. If you
+ do not have such a profile page, you may alternatively email us proof of
+ your enrolment as a researcher at an academic institution. Please note that if
+ your job title is not academic (researcher, professor, lecturer, etc.) you are
+ unfortunately not eligible for Academic access.
+
+
+
+ We apply these requirements to comply with copyright restrictions and the
+ conditions set by our content providers while maximising research
+ opportunities.
+
+
+
+ If you have any questions about our user plan policies, please visit our
+ terms of use page . You
+ can also reach out to us via email at
+ info@impresso-project.ch .
+
+
+
+ With best wishes,
+ The Impresso team
+
diff --git a/impresso/templates/emails/account_activated_plan_rejected_to_user.txt b/impresso/templates/emails/account_activated_plan_rejected_to_user.txt
new file mode 100644
index 0000000..ea61200
--- /dev/null
+++ b/impresso/templates/emails/account_activated_plan_rejected_to_user.txt
@@ -0,0 +1,26 @@
+Dear {{user.first_name}},
+
+Your account has been activated as a {{plan_label}} because the information provided during registration did not include
+sufficient evidence of student enrolment (for Student User plan) or an academic affiliation (for Academic User plan).
+
+If you would like to upgrade your account to a Student or Academic plan, please log in and click on
+“Request a change of plan” (top-right menu in your profile).
+
+For Student access: You must provide a valid student email address from your university
+(personal providers such as Gmail or Outlook are not accepted).
+
+For Academic access: Please provide a link in the institution URL field to your researcher profile page
+on your university’s website. If you do not have such a profile page, you may alternatively email us
+proof of your enrolment as a researcher at an academic institution.
+Please note that if your job title is not academic (researcher, professor, lecturer, etc.) you are
+unfortunately not eligible for Academic access.
+
+We apply these requirements to comply with copyright restrictions and the conditions set by our content providers
+while maximising research opportunities.
+
+If you have any questions about our user plan policies, please visit our terms of use page
+at {{ impresso_base_url }}/app/terms-of-use
+You can also reach out to us via email at info@impresso-project.ch.
+
+With best wishes,
+The Impresso team
\ No newline at end of file
diff --git a/impresso/tests/utils/tasks/test_account.py b/impresso/tests/utils/tasks/test_account.py
index 7a7e088..9fd2b08 100644
--- a/impresso/tests/utils/tasks/test_account.py
+++ b/impresso/tests/utils/tasks/test_account.py
@@ -9,6 +9,7 @@
send_email_plan_change,
send_email_plan_change_accepted,
send_email_plan_change_rejected,
+ send_emails_after_user_activation_plan_rejected,
send_emails_after_user_registration,
)
from django.utils import timezone
@@ -17,6 +18,40 @@
logger = logging.getLogger("console")
+class TestAccountActivationPlanRejected(TransactionTestCase):
+ """
+ Test account activation with plan rejected
+ ENV=test pipenv run ./manage.py test impresso.tests.utils.tasks.test_account.TestAccountActivationPlanRejected
+ """
+
+ def setUp(self):
+ self.user = User.objects.create_user(
+ username="testuser",
+ first_name="Jane",
+ last_name="Doe",
+ password="12345",
+ email="test@test.com",
+ )
+
+ def test_send_emails_after_user_activation_plan_rejected(self):
+ send_emails_after_user_activation_plan_rejected(
+ user_id=self.user.id,
+ logger=logger,
+ )
+ self.assertEqual(len(mail.outbox), 1)
+ # check the subject
+ self.assertEqual(
+ mail.outbox[0].subject,
+ settings.IMPRESSO_EMAIL_SUBJECT_AFTER_USER_ACTIVATION_PLAN_REJECTED_TO_USER,
+ )
+ # check content
+ self.assertTrue("Dear Jane," in mail.outbox[0].body)
+ self.assertTrue(
+ settings.IMPRESSO_GROUP_USER_PLAN_BASIC_LABEL in mail.outbox[0].body,
+ f"should receive correct email: {mail.outbox[0].body}",
+ )
+
+
class TestAccountPlanChangeToBasicUser(TransactionTestCase):
"""
Test account plan change request
diff --git a/impresso/utils/tasks/account.py b/impresso/utils/tasks/account.py
index fb49392..1cd9a15 100644
--- a/impresso/utils/tasks/account.py
+++ b/impresso/utils/tasks/account.py
@@ -3,6 +3,8 @@
from logging import Logger
from django.core import mail
from django.contrib.auth.models import User, Group
+
+from impresso.utils.tasks.email import send_templated_email_with_context
from ...models import UserChangePlanRequest
from django_registration.backends.activation.views import RegistrationView
from django.core.mail import EmailMultiAlternatives
@@ -155,7 +157,9 @@ def send_emails_after_user_registration(user_id: int, logger=default_logger):
logger.exception(f"user={user_id} Error sending email: {e} to staff")
-def send_emails_after_user_activation(user_id, logger=default_logger):
+def send_emails_after_user_activation(
+ user_id: int, logger: Logger = default_logger
+) -> None:
logger.info(f"looking for user={user_id}...")
try:
user = User.objects.get(pk=user_id)
@@ -176,7 +180,7 @@ def send_emails_after_user_activation(user_id, logger=default_logger):
)
try:
emailMessage = EmailMultiAlternatives(
- subject="Access granted to the impresso interface",
+ subject="Access granted to the Impresso interface",
body=txt_content,
from_email=f"Impresso Team <{settings.DEFAULT_FROM_EMAIL}>",
to=[
@@ -195,6 +199,46 @@ def send_emails_after_user_activation(user_id, logger=default_logger):
logger.info(f"Password reset email sent to user={user_id}")
+def send_emails_after_user_activation_plan_rejected(
+ user_id: int, logger: Logger = default_logger
+) -> None:
+ """
+ Sends a notification email to the user informing them that their activation
+ has been processed, but they have been activated only on the BASIC plan.
+
+ Args:
+ user_id (int): The ID of the user to send the email to.
+ logger (Logger, optional): The logger to use for logging information. Defaults to default_logger.
+ Raises:
+ User.DoesNotExist: If no user with the given user_id is found.
+ Exception: If there is an error sending the email.
+ """
+ try:
+ user = User.objects.get(pk=user_id)
+ except User.DoesNotExist:
+ logger.error(f"user={user_id} NOT FOUND!")
+ return
+ send_templated_email_with_context(
+ template="account_activated_plan_rejected_to_user",
+ subject=settings.IMPRESSO_EMAIL_SUBJECT_AFTER_USER_ACTIVATION_PLAN_REJECTED_TO_USER,
+ context={
+ "user": user,
+ "impresso_base_url": settings.IMPRESSO_BASE_URL,
+ "plan_label": settings.IMPRESSO_GROUP_USER_PLAN_BASIC_LABEL,
+ },
+ from_email=settings.IMPRESSO_EMAIL_LABEL_DEFAULT_FROM_EMAIL,
+ to=[
+ user.email,
+ ],
+ cc=[],
+ reply_to=[
+ settings.DEFAULT_FROM_EMAIL,
+ ],
+ logger=logger,
+ fail_silently=False,
+ )
+
+
def send_email_password_reset(
user_id: int,
token: str = "token",
diff --git a/impresso/views/admin/user_admin.py b/impresso/views/admin/user_admin.py
index 25d2f55..350e01b 100644
--- a/impresso/views/admin/user_admin.py
+++ b/impresso/views/admin/user_admin.py
@@ -14,7 +14,7 @@
from unfold.decorators import action # type: ignore
from unfold.views import UnfoldModelAdminViewMixin # type: ignore
-from impresso.tasks import after_user_activation
+from impresso.tasks import after_user_activation, after_user_activation_plan_rejected
from impresso.utils.models.user import (
get_plan_from_user_groups,
get_plan_from_group_name,
@@ -89,6 +89,18 @@ def post(self, request, *args, **kwargs):
elif user.is_active and request.POST.get("activation_mode") == "silently":
# DO NOT SEND EMAIL
messages.success(request, "User status toggled to active, no email sent.")
+ elif user.is_active and request.POST.get("activation_mode") == "plan_rejected":
+ basic_plan_group = Group.objects.get(
+ name=settings.IMPRESSO_GROUP_USER_PLAN_BASIC
+ )
+ user.groups.clear()
+ user.groups.add(basic_plan_group)
+
+ after_user_activation_plan_rejected.delay(user_id=user.pk)
+ messages.success(
+ request,
+ "User status toggled to active on BASIC PLAN, email sent.",
+ )
else:
messages.success(
request,