Compare commits
155 Commits
938ff5b0d2
...
main
| Author | SHA1 | Date | |
|---|---|---|---|
| 92c1ee425d | |||
| ff587d9e1b | |||
| 0fab9ac0bf | |||
| 607d8eaf85 | |||
| 0dc997d2cf | |||
| 0e35fb0ad3 | |||
| 6ab6c3c0bf | |||
| e75dda84ef | |||
|
|
b0e009d606 | ||
| 3848cb6d23 | |||
| d0a90ce8ff | |||
| 9d7821b94d | |||
| 8e43409895 | |||
| 9b3992f250 | |||
| fbc9a1ff0a | |||
| 1a0617fbd0 | |||
| 15ef35e249 | |||
|
|
a450e7409f
|
||
|
|
10e39b8331
|
||
| 59cc1c41a9 | |||
|
|
2c2cb5446f
|
||
|
|
521075cf04
|
||
| 93d3e4703b | |||
| e09e6a21f0 | |||
|
|
4ea1e66cdf
|
||
| c2ad0e67c3 | |||
|
|
96a3971781 | ||
| 989d0fc20d | |||
|
|
2f9babe18e | ||
| d39fff2be0 | |||
|
|
badd61b0aa | ||
|
|
a001ac1de6 | ||
| 9bee1b9a12 | |||
|
|
4796a08acc | ||
| 17484fa815 | |||
|
|
96b49bb064 | ||
| 3ccb872cc3 | |||
|
|
b2ea693d9d | ||
|
|
48f395866b | ||
|
|
c8e01f5201 | ||
|
|
380dcb22c3 | ||
| ed878bbdae | |||
|
|
0eddb9696a | ||
|
|
c01fc14258 | ||
|
|
88ce59aecc | ||
|
|
a118df487d | ||
|
|
d0a550fee6 | ||
| cc25d2ad2e | |||
|
|
99b06d1f3b | ||
|
|
906206d4cd | ||
| eebd5c9978 | |||
|
|
2acb194d40 | ||
|
|
b897447296 | ||
|
|
d387bf4f03 | ||
|
|
be8d6d4a12 | ||
|
|
2b1e7ff4eb | ||
|
|
2c94040221 | ||
| 2d93555c60 | |||
|
|
73b023dca2 | ||
| 6555fdc41e | |||
|
|
e8b835e6fc | ||
|
|
04a55844fd | ||
|
|
f7ca4bc44b | ||
|
|
7669a5049c | ||
|
|
e2f71a801c | ||
| 49baf6a37d | |||
|
|
d65a802afb | ||
| 6342133851 | |||
|
|
d3687779a2 | ||
|
|
1c5ba6cf90 | ||
| 22d596d666 | |||
|
|
987f308e06 | ||
| bcc9305a00 | |||
|
|
62ff7f5792 | ||
| ad271aa817 | |||
|
|
8a97b6e2a0 | ||
| 43e7068110 | |||
|
|
6bae864c1e | ||
| 17d30a4073 | |||
|
|
0818f71566 | ||
| 3799d76bed | |||
|
|
fbe8546b37 | ||
| a59d21cfcb | |||
|
|
43594777e0 | ||
| f7c89be05c | |||
|
|
2e7949ac23 | ||
| f5c2f87820 | |||
|
|
abbc3c3d1d
|
||
| c028a83bef | |||
|
|
155c8f7569
|
||
| d83f7db57c | |||
|
|
221c8c19c2
|
||
| c0cd4e5037 | |||
|
|
78c4313874
|
||
| ec89a5fe35 | |||
|
|
71fe06edd1
|
||
|
|
bff59eec06
|
||
|
|
8b83712cbf
|
||
|
|
73ef38a144
|
||
|
|
1c7b96f723
|
||
| a880643d65 | |||
|
|
229c0f8b48
|
||
| d7bfb44ced | |||
|
|
ec3e1ee1bf
|
||
| 44ffae7f99 | |||
|
|
c9dab3e93b
|
||
|
|
349f1db721
|
||
| de56b564c5 | |||
|
|
833ff378ea
|
||
|
|
754b0ca5f6
|
||
| 03fcbdb5ad | |||
|
|
0cbac68ec1
|
||
| 4c27cfe1dd | |||
|
|
a598727888
|
||
| 36eb0f1dd2 | |||
|
|
f950e3cd5e
|
||
| 311ad80320 | |||
|
|
08e003e165
|
||
| 076aaa0b9e | |||
|
|
7b2ad4cfe5
|
||
|
|
56e53478ea
|
||
| 96f9eca19d | |||
|
|
f6edcadd46
|
||
|
|
4992b0cb9d
|
||
|
|
9d323d2040
|
||
| aeb0afb2ea | |||
|
|
b73a2d4d72
|
||
| 7d226bc4ef | |||
|
|
9b677741cb | ||
| c5f2902417 | |||
|
|
5d1c5f43bc | ||
| 027dff20e3 | |||
|
|
c4fde90a9c | ||
|
|
cfe0cbca62 | ||
|
|
5adff60d4b | ||
|
|
0c9340d279 | ||
|
|
ebddb6c904 | ||
|
|
29e3589b1a | ||
|
|
14db1bb57e | ||
|
|
36ac487cbd
|
||
|
|
932b05cc02
|
||
|
|
683cba4280
|
||
|
|
2cb1e622e2
|
||
|
|
11b89e9e1c
|
||
|
|
06be5d6752
|
||
|
|
ebdf20e708
|
||
|
|
47e8afea18
|
||
|
|
630c86221f
|
||
|
|
2d2edd8605
|
||
|
|
0b5fca3be6
|
||
|
|
eb2cdfc5f2
|
||
|
|
82e6bc2ee0
|
||
|
|
e279e15c9c
|
||
|
|
6fc28f9d9a
|
||
|
|
ca211c14e9
|
15
.dockerignore
Normal file
15
.dockerignore
Normal file
@@ -0,0 +1,15 @@
|
||||
.git
|
||||
.gitea
|
||||
.github
|
||||
.venv
|
||||
__pycache__/
|
||||
*.pyc
|
||||
*.pyo
|
||||
*.pyd
|
||||
.pytest_cache/
|
||||
.mypy_cache/
|
||||
.ruff_cache/
|
||||
.coverage
|
||||
.benchmarks/
|
||||
media/
|
||||
staticfiles/
|
||||
248
.gitea/workflows/ci.yml
Normal file
248
.gitea/workflows/ci.yml
Normal file
@@ -0,0 +1,248 @@
|
||||
name: CI
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
schedule:
|
||||
- cron: "0 2 * * *"
|
||||
|
||||
concurrency:
|
||||
group: ci-pr-${{ github.event.pull_request.number || github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
ci:
|
||||
if: github.event_name == 'pull_request'
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
CI_IMAGE: nohype-ci:${{ github.run_id }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Build
|
||||
run: docker build -t "$CI_IMAGE" .
|
||||
|
||||
- name: Start PostgreSQL
|
||||
run: |
|
||||
docker run -d --name ci-postgres \
|
||||
-e POSTGRES_DB=nohype \
|
||||
-e POSTGRES_USER=nohype \
|
||||
-e POSTGRES_PASSWORD=nohype \
|
||||
postgres:16-alpine
|
||||
for i in $(seq 1 30); do
|
||||
if docker exec ci-postgres pg_isready -U nohype -d nohype >/dev/null; then
|
||||
exit 0
|
||||
fi
|
||||
sleep 2
|
||||
done
|
||||
docker logs ci-postgres || true
|
||||
exit 1
|
||||
|
||||
- name: Ruff
|
||||
run: docker run --rm --network container:ci-postgres -e SECRET_KEY=ci-secret-key -e DATABASE_URL=postgres://nohype:nohype@127.0.0.1:5432/nohype "$CI_IMAGE" ruff check .
|
||||
|
||||
- name: Mypy
|
||||
run: docker run --rm --network container:ci-postgres -e SECRET_KEY=ci-secret-key -e DATABASE_URL=postgres://nohype:nohype@127.0.0.1:5432/nohype "$CI_IMAGE" mypy apps config
|
||||
|
||||
- name: Pytest
|
||||
run: docker run --rm --network container:ci-postgres -e SECRET_KEY=ci-secret-key -e DATABASE_URL=postgres://nohype:nohype@127.0.0.1:5432/nohype "$CI_IMAGE" pytest --ignore=e2e
|
||||
|
||||
- name: Tailwind build (assert generated diff is clean)
|
||||
run: |
|
||||
docker run --name ci-tailwind \
|
||||
--network container:ci-postgres \
|
||||
-e SECRET_KEY=ci-secret-key \
|
||||
-e DATABASE_URL=postgres://nohype:nohype@127.0.0.1:5432/nohype \
|
||||
"$CI_IMAGE" \
|
||||
sh -lc "python manage.py tailwind install --no-input && python manage.py tailwind build"
|
||||
docker cp ci-tailwind:/app/theme/static/css/styles.css /tmp/ci-styles.css
|
||||
docker rm -f ci-tailwind
|
||||
cmp -s theme/static/css/styles.css /tmp/ci-styles.css
|
||||
|
||||
- name: Remove PostgreSQL
|
||||
if: always()
|
||||
run: |
|
||||
docker rm -f ci-postgres || true
|
||||
|
||||
- name: Remove CI image
|
||||
if: always()
|
||||
run: docker image rm -f "$CI_IMAGE" || true
|
||||
|
||||
pr-e2e:
|
||||
if: github.event_name == 'pull_request'
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
CI_IMAGE: nohype-ci-e2e:${{ github.run_id }}
|
||||
PLAYWRIGHT_CACHE_VOLUME: nohype-playwright-browsers
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Build
|
||||
run: docker build -t "$CI_IMAGE" .
|
||||
|
||||
- name: Ensure Playwright Chromium cache
|
||||
run: |
|
||||
docker volume create "$PLAYWRIGHT_CACHE_VOLUME" >/dev/null
|
||||
docker run --rm \
|
||||
-v "$PLAYWRIGHT_CACHE_VOLUME:/ms-playwright" \
|
||||
-e PLAYWRIGHT_BROWSERS_PATH=/ms-playwright \
|
||||
"$CI_IMAGE" \
|
||||
python -m playwright install chromium
|
||||
|
||||
- name: Start PostgreSQL
|
||||
run: |
|
||||
docker run -d --name pr-e2e-postgres \
|
||||
-e POSTGRES_DB=nohype \
|
||||
-e POSTGRES_USER=nohype \
|
||||
-e POSTGRES_PASSWORD=nohype \
|
||||
postgres:16-alpine
|
||||
for i in $(seq 1 30); do
|
||||
if docker exec pr-e2e-postgres pg_isready -U nohype -d nohype >/dev/null; then
|
||||
exit 0
|
||||
fi
|
||||
sleep 2
|
||||
done
|
||||
docker logs pr-e2e-postgres || true
|
||||
exit 1
|
||||
|
||||
- name: Start app with seeded content
|
||||
run: |
|
||||
docker run -d --name pr-e2e-app --network container:pr-e2e-postgres \
|
||||
-v "$PLAYWRIGHT_CACHE_VOLUME:/ms-playwright:ro" \
|
||||
-e SECRET_KEY=ci-secret-key \
|
||||
-e DATABASE_URL=postgres://nohype:nohype@127.0.0.1:5432/nohype \
|
||||
-e CONSENT_POLICY_VERSION=1 \
|
||||
-e EMAIL_BACKEND=django.core.mail.backends.console.EmailBackend \
|
||||
-e DEFAULT_FROM_EMAIL=hello@nohypeai.com \
|
||||
-e NEWSLETTER_PROVIDER=buttondown \
|
||||
-e PLAYWRIGHT_BROWSERS_PATH=/ms-playwright \
|
||||
-e E2E_MODE=1 \
|
||||
"$CI_IMAGE" \
|
||||
sh -lc "python manage.py migrate --noinput && python manage.py seed_e2e_content && python manage.py runserver 0.0.0.0:8000"
|
||||
for i in $(seq 1 40); do
|
||||
if docker exec pr-e2e-app curl -fsS http://127.0.0.1:8000/ >/dev/null; then
|
||||
exit 0
|
||||
fi
|
||||
sleep 2
|
||||
done
|
||||
docker logs pr-e2e-app || true
|
||||
exit 1
|
||||
|
||||
- name: Run E2E tests
|
||||
run: |
|
||||
docker exec -e E2E_BASE_URL=http://127.0.0.1:8000 pr-e2e-app \
|
||||
pytest e2e/ -o addopts='' -q --tb=short
|
||||
|
||||
- name: Remove containers
|
||||
if: always()
|
||||
run: |
|
||||
docker rm -f pr-e2e-app || true
|
||||
docker rm -f pr-e2e-postgres || true
|
||||
|
||||
- name: Remove CI image
|
||||
if: always()
|
||||
run: docker image rm -f "$CI_IMAGE" || true
|
||||
|
||||
nightly-e2e:
|
||||
if: github.event_name == 'schedule'
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
CI_IMAGE: nohype-ci-nightly:${{ github.run_id }}
|
||||
PLAYWRIGHT_CACHE_VOLUME: nohype-playwright-browsers
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Build
|
||||
run: docker build -t "$CI_IMAGE" .
|
||||
- name: Ensure Playwright Chromium cache
|
||||
run: |
|
||||
docker volume create "$PLAYWRIGHT_CACHE_VOLUME" >/dev/null
|
||||
docker run --rm \
|
||||
-v "$PLAYWRIGHT_CACHE_VOLUME:/ms-playwright" \
|
||||
-e PLAYWRIGHT_BROWSERS_PATH=/ms-playwright \
|
||||
"$CI_IMAGE" \
|
||||
python -m playwright install chromium
|
||||
- name: Start PostgreSQL
|
||||
run: |
|
||||
docker run -d --name nightly-postgres \
|
||||
-e POSTGRES_DB=nohype \
|
||||
-e POSTGRES_USER=nohype \
|
||||
-e POSTGRES_PASSWORD=nohype \
|
||||
postgres:16-alpine
|
||||
for i in $(seq 1 30); do
|
||||
if docker exec nightly-postgres pg_isready -U nohype -d nohype >/dev/null; then
|
||||
exit 0
|
||||
fi
|
||||
sleep 2
|
||||
done
|
||||
docker logs nightly-postgres || true
|
||||
exit 1
|
||||
- name: Start dev server with seeded content
|
||||
run: |
|
||||
docker run -d --name nightly-e2e --network container:nightly-postgres \
|
||||
-v "$PLAYWRIGHT_CACHE_VOLUME:/ms-playwright:ro" \
|
||||
-e SECRET_KEY=ci-secret-key \
|
||||
-e DATABASE_URL=postgres://nohype:nohype@127.0.0.1:5432/nohype \
|
||||
-e CONSENT_POLICY_VERSION=1 \
|
||||
-e EMAIL_BACKEND=django.core.mail.backends.console.EmailBackend \
|
||||
-e DEFAULT_FROM_EMAIL=hello@nohypeai.com \
|
||||
-e NEWSLETTER_PROVIDER=buttondown \
|
||||
-e PLAYWRIGHT_BROWSERS_PATH=/ms-playwright \
|
||||
-e E2E_MODE=1 \
|
||||
"$CI_IMAGE" \
|
||||
sh -lc "python manage.py migrate --noinput && python manage.py seed_e2e_content && python manage.py runserver 0.0.0.0:8000"
|
||||
for i in $(seq 1 40); do
|
||||
if docker exec nightly-e2e curl -fsS http://127.0.0.1:8000/ >/dev/null; then
|
||||
exit 0
|
||||
fi
|
||||
sleep 2
|
||||
done
|
||||
docker logs nightly-e2e || true
|
||||
exit 1
|
||||
- name: Run Playwright E2E tests
|
||||
run: |
|
||||
docker exec -e E2E_BASE_URL=http://127.0.0.1:8000 nightly-e2e \
|
||||
pytest e2e/ apps/core/tests/test_nightly_e2e_playwright.py -o addopts='' -q --tb=short
|
||||
- name: Remove nightly container
|
||||
if: always()
|
||||
run: |
|
||||
docker rm -f nightly-e2e || true
|
||||
docker rm -f nightly-postgres || true
|
||||
- name: Remove CI image
|
||||
if: always()
|
||||
run: docker image rm -f "$CI_IMAGE" || true
|
||||
|
||||
deploy:
|
||||
if: github.event_name == 'push' && github.ref == 'refs/heads/main'
|
||||
runs-on:
|
||||
- ubuntu-latest
|
||||
- agent-workspace
|
||||
env:
|
||||
BAO_TOKEN_FILE: /run/openbao-agent-ci_runner/token
|
||||
steps:
|
||||
- name: Configure SSH via OpenBao CA
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
: "${OPENBAO_ADDR:?OPENBAO_ADDR must be set by the runner environment}"
|
||||
mkdir -p ~/.ssh && chmod 700 ~/.ssh
|
||||
ssh-keygen -t ed25519 -f ~/.ssh/id_ed25519 -N "" -q
|
||||
BAO_TOKEN="$(<"$BAO_TOKEN_FILE")"
|
||||
SIGNED_KEY=$(curl -fsS \
|
||||
-H "X-Vault-Token: $BAO_TOKEN" \
|
||||
-H "X-Vault-Request: true" \
|
||||
-X POST \
|
||||
-d "{\"public_key\": \"$(cat ~/.ssh/id_ed25519.pub)\", \"valid_principals\": \"${{ vars.DEPLOY_USER }}\"}" \
|
||||
"${OPENBAO_ADDR}/v1/ssh/sign/${{ vars.DEPLOY_SSH_ROLE }}" \
|
||||
| jq -r '.data.signed_key')
|
||||
[ -n "$SIGNED_KEY" ] && [ "$SIGNED_KEY" != "null" ] \
|
||||
|| { echo "ERROR: failed to sign SSH key via OpenBao CA" >&2; exit 1; }
|
||||
printf '%s\n' "$SIGNED_KEY" > ~/.ssh/id_ed25519-cert.pub
|
||||
unset BAO_TOKEN SIGNED_KEY
|
||||
|
||||
- name: Add deploy host to known_hosts
|
||||
run: ssh-keyscan -H "${{ vars.DEPLOY_HOST }}" >> ~/.ssh/known_hosts 2>/dev/null
|
||||
|
||||
- name: Deploy to lintel-prod-01
|
||||
run: ssh "${{ vars.DEPLOY_USER }}@${{ vars.DEPLOY_HOST }}" "bash /srv/sum/nohype/app/deploy/deploy.sh"
|
||||
20
.github/workflows/ci.yml
vendored
20
.github/workflows/ci.yml
vendored
@@ -1,20 +0,0 @@
|
||||
name: CI
|
||||
|
||||
on:
|
||||
push:
|
||||
pull_request:
|
||||
|
||||
jobs:
|
||||
test:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: docker/setup-buildx-action@v3
|
||||
- name: Build
|
||||
run: docker compose build
|
||||
- name: Pytest
|
||||
run: docker compose run --rm web pytest
|
||||
- name: Ruff
|
||||
run: docker compose run --rm web ruff check .
|
||||
- name: Mypy
|
||||
run: docker compose run --rm web mypy apps config
|
||||
@@ -10,3 +10,6 @@
|
||||
- Added newsletter subscription + confirmation flow with provider sync abstraction.
|
||||
- Added templates/static assets baseline for homepage, article index/read, legal, about.
|
||||
- Added pytest suite with >90% coverage enforcement and passing Docker CI checks.
|
||||
- Added PR-only containerized CI path (`docker build` + `docker run`) to avoid compose-network exhaustion on shared runners.
|
||||
- Added newsletter signup forms in nav/footer/article, client-side progressive submit UX, and article social share controls.
|
||||
- Added content integrity management command and comment data-retention purge command with automated tests.
|
||||
|
||||
42
Dockerfile
42
Dockerfile
@@ -1,16 +1,47 @@
|
||||
FROM python:3.12-slim
|
||||
FROM python:3.12-slim-bookworm
|
||||
|
||||
ENV PYTHONDONTWRITEBYTECODE=1 \
|
||||
PYTHONUNBUFFERED=1 \
|
||||
PIP_NO_CACHE_DIR=1
|
||||
|
||||
RUN apt-get update && apt-get install -y --no-install-recommends \
|
||||
RUN set -eux; \
|
||||
sed -i 's|http://deb.debian.org|https://deb.debian.org|g' /etc/apt/sources.list.d/debian.sources; \
|
||||
printf '%s\n' \
|
||||
'Acquire::Retries "8";' \
|
||||
'Acquire::http::No-Cache "true";' \
|
||||
'Acquire::https::No-Cache "true";' \
|
||||
'Acquire::http::Pipeline-Depth "0";' \
|
||||
'Acquire::BrokenProxy "true";' \
|
||||
> /etc/apt/apt.conf.d/99docker-hardening; \
|
||||
apt-get update; \
|
||||
for attempt in 1 2 3; do \
|
||||
apt-get install -y --no-install-recommends \
|
||||
build-essential \
|
||||
libpq-dev \
|
||||
libavif-dev \
|
||||
curl \
|
||||
nodejs \
|
||||
npm \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
libasound2 \
|
||||
libatk-bridge2.0-0 \
|
||||
libatk1.0-0 \
|
||||
libcups2 \
|
||||
libgbm1 \
|
||||
libgtk-3-0 \
|
||||
libnss3 \
|
||||
libx11-xcb1 \
|
||||
libxcomposite1 \
|
||||
libxdamage1 \
|
||||
libxfixes3 \
|
||||
libxrandr2 \
|
||||
fonts-liberation \
|
||||
&& break; \
|
||||
if [ "$attempt" -eq 3 ]; then exit 1; fi; \
|
||||
rm -rf /var/lib/apt/lists/*; \
|
||||
sleep "$((attempt * 5))"; \
|
||||
apt-get update; \
|
||||
done; \
|
||||
rm -rf /var/lib/apt/lists/*
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
@@ -19,4 +50,9 @@ RUN pip install --upgrade pip && pip install -r requirements/base.txt
|
||||
|
||||
COPY . /app
|
||||
|
||||
ARG GIT_SHA=unknown
|
||||
ARG BUILD_ID=unknown
|
||||
ENV GIT_SHA=${GIT_SHA} \
|
||||
BUILD_ID=${BUILD_ID}
|
||||
|
||||
CMD ["python", "manage.py", "runserver", "0.0.0.0:8000"]
|
||||
|
||||
123
Makefile
Normal file
123
Makefile
Normal file
@@ -0,0 +1,123 @@
|
||||
DC = docker compose -f /srv/sum/nohype/docker-compose.prod.yml
|
||||
WEB = $(DC) exec web
|
||||
MANAGE = $(WEB) python manage.py
|
||||
|
||||
.DEFAULT_GOAL := help
|
||||
|
||||
# ── Help ──────────────────────────────────────────────────────────────────────
|
||||
|
||||
.PHONY: help
|
||||
help:
|
||||
@grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) \
|
||||
| awk 'BEGIN {FS = ":.*?## "}; {printf " \033[36m%-28s\033[0m %s\n", $$1, $$2}' \
|
||||
| sort
|
||||
|
||||
# ── Docker ────────────────────────────────────────────────────────────────────
|
||||
|
||||
.PHONY: build
|
||||
build: ## Build / rebuild images
|
||||
$(DC) build
|
||||
|
||||
.PHONY: up
|
||||
up: ## Start services (detached)
|
||||
$(DC) up -d
|
||||
|
||||
.PHONY: run
|
||||
run: ## Start services in foreground (with logs)
|
||||
$(DC) up
|
||||
|
||||
.PHONY: down
|
||||
down: ## Stop and remove containers
|
||||
$(DC) down
|
||||
|
||||
.PHONY: restart
|
||||
restart: ## Restart all services
|
||||
$(DC) restart
|
||||
|
||||
.PHONY: logs
|
||||
logs: ## Tail logs for all services (Ctrl-C to stop)
|
||||
$(DC) logs -f
|
||||
|
||||
.PHONY: logs-web
|
||||
logs-web: ## Tail web service logs
|
||||
$(DC) logs -f web
|
||||
|
||||
.PHONY: ps
|
||||
ps: ## Show running containers
|
||||
$(DC) ps
|
||||
|
||||
# ── Django ────────────────────────────────────────────────────────────────────
|
||||
|
||||
.PHONY: migrate
|
||||
migrate: ## Apply database migrations
|
||||
$(MANAGE) migrate --noinput
|
||||
|
||||
.PHONY: makemigrations
|
||||
makemigrations: ## Create new migrations (pass app= to target an app)
|
||||
$(MANAGE) makemigrations $(app)
|
||||
|
||||
.PHONY: showmigrations
|
||||
showmigrations: ## List all migrations and their status
|
||||
$(MANAGE) showmigrations
|
||||
|
||||
.PHONY: createsuperuser
|
||||
createsuperuser: ## Create a Django superuser interactively
|
||||
$(MANAGE) createsuperuser
|
||||
|
||||
.PHONY: collectstatic
|
||||
collectstatic: ## Collect static files
|
||||
$(MANAGE) collectstatic --noinput
|
||||
|
||||
.PHONY: shell
|
||||
shell: ## Open a Django shell (inside the web container)
|
||||
$(MANAGE) shell
|
||||
|
||||
.PHONY: dbshell
|
||||
dbshell: ## Open a Django database shell
|
||||
$(MANAGE) dbshell
|
||||
|
||||
.PHONY: bash
|
||||
bash: ## Open a bash shell inside the web container
|
||||
$(WEB) bash
|
||||
|
||||
.PHONY: psql
|
||||
psql: ## Open a psql shell in the db container
|
||||
$(DC) exec db psql -U nohype -d nohype
|
||||
|
||||
# ── Tailwind ──────────────────────────────────────────────────────────────────
|
||||
|
||||
.PHONY: tailwind-install
|
||||
tailwind-install: ## Install Tailwind npm dependencies
|
||||
$(MANAGE) tailwind install --no-input
|
||||
|
||||
.PHONY: tailwind-build
|
||||
tailwind-build: ## Build Tailwind CSS
|
||||
$(MANAGE) tailwind build
|
||||
|
||||
.PHONY: tailwind-watch
|
||||
tailwind-watch: ## Watch and rebuild Tailwind CSS on changes
|
||||
$(MANAGE) tailwind start
|
||||
|
||||
# ── Testing ───────────────────────────────────────────────────────────────────
|
||||
|
||||
.PHONY: test
|
||||
test: ## Run unit/integration tests with pytest
|
||||
$(DC) exec web pytest $(args)
|
||||
|
||||
.PHONY: test-e2e
|
||||
test-e2e: ## Run Playwright E2E tests
|
||||
$(DC) exec web pytest e2e/ $(args)
|
||||
|
||||
# ── Custom management commands ────────────────────────────────────────────────
|
||||
|
||||
.PHONY: seed
|
||||
seed: ## Seed deterministic E2E content
|
||||
$(MANAGE) seed_e2e_content
|
||||
|
||||
.PHONY: check-content
|
||||
check-content: ## Validate live content integrity
|
||||
$(MANAGE) check_content_integrity
|
||||
|
||||
.PHONY: purge-comments
|
||||
purge-comments: ## Purge old comment personal data (pass months=N to override default 24)
|
||||
$(MANAGE) purge_old_comment_data $(if $(months),--months $(months),)
|
||||
10
README.md
10
README.md
@@ -48,6 +48,8 @@ git pull origin main
|
||||
pip install -r requirements/production.txt
|
||||
python manage.py migrate --run-syncdb
|
||||
python manage.py collectstatic --noinput
|
||||
python manage.py tailwind build
|
||||
python manage.py check_content_integrity
|
||||
sudo systemctl reload gunicorn
|
||||
```
|
||||
|
||||
@@ -55,3 +57,11 @@ sudo systemctl reload gunicorn
|
||||
|
||||
- PostgreSQL dump daily: `pg_dump | gzip > backup-$(date +%Y%m%d).sql.gz`
|
||||
- `MEDIA_ROOT` rsynced offsite daily
|
||||
- Restore DB: `gunzip -c backup-YYYYMMDD.sql.gz | psql "$DATABASE_URL"`
|
||||
- Restore media: `rsync -avz <backup-host>:/path/to/media/ /srv/nohypeai/media/`
|
||||
|
||||
## Runtime Notes
|
||||
|
||||
- Keep Caddy serving `/static/` and `/media/` directly in production.
|
||||
- Keep Gunicorn behind Caddy and run from a systemd service/socket pair.
|
||||
- Use `python manage.py purge_old_comment_data --months 24` in cron for comment-data retention.
|
||||
|
||||
@@ -3,7 +3,7 @@ from django.contrib.syndication.views import Feed
|
||||
from django.shortcuts import get_object_or_404
|
||||
from taggit.models import Tag
|
||||
|
||||
from apps.blog.models import ArticlePage
|
||||
from apps.blog.models import ArticlePage, Category
|
||||
|
||||
|
||||
class AllArticlesFeed(Feed):
|
||||
@@ -11,8 +11,12 @@ class AllArticlesFeed(Feed):
|
||||
link = "/articles/"
|
||||
description = "Honest AI coding tool reviews for developers."
|
||||
|
||||
def get_object(self, request):
|
||||
self.request = request
|
||||
return None
|
||||
|
||||
def items(self):
|
||||
return ArticlePage.objects.live().order_by("-first_published_at")[:20]
|
||||
return ArticlePage.objects.live().order_by("-published_date")[:20]
|
||||
|
||||
def item_title(self, item: ArticlePage):
|
||||
return item.title
|
||||
@@ -21,21 +25,38 @@ class AllArticlesFeed(Feed):
|
||||
return item.summary
|
||||
|
||||
def item_pubdate(self, item: ArticlePage):
|
||||
return item.first_published_at
|
||||
return item.published_date or item.first_published_at
|
||||
|
||||
def item_author_name(self, item: ArticlePage):
|
||||
return item.author.name
|
||||
|
||||
def item_link(self, item: ArticlePage):
|
||||
return f"{settings.WAGTAILADMIN_BASE_URL}{item.url}"
|
||||
if hasattr(self, "request") and self.request is not None:
|
||||
full_url = item.get_full_url(self.request)
|
||||
if full_url:
|
||||
return full_url
|
||||
return f"{settings.WAGTAILADMIN_BASE_URL.rstrip('/')}{item.url}"
|
||||
|
||||
|
||||
class TagArticlesFeed(AllArticlesFeed):
|
||||
def get_object(self, request, tag_slug: str):
|
||||
self.request = request
|
||||
return get_object_or_404(Tag, slug=tag_slug)
|
||||
|
||||
def title(self, obj):
|
||||
return f"No Hype AI — {obj.name}"
|
||||
|
||||
def items(self, obj):
|
||||
return ArticlePage.objects.live().filter(tags=obj).order_by("-first_published_at")[:20]
|
||||
return ArticlePage.objects.live().filter(tags=obj).order_by("-published_date")[:20]
|
||||
|
||||
|
||||
class CategoryArticlesFeed(AllArticlesFeed):
|
||||
def get_object(self, request, category_slug: str):
|
||||
self.request = request
|
||||
return get_object_or_404(Category, slug=category_slug)
|
||||
|
||||
def title(self, obj):
|
||||
return f"No Hype AI — {obj.name}"
|
||||
|
||||
def items(self, obj):
|
||||
return ArticlePage.objects.live().filter(category=obj).order_by("-published_date")[:20]
|
||||
|
||||
86
apps/blog/migrations/0002_category_articlepage_category.py
Normal file
86
apps/blog/migrations/0002_category_articlepage_category.py
Normal file
@@ -0,0 +1,86 @@
|
||||
# Generated by Django 5.2.11 on 2026-03-03
|
||||
|
||||
import django.db.models.deletion
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
def create_default_category(apps, schema_editor):
|
||||
Category = apps.get_model("blog", "Category")
|
||||
Category.objects.get_or_create(
|
||||
slug="general",
|
||||
defaults={
|
||||
"name": "General",
|
||||
"description": "General articles",
|
||||
"colour": "neutral",
|
||||
"sort_order": 0,
|
||||
"show_in_nav": True,
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
def assign_default_category_to_articles(apps, schema_editor):
|
||||
Category = apps.get_model("blog", "Category")
|
||||
ArticlePage = apps.get_model("blog", "ArticlePage")
|
||||
default_category = Category.objects.get(slug="general")
|
||||
ArticlePage.objects.filter(category__isnull=True).update(category=default_category)
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("blog", "0001_initial"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name="Category",
|
||||
fields=[
|
||||
("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
|
||||
("name", models.CharField(max_length=100, unique=True)),
|
||||
("slug", models.SlugField(unique=True)),
|
||||
("description", models.TextField(blank=True)),
|
||||
(
|
||||
"hero_image",
|
||||
models.ForeignKey(
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.SET_NULL,
|
||||
related_name="+",
|
||||
to="wagtailimages.image",
|
||||
),
|
||||
),
|
||||
(
|
||||
"colour",
|
||||
models.CharField(
|
||||
choices=[("cyan", "Cyan"), ("pink", "Pink"), ("neutral", "Neutral")],
|
||||
default="neutral",
|
||||
max_length=20,
|
||||
),
|
||||
),
|
||||
("sort_order", models.IntegerField(default=0)),
|
||||
("show_in_nav", models.BooleanField(default=True)),
|
||||
],
|
||||
options={"ordering": ["sort_order", "name"]},
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="articlepage",
|
||||
name="category",
|
||||
field=models.ForeignKey(
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.PROTECT,
|
||||
related_name="+",
|
||||
to="blog.category",
|
||||
),
|
||||
),
|
||||
migrations.RunPython(create_default_category, migrations.RunPython.noop),
|
||||
migrations.RunPython(assign_default_category_to_articles, migrations.RunPython.noop),
|
||||
migrations.AlterField(
|
||||
model_name="articlepage",
|
||||
name="category",
|
||||
field=models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.PROTECT,
|
||||
related_name="+",
|
||||
to="blog.category",
|
||||
),
|
||||
),
|
||||
]
|
||||
18
apps/blog/migrations/0003_add_published_date.py
Normal file
18
apps/blog/migrations/0003_add_published_date.py
Normal file
@@ -0,0 +1,18 @@
|
||||
# Generated by Django 5.2.11 on 2026-03-03 13:59
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('blog', '0002_category_articlepage_category'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='articlepage',
|
||||
name='published_date',
|
||||
field=models.DateTimeField(blank=True, help_text='Display date for this article. Auto-set on first publish if left blank.', null=True),
|
||||
),
|
||||
]
|
||||
24
apps/blog/migrations/0004_backfill_published_date.py
Normal file
24
apps/blog/migrations/0004_backfill_published_date.py
Normal file
@@ -0,0 +1,24 @@
|
||||
# Generated by Django 5.2.11 on 2026-03-03 13:59
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
def backfill_published_date(apps, schema_editor):
|
||||
schema_editor.execute(
|
||||
"UPDATE blog_articlepage SET published_date = p.first_published_at "
|
||||
"FROM wagtailcore_page p "
|
||||
"WHERE blog_articlepage.page_ptr_id = p.id "
|
||||
"AND blog_articlepage.published_date IS NULL "
|
||||
"AND p.first_published_at IS NOT NULL"
|
||||
)
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('blog', '0003_add_published_date'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(backfill_published_date, migrations.RunPython.noop),
|
||||
]
|
||||
@@ -0,0 +1,17 @@
|
||||
# Generated by Django 5.2.12 on 2026-03-19 00:10
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('blog', '0004_backfill_published_date'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterModelOptions(
|
||||
name='category',
|
||||
options={'ordering': ['sort_order', 'name'], 'verbose_name_plural': 'categories'},
|
||||
),
|
||||
]
|
||||
@@ -1,23 +1,50 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import hashlib
|
||||
import re
|
||||
from math import ceil
|
||||
from typing import Any
|
||||
|
||||
from django.core.paginator import EmptyPage, PageNotAnInteger, Paginator
|
||||
from django.db import models
|
||||
from django.db.models import CASCADE, PROTECT, SET_NULL
|
||||
from django.db.models import CASCADE, PROTECT, SET_NULL, Prefetch
|
||||
from django.shortcuts import get_object_or_404
|
||||
from django.utils.html import strip_tags
|
||||
from django.utils.text import slugify
|
||||
from modelcluster.contrib.taggit import ClusterTaggableManager
|
||||
from modelcluster.fields import ParentalKey
|
||||
from taggit.models import TaggedItemBase
|
||||
from wagtail.admin.panels import FieldPanel, PageChooserPanel
|
||||
from taggit.models import Tag, TaggedItemBase
|
||||
from wagtail.admin.forms.pages import WagtailAdminPageForm
|
||||
from wagtail.admin.panels import FieldPanel, ObjectList, PageChooserPanel, TabbedInterface
|
||||
from wagtail.contrib.routable_page.models import RoutablePageMixin, route
|
||||
from wagtail.fields import RichTextField, StreamField
|
||||
from wagtail.models import Page
|
||||
from wagtail.search import index
|
||||
from wagtailseo.models import SeoMixin
|
||||
|
||||
from apps.authors.models import Author
|
||||
from apps.blog.blocks import ARTICLE_BODY_BLOCKS
|
||||
|
||||
|
||||
def _generate_summary_from_stream(body: Any, *, max_chars: int = 220) -> str:
|
||||
parts: list[str] = []
|
||||
if body is None:
|
||||
return ""
|
||||
for block in body:
|
||||
if getattr(block, "block_type", None) == "code":
|
||||
continue
|
||||
value = getattr(block, "value", block)
|
||||
text = value.source if hasattr(value, "source") else str(value)
|
||||
clean_text = strip_tags(text)
|
||||
if clean_text:
|
||||
parts.append(clean_text)
|
||||
summary = re.sub(r"\s+", " ", " ".join(parts)).strip()
|
||||
if len(summary) <= max_chars:
|
||||
return summary
|
||||
truncated = summary[:max_chars].rsplit(" ", 1)[0].strip()
|
||||
return truncated or summary[:max_chars].strip()
|
||||
|
||||
|
||||
class HomePage(Page):
|
||||
featured_article = models.ForeignKey(
|
||||
"blog.ArticlePage", null=True, blank=True, on_delete=SET_NULL, related_name="+"
|
||||
@@ -31,20 +58,27 @@ class HomePage(Page):
|
||||
|
||||
def get_context(self, request, *args, **kwargs):
|
||||
ctx = super().get_context(request, *args, **kwargs)
|
||||
articles = (
|
||||
articles_qs = (
|
||||
ArticlePage.objects.live()
|
||||
.public()
|
||||
.select_related("author")
|
||||
.select_related("author", "category")
|
||||
.prefetch_related("tags__metadata")
|
||||
.order_by("-first_published_at")
|
||||
.order_by("-published_date")
|
||||
)
|
||||
articles = list(articles_qs[:5])
|
||||
ctx["featured_article"] = self.featured_article
|
||||
ctx["latest_articles"] = articles[:5]
|
||||
ctx["latest_articles"] = articles
|
||||
ctx["more_articles"] = articles[:3]
|
||||
ctx["available_tags"] = (
|
||||
Tag.objects.filter(
|
||||
id__in=ArticlePage.objects.live().public().values_list("tags__id", flat=True)
|
||||
).distinct().order_by("name")
|
||||
)
|
||||
ctx["available_categories"] = Category.objects.filter(show_in_nav=True).order_by("sort_order", "name")
|
||||
return ctx
|
||||
|
||||
|
||||
class ArticleIndexPage(Page):
|
||||
class ArticleIndexPage(RoutablePageMixin, Page):
|
||||
parent_page_types = ["blog.HomePage"]
|
||||
subpage_types = ["blog.ArticlePage"]
|
||||
ARTICLES_PER_PAGE = 12
|
||||
@@ -53,15 +87,27 @@ class ArticleIndexPage(Page):
|
||||
return (
|
||||
ArticlePage.objects.child_of(self)
|
||||
.live()
|
||||
.select_related("author")
|
||||
.select_related("author", "category")
|
||||
.prefetch_related("tags__metadata")
|
||||
.order_by("-first_published_at")
|
||||
.order_by("-published_date")
|
||||
)
|
||||
|
||||
def get_context(self, request, *args, **kwargs):
|
||||
ctx = super().get_context(request, *args, **kwargs)
|
||||
def get_category_url(self, category):
|
||||
return f"{self.url}category/{category.slug}/"
|
||||
|
||||
def get_listing_context(self, request, active_category=None):
|
||||
tag_slug = request.GET.get("tag")
|
||||
articles = self.get_articles()
|
||||
available_categories = Category.objects.order_by("sort_order", "name")
|
||||
category_links = [
|
||||
{"category": category, "url": self.get_category_url(category)}
|
||||
for category in available_categories
|
||||
]
|
||||
if active_category:
|
||||
articles = articles.filter(category=active_category)
|
||||
available_tags = (
|
||||
Tag.objects.filter(id__in=articles.values_list("tags__id", flat=True)).distinct().order_by("name")
|
||||
)
|
||||
if tag_slug:
|
||||
articles = articles.filter(tags__slug=tag_slug)
|
||||
paginator = Paginator(articles, self.ARTICLES_PER_PAGE)
|
||||
@@ -72,9 +118,25 @@ class ArticleIndexPage(Page):
|
||||
page_obj = paginator.page(1)
|
||||
except EmptyPage:
|
||||
page_obj = paginator.page(paginator.num_pages)
|
||||
ctx["articles"] = page_obj
|
||||
ctx["paginator"] = paginator
|
||||
ctx["active_tag"] = tag_slug
|
||||
return {
|
||||
"articles": page_obj,
|
||||
"paginator": paginator,
|
||||
"active_tag": tag_slug,
|
||||
"available_tags": available_tags,
|
||||
"available_categories": available_categories,
|
||||
"category_links": category_links,
|
||||
"active_category": active_category,
|
||||
"active_category_url": self.get_category_url(active_category) if active_category else "",
|
||||
}
|
||||
|
||||
@route(r"^category/(?P<category_slug>[-\w]+)/$")
|
||||
def category_listing(self, request, category_slug):
|
||||
category = get_object_or_404(Category, slug=category_slug)
|
||||
return self.render(request, context_overrides=self.get_listing_context(request, active_category=category))
|
||||
|
||||
def get_context(self, request, *args, **kwargs):
|
||||
ctx = super().get_context(request, *args, **kwargs)
|
||||
ctx.update(self.get_listing_context(request))
|
||||
return ctx
|
||||
|
||||
|
||||
@@ -82,26 +144,242 @@ class ArticleTag(TaggedItemBase):
|
||||
content_object = ParentalKey("blog.ArticlePage", related_name="tagged_items", on_delete=CASCADE)
|
||||
|
||||
|
||||
class Category(models.Model):
|
||||
COLOUR_CHOICES = [("cyan", "Cyan"), ("pink", "Pink"), ("neutral", "Neutral")]
|
||||
|
||||
name = models.CharField(max_length=100, unique=True)
|
||||
slug = models.SlugField(unique=True)
|
||||
description = models.TextField(blank=True)
|
||||
hero_image = models.ForeignKey(
|
||||
"wagtailimages.Image", null=True, blank=True, on_delete=SET_NULL, related_name="+"
|
||||
)
|
||||
colour = models.CharField(max_length=20, choices=COLOUR_CHOICES, default="neutral")
|
||||
sort_order = models.IntegerField(default=0)
|
||||
show_in_nav = models.BooleanField(default=True)
|
||||
|
||||
panels = [
|
||||
FieldPanel("name"),
|
||||
FieldPanel("slug"),
|
||||
FieldPanel("description"),
|
||||
FieldPanel("hero_image"),
|
||||
FieldPanel("colour"),
|
||||
FieldPanel("sort_order"),
|
||||
FieldPanel("show_in_nav"),
|
||||
]
|
||||
|
||||
class Meta:
|
||||
ordering = ["sort_order", "name"]
|
||||
verbose_name_plural = "categories"
|
||||
|
||||
def __str__(self):
|
||||
return self.name
|
||||
|
||||
|
||||
# ── Tag colour palette ────────────────────────────────────────────────────────
|
||||
# Deterministic hash-based colour assignment for tags. Each entry is a dict
|
||||
# with Tailwind CSS class strings for bg, text, and border.
|
||||
|
||||
TAG_COLOUR_PALETTE: list[dict[str, str]] = [
|
||||
{
|
||||
"bg": "bg-brand-cyan/10",
|
||||
"text": "text-brand-cyan",
|
||||
"border": "border-brand-cyan/20",
|
||||
},
|
||||
{
|
||||
"bg": "bg-brand-pink/10",
|
||||
"text": "text-brand-pink",
|
||||
"border": "border-brand-pink/20",
|
||||
},
|
||||
{
|
||||
"bg": "bg-amber-500/10",
|
||||
"text": "text-amber-400",
|
||||
"border": "border-amber-500/20",
|
||||
},
|
||||
{
|
||||
"bg": "bg-emerald-500/10",
|
||||
"text": "text-emerald-400",
|
||||
"border": "border-emerald-500/20",
|
||||
},
|
||||
{
|
||||
"bg": "bg-violet-500/10",
|
||||
"text": "text-violet-400",
|
||||
"border": "border-violet-500/20",
|
||||
},
|
||||
{
|
||||
"bg": "bg-rose-500/10",
|
||||
"text": "text-rose-400",
|
||||
"border": "border-rose-500/20",
|
||||
},
|
||||
{
|
||||
"bg": "bg-sky-500/10",
|
||||
"text": "text-sky-400",
|
||||
"border": "border-sky-500/20",
|
||||
},
|
||||
{
|
||||
"bg": "bg-lime-500/10",
|
||||
"text": "text-lime-400",
|
||||
"border": "border-lime-500/20",
|
||||
},
|
||||
{
|
||||
"bg": "bg-orange-500/10",
|
||||
"text": "text-orange-400",
|
||||
"border": "border-orange-500/20",
|
||||
},
|
||||
{
|
||||
"bg": "bg-fuchsia-500/10",
|
||||
"text": "text-fuchsia-400",
|
||||
"border": "border-fuchsia-500/20",
|
||||
},
|
||||
{
|
||||
"bg": "bg-teal-500/10",
|
||||
"text": "text-teal-400",
|
||||
"border": "border-teal-500/20",
|
||||
},
|
||||
{
|
||||
"bg": "bg-indigo-500/10",
|
||||
"text": "text-indigo-400",
|
||||
"border": "border-indigo-500/20",
|
||||
},
|
||||
]
|
||||
|
||||
|
||||
def get_auto_tag_colour_css(tag_name: str) -> dict[str, str]:
|
||||
"""Deterministically assign a colour from the palette based on tag name."""
|
||||
digest = hashlib.md5(tag_name.lower().encode(), usedforsecurity=False).hexdigest() # noqa: S324
|
||||
index = int(digest, 16) % len(TAG_COLOUR_PALETTE)
|
||||
return TAG_COLOUR_PALETTE[index]
|
||||
|
||||
|
||||
class TagMetadata(models.Model):
|
||||
COLOUR_CHOICES = [("cyan", "Cyan"), ("pink", "Pink"), ("neutral", "Neutral")]
|
||||
|
||||
tag = models.OneToOneField("taggit.Tag", on_delete=CASCADE, related_name="metadata")
|
||||
colour = models.CharField(max_length=20, choices=COLOUR_CHOICES, default="neutral")
|
||||
|
||||
@classmethod
|
||||
def get_fallback_css(cls) -> dict[str, str]:
|
||||
return {"bg": "bg-zinc-100", "text": "text-zinc-800"}
|
||||
|
||||
def get_css_classes(self) -> dict[str, str]:
|
||||
mapping = {
|
||||
"cyan": {"bg": "bg-cyan-100", "text": "text-cyan-900"},
|
||||
"pink": {"bg": "bg-pink-100", "text": "text-pink-900"},
|
||||
"neutral": self.get_fallback_css(),
|
||||
"cyan": {
|
||||
"bg": "bg-brand-cyan/10",
|
||||
"text": "text-brand-cyan",
|
||||
"border": "border-brand-cyan/20",
|
||||
},
|
||||
"pink": {
|
||||
"bg": "bg-brand-pink/10",
|
||||
"text": "text-brand-pink",
|
||||
"border": "border-brand-pink/20",
|
||||
},
|
||||
"neutral": {
|
||||
"bg": "bg-zinc-800 dark:bg-zinc-100",
|
||||
"text": "text-white dark:text-black",
|
||||
"border": "border-zinc-600/20 dark:border-zinc-400/20",
|
||||
},
|
||||
}
|
||||
return mapping.get(self.colour, self.get_fallback_css())
|
||||
css = mapping.get(self.colour)
|
||||
if css is not None:
|
||||
return css
|
||||
return get_auto_tag_colour_css(self.tag.name)
|
||||
|
||||
|
||||
class ArticlePageAdminForm(WagtailAdminPageForm):
|
||||
SUMMARY_MAX_CHARS = 220
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
for name in ("slug", "author", "category", "summary"):
|
||||
if name in self.fields:
|
||||
self.fields[name].required = False
|
||||
|
||||
default_author = self._get_default_author(create=False)
|
||||
if default_author and not self.initial.get("author"):
|
||||
self.initial["author"] = default_author.pk
|
||||
|
||||
default_category = self._get_default_category(create=False)
|
||||
if default_category and not self.initial.get("category"):
|
||||
self.initial["category"] = default_category.pk
|
||||
|
||||
def clean(self):
|
||||
cleaned_data = getattr(self, "cleaned_data", {})
|
||||
self._apply_defaults(cleaned_data)
|
||||
self.cleaned_data = cleaned_data
|
||||
|
||||
cleaned_data = super().clean()
|
||||
self._apply_defaults(cleaned_data)
|
||||
|
||||
if not cleaned_data.get("slug"):
|
||||
self.add_error("slug", "Slug is required.")
|
||||
if not cleaned_data.get("author"):
|
||||
self.add_error("author", "Author is required.")
|
||||
if not cleaned_data.get("category"):
|
||||
self.add_error("category", "Category is required.")
|
||||
if not cleaned_data.get("summary"):
|
||||
self.add_error("summary", "Summary is required.")
|
||||
return cleaned_data
|
||||
|
||||
def _apply_defaults(self, cleaned_data: dict[str, Any]) -> dict[str, Any]:
|
||||
title = (cleaned_data.get("title") or "").strip()
|
||||
|
||||
if not cleaned_data.get("slug") and title:
|
||||
cleaned_data["slug"] = self._build_unique_page_slug(title)
|
||||
if not cleaned_data.get("author"):
|
||||
cleaned_data["author"] = self._get_default_author(create=True)
|
||||
if not cleaned_data.get("category"):
|
||||
cleaned_data["category"] = self._get_default_category(create=True)
|
||||
if not cleaned_data.get("summary"):
|
||||
cleaned_data["summary"] = _generate_summary_from_stream(
|
||||
cleaned_data.get("body"),
|
||||
max_chars=self.SUMMARY_MAX_CHARS,
|
||||
) or title
|
||||
if not cleaned_data.get("search_description") and cleaned_data.get("summary"):
|
||||
cleaned_data["search_description"] = cleaned_data["summary"]
|
||||
|
||||
return cleaned_data
|
||||
|
||||
def _get_default_author(self, *, create: bool) -> Author | None:
|
||||
user = self.for_user
|
||||
if not user or not user.is_authenticated:
|
||||
return None
|
||||
existing = Author.objects.filter(user=user).first()
|
||||
if existing or not create:
|
||||
return existing
|
||||
|
||||
base_name = (user.get_full_name() or user.get_username() or f"user-{user.pk}").strip()
|
||||
base_slug = slugify(base_name) or f"user-{user.pk}"
|
||||
slug = base_slug
|
||||
suffix = 2
|
||||
while Author.objects.filter(slug=slug).exists():
|
||||
slug = f"{base_slug}-{suffix}"
|
||||
suffix += 1
|
||||
return Author.objects.create(user=user, name=base_name, slug=slug)
|
||||
|
||||
def _get_default_category(self, *, create: bool):
|
||||
existing = Category.objects.filter(slug="general").first()
|
||||
if existing or not create:
|
||||
return existing
|
||||
category, _ = Category.objects.get_or_create(
|
||||
slug="general",
|
||||
defaults={"name": "General", "description": "General articles", "colour": "neutral"},
|
||||
)
|
||||
return category
|
||||
|
||||
def _build_unique_page_slug(self, title: str) -> str:
|
||||
base_slug = slugify(title) or "article"
|
||||
parent_page = self.parent_page
|
||||
if parent_page is None and self.instance.pk:
|
||||
parent_page = self.instance.get_parent()
|
||||
if parent_page is None:
|
||||
return base_slug
|
||||
|
||||
sibling_pages = parent_page.get_children().exclude(pk=self.instance.pk)
|
||||
slug = base_slug
|
||||
suffix = 2
|
||||
while sibling_pages.filter(slug=slug).exists():
|
||||
slug = f"{base_slug}-{suffix}"
|
||||
suffix += 1
|
||||
return slug
|
||||
|
||||
|
||||
class ArticlePage(SeoMixin, Page):
|
||||
category = models.ForeignKey("blog.Category", on_delete=PROTECT, related_name="+")
|
||||
author = models.ForeignKey("authors.Author", on_delete=PROTECT)
|
||||
hero_image = models.ForeignKey(
|
||||
"wagtailimages.Image", null=True, blank=True, on_delete=SET_NULL, related_name="+"
|
||||
@@ -111,27 +389,109 @@ class ArticlePage(SeoMixin, Page):
|
||||
tags = ClusterTaggableManager(through="blog.ArticleTag", blank=True)
|
||||
read_time_mins = models.PositiveIntegerField(editable=False, default=1)
|
||||
comments_enabled = models.BooleanField(default=True)
|
||||
published_date = models.DateTimeField(
|
||||
null=True,
|
||||
blank=True,
|
||||
help_text="Display date for this article. Auto-set on first publish if left blank.",
|
||||
)
|
||||
|
||||
parent_page_types = ["blog.ArticleIndexPage"]
|
||||
subpage_types: list[str] = []
|
||||
base_form_class = ArticlePageAdminForm
|
||||
|
||||
content_panels = Page.content_panels + [
|
||||
FieldPanel("author"),
|
||||
FieldPanel("hero_image"),
|
||||
content_panels = [
|
||||
FieldPanel("title"),
|
||||
FieldPanel("summary"),
|
||||
FieldPanel("body"),
|
||||
]
|
||||
|
||||
metadata_panels = [
|
||||
FieldPanel("category"),
|
||||
FieldPanel("author"),
|
||||
FieldPanel("tags"),
|
||||
FieldPanel("hero_image"),
|
||||
FieldPanel("comments_enabled"),
|
||||
]
|
||||
|
||||
promote_panels = Page.promote_panels + SeoMixin.seo_panels
|
||||
publishing_panels = [
|
||||
FieldPanel("published_date"),
|
||||
FieldPanel("go_live_at"),
|
||||
FieldPanel("expire_at"),
|
||||
]
|
||||
|
||||
search_fields = Page.search_fields
|
||||
edit_handler = TabbedInterface(
|
||||
[
|
||||
ObjectList(content_panels, heading="Content"),
|
||||
ObjectList(metadata_panels, heading="Metadata"),
|
||||
ObjectList(publishing_panels, heading="Publishing"),
|
||||
ObjectList(SeoMixin.seo_panels, heading="SEO"),
|
||||
]
|
||||
)
|
||||
|
||||
search_fields = Page.search_fields + [
|
||||
index.SearchField("summary"),
|
||||
index.SearchField("body_text", es_extra={"analyzer": "english"}),
|
||||
index.AutocompleteField("title"),
|
||||
index.RelatedFields("tags", [
|
||||
index.SearchField("name"),
|
||||
]),
|
||||
index.FilterField("category"),
|
||||
index.FilterField("published_date"),
|
||||
]
|
||||
|
||||
@property
|
||||
def body_text(self) -> str:
|
||||
"""Extract prose text from body StreamField, excluding code blocks."""
|
||||
parts: list[str] = []
|
||||
for block in self.body:
|
||||
if block.block_type == "code":
|
||||
continue
|
||||
value = block.value
|
||||
text = value.source if hasattr(value, "source") else str(value)
|
||||
parts.append(text)
|
||||
return " ".join(parts)
|
||||
|
||||
def save(self, *args: Any, **kwargs: Any) -> None:
|
||||
if not getattr(self, "slug", "") and self.title:
|
||||
self.slug = self._auto_slug_from_title()
|
||||
if not self.category_id:
|
||||
self.category, _ = Category.objects.get_or_create(
|
||||
slug="general",
|
||||
defaults={"name": "General", "description": "General articles", "colour": "neutral"},
|
||||
)
|
||||
if not (self.summary or "").strip():
|
||||
self.summary = _generate_summary_from_stream(self.body) or self.title
|
||||
if not getattr(self, "search_description", "") and self.summary:
|
||||
self.search_description = self.summary
|
||||
if not self.published_date and self.first_published_at:
|
||||
self.published_date = self.first_published_at
|
||||
if self._should_refresh_read_time():
|
||||
self.read_time_mins = self._compute_read_time()
|
||||
return super().save(*args, **kwargs)
|
||||
|
||||
def _auto_slug_from_title(self) -> str:
|
||||
base_slug = slugify(self.title) or "article"
|
||||
parent = self.get_parent() if self.pk else None
|
||||
if parent is None:
|
||||
return base_slug
|
||||
sibling_pages = parent.get_children().exclude(pk=self.pk)
|
||||
slug = base_slug
|
||||
suffix = 2
|
||||
while sibling_pages.filter(slug=slug).exists():
|
||||
slug = f"{base_slug}-{suffix}"
|
||||
suffix += 1
|
||||
return slug
|
||||
|
||||
def _should_refresh_read_time(self) -> bool:
|
||||
if not self.pk:
|
||||
return True
|
||||
|
||||
previous = type(self).objects.only("body").filter(pk=self.pk).first()
|
||||
if previous is None:
|
||||
return True
|
||||
|
||||
return previous.body_text != self.body_text
|
||||
|
||||
def _compute_read_time(self) -> int:
|
||||
words = []
|
||||
for block in self.body:
|
||||
@@ -153,14 +513,14 @@ class ArticlePage(SeoMixin, Page):
|
||||
.filter(tags__in=tag_ids)
|
||||
.exclude(pk=self.pk)
|
||||
.distinct()
|
||||
.order_by("-first_published_at")[:count]
|
||||
.order_by("-published_date")[:count]
|
||||
)
|
||||
if len(related) < count:
|
||||
exclude_ids = [a.pk for a in related] + [self.pk]
|
||||
fallback = list(
|
||||
ArticlePage.objects.live()
|
||||
.exclude(pk__in=exclude_ids)
|
||||
.order_by("-first_published_at")[: count - len(related)]
|
||||
.order_by("-published_date")[: count - len(related)]
|
||||
)
|
||||
return related + fallback
|
||||
return related
|
||||
@@ -168,9 +528,20 @@ class ArticlePage(SeoMixin, Page):
|
||||
def get_context(self, request, *args, **kwargs):
|
||||
ctx = super().get_context(request, *args, **kwargs)
|
||||
ctx["related_articles"] = self.get_related_articles()
|
||||
ctx["approved_comments"] = self.comments.filter(is_approved=True, parent__isnull=True).select_related(
|
||||
"parent"
|
||||
from django.conf import settings
|
||||
|
||||
from apps.comments.models import Comment
|
||||
from apps.comments.views import _annotate_reaction_counts, _get_session_key
|
||||
|
||||
approved_replies = Comment.objects.filter(is_approved=True).select_related("parent")
|
||||
comments = list(
|
||||
self.comments.filter(is_approved=True, parent__isnull=True).prefetch_related(
|
||||
Prefetch("replies", queryset=approved_replies)
|
||||
)
|
||||
)
|
||||
_annotate_reaction_counts(comments, _get_session_key(request))
|
||||
ctx["approved_comments"] = comments
|
||||
ctx["turnstile_site_key"] = getattr(settings, "TURNSTILE_SITE_KEY", "")
|
||||
return ctx
|
||||
|
||||
|
||||
|
||||
@@ -37,6 +37,7 @@ class ArticlePageFactory(wagtail_factories.PageFactory):
|
||||
summary = "Summary"
|
||||
body = [("rich_text", "<p>Hello world</p>")]
|
||||
first_published_at = factory.LazyFunction(timezone.now)
|
||||
published_date = factory.LazyFunction(timezone.now)
|
||||
|
||||
|
||||
class LegalIndexPageFactory(wagtail_factories.PageFactory):
|
||||
|
||||
495
apps/blog/tests/test_admin_experience.py
Normal file
495
apps/blog/tests/test_admin_experience.py
Normal file
@@ -0,0 +1,495 @@
|
||||
from datetime import timedelta
|
||||
from types import SimpleNamespace
|
||||
|
||||
import pytest
|
||||
from django.contrib import messages
|
||||
from django.contrib.messages.storage.fallback import FallbackStorage
|
||||
from django.contrib.sessions.middleware import SessionMiddleware
|
||||
from django.test import override_settings
|
||||
from django.utils import timezone
|
||||
|
||||
from apps.blog.models import ArticleIndexPage, ArticlePage, ArticlePageAdminForm, Category
|
||||
from apps.blog.tests.factories import AuthorFactory
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_published_date_auto_set_on_first_publish(home_page):
|
||||
"""published_date should be auto-populated from first_published_at on first publish."""
|
||||
index = ArticleIndexPage(title="Articles", slug="articles")
|
||||
home_page.add_child(instance=index)
|
||||
author = AuthorFactory()
|
||||
article = ArticlePage(
|
||||
title="Auto Date",
|
||||
slug="auto-date",
|
||||
author=author,
|
||||
summary="summary",
|
||||
body=[("rich_text", "<p>body</p>")],
|
||||
)
|
||||
index.add_child(instance=article)
|
||||
article.save_revision().publish()
|
||||
article.refresh_from_db()
|
||||
assert article.published_date is not None
|
||||
assert article.published_date == article.first_published_at
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_published_date_preserved_when_explicitly_set(home_page):
|
||||
"""An explicitly set published_date should not be overwritten on save."""
|
||||
index = ArticleIndexPage(title="Articles", slug="articles")
|
||||
home_page.add_child(instance=index)
|
||||
author = AuthorFactory()
|
||||
custom_date = timezone.now() - timedelta(days=30)
|
||||
article = ArticlePage(
|
||||
title="Custom Date",
|
||||
slug="custom-date",
|
||||
author=author,
|
||||
summary="summary",
|
||||
body=[("rich_text", "<p>body</p>")],
|
||||
published_date=custom_date,
|
||||
)
|
||||
index.add_child(instance=article)
|
||||
article.save_revision().publish()
|
||||
article.refresh_from_db()
|
||||
assert article.published_date == custom_date
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_homepage_orders_articles_by_published_date(home_page):
|
||||
"""HomePage context should list articles ordered by -published_date."""
|
||||
index = ArticleIndexPage(title="Articles", slug="articles")
|
||||
home_page.add_child(instance=index)
|
||||
author = AuthorFactory()
|
||||
|
||||
older = ArticlePage(
|
||||
title="Older",
|
||||
slug="older",
|
||||
author=author,
|
||||
summary="s",
|
||||
body=[("rich_text", "<p>body</p>")],
|
||||
published_date=timezone.now() - timedelta(days=10),
|
||||
)
|
||||
index.add_child(instance=older)
|
||||
older.save_revision().publish()
|
||||
|
||||
newer = ArticlePage(
|
||||
title="Newer",
|
||||
slug="newer",
|
||||
author=author,
|
||||
summary="s",
|
||||
body=[("rich_text", "<p>body</p>")],
|
||||
published_date=timezone.now(),
|
||||
)
|
||||
index.add_child(instance=newer)
|
||||
newer.save_revision().publish()
|
||||
|
||||
ctx = home_page.get_context(type("Req", (), {"GET": {}})())
|
||||
titles = [a.title for a in ctx["latest_articles"]]
|
||||
assert titles.index("Newer") < titles.index("Older")
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_article_index_orders_by_published_date(home_page, rf):
|
||||
"""ArticleIndexPage.get_articles should order by -published_date."""
|
||||
index = ArticleIndexPage(title="Articles", slug="articles")
|
||||
home_page.add_child(instance=index)
|
||||
author = AuthorFactory()
|
||||
|
||||
old = ArticlePage(
|
||||
title="Old",
|
||||
slug="old",
|
||||
author=author,
|
||||
summary="s",
|
||||
body=[("rich_text", "<p>b</p>")],
|
||||
published_date=timezone.now() - timedelta(days=5),
|
||||
)
|
||||
index.add_child(instance=old)
|
||||
old.save_revision().publish()
|
||||
|
||||
new = ArticlePage(
|
||||
title="New",
|
||||
slug="new",
|
||||
author=author,
|
||||
summary="s",
|
||||
body=[("rich_text", "<p>b</p>")],
|
||||
published_date=timezone.now(),
|
||||
)
|
||||
index.add_child(instance=new)
|
||||
new.save_revision().publish()
|
||||
|
||||
articles = list(index.get_articles())
|
||||
assert articles[0].title == "New"
|
||||
assert articles[1].title == "Old"
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_feed_uses_published_date(article_page):
|
||||
"""RSS feed item_pubdate should use published_date."""
|
||||
from apps.blog.feeds import AllArticlesFeed
|
||||
|
||||
feed = AllArticlesFeed()
|
||||
assert feed.item_pubdate(article_page) == article_page.published_date
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@override_settings(ALLOWED_HOSTS=["testserver", "localhost", "127.0.0.1"])
|
||||
def test_articles_listing_viewset_loads(client, django_user_model, home_page):
|
||||
"""The Articles PageListingViewSet index page should load."""
|
||||
admin = django_user_model.objects.create_superuser(
|
||||
username="admin", email="admin@example.com", password="admin-pass"
|
||||
)
|
||||
client.force_login(admin)
|
||||
response = client.get("/cms/articles/")
|
||||
assert response.status_code == 200
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@override_settings(ALLOWED_HOSTS=["testserver", "localhost", "127.0.0.1"])
|
||||
def test_articles_listing_shows_articles(client, django_user_model, home_page):
|
||||
"""The Articles listing should show existing articles."""
|
||||
index = ArticleIndexPage(title="Articles", slug="articles")
|
||||
home_page.add_child(instance=index)
|
||||
author = AuthorFactory()
|
||||
article = ArticlePage(
|
||||
title="Listed Article",
|
||||
slug="listed-article",
|
||||
author=author,
|
||||
summary="summary",
|
||||
body=[("rich_text", "<p>body</p>")],
|
||||
)
|
||||
index.add_child(instance=article)
|
||||
article.save_revision().publish()
|
||||
|
||||
admin = django_user_model.objects.create_superuser(
|
||||
username="admin", email="admin@example.com", password="admin-pass"
|
||||
)
|
||||
client.force_login(admin)
|
||||
response = client.get("/cms/articles/")
|
||||
assert response.status_code == 200
|
||||
assert "Listed Article" in response.content.decode()
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@override_settings(ALLOWED_HOSTS=["testserver", "localhost", "127.0.0.1"])
|
||||
def test_dashboard_panel_renders(client, django_user_model, home_page):
|
||||
"""The Wagtail admin dashboard should include the articles summary panel."""
|
||||
admin = django_user_model.objects.create_superuser(
|
||||
username="admin", email="admin@example.com", password="admin-pass"
|
||||
)
|
||||
client.force_login(admin)
|
||||
response = client.get("/cms/")
|
||||
assert response.status_code == 200
|
||||
content = response.content.decode()
|
||||
assert "Articles overview" in content
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@override_settings(ALLOWED_HOSTS=["testserver", "localhost", "127.0.0.1"])
|
||||
def test_dashboard_panel_shows_drafts(client, django_user_model, home_page):
|
||||
"""Dashboard panel should list draft articles."""
|
||||
index = ArticleIndexPage(title="Articles", slug="articles")
|
||||
home_page.add_child(instance=index)
|
||||
author = AuthorFactory()
|
||||
draft = ArticlePage(
|
||||
title="My Draft Post",
|
||||
slug="draft-post",
|
||||
author=author,
|
||||
summary="summary",
|
||||
body=[("rich_text", "<p>body</p>")],
|
||||
)
|
||||
index.add_child(instance=draft)
|
||||
draft.save_revision() # save revision but don't publish
|
||||
|
||||
admin = django_user_model.objects.create_superuser(
|
||||
username="admin", email="admin@example.com", password="admin-pass"
|
||||
)
|
||||
client.force_login(admin)
|
||||
response = client.get("/cms/")
|
||||
content = response.content.decode()
|
||||
assert "My Draft Post" in content
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@override_settings(ALLOWED_HOSTS=["testserver", "localhost", "127.0.0.1"])
|
||||
def test_article_edit_page_has_tabbed_interface(client, django_user_model, home_page):
|
||||
"""ArticlePage editor should have tabbed panels (Content, Metadata, Publishing, SEO)."""
|
||||
index = ArticleIndexPage(title="Articles", slug="articles")
|
||||
home_page.add_child(instance=index)
|
||||
author = AuthorFactory()
|
||||
article = ArticlePage(
|
||||
title="Tabbed",
|
||||
slug="tabbed",
|
||||
author=author,
|
||||
summary="summary",
|
||||
body=[("rich_text", "<p>body</p>")],
|
||||
)
|
||||
index.add_child(instance=article)
|
||||
article.save_revision().publish()
|
||||
|
||||
admin = django_user_model.objects.create_superuser(
|
||||
username="admin", email="admin@example.com", password="admin-pass"
|
||||
)
|
||||
client.force_login(admin)
|
||||
response = client.get(f"/cms/pages/{article.pk}/edit/")
|
||||
content = response.content.decode()
|
||||
assert response.status_code == 200
|
||||
assert "Content" in content
|
||||
assert "Metadata" in content
|
||||
assert "Publishing" in content
|
||||
assert "SEO" in content
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@override_settings(ALLOWED_HOSTS=["testserver", "localhost", "127.0.0.1"])
|
||||
def test_articles_listing_has_status_filter(client, django_user_model, home_page):
|
||||
"""The Articles listing should accept status filter parameter."""
|
||||
admin = django_user_model.objects.create_superuser(
|
||||
username="admin", email="admin@example.com", password="admin-pass"
|
||||
)
|
||||
client.force_login(admin)
|
||||
response = client.get("/cms/articles/?status=live")
|
||||
assert response.status_code == 200
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@override_settings(ALLOWED_HOSTS=["testserver", "localhost", "127.0.0.1"])
|
||||
def test_articles_listing_has_tag_filter(client, django_user_model, home_page):
|
||||
"""The Articles listing should accept tag filter parameter."""
|
||||
admin = django_user_model.objects.create_superuser(
|
||||
username="admin", email="admin@example.com", password="admin-pass"
|
||||
)
|
||||
client.force_login(admin)
|
||||
response = client.get("/cms/articles/?tag=1")
|
||||
assert response.status_code == 200
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_article_listing_default_ordering():
|
||||
"""ArticlePageListingViewSet should default to -published_date ordering."""
|
||||
from apps.blog.wagtail_hooks import ArticlePageListingViewSet
|
||||
|
||||
assert ArticlePageListingViewSet.default_ordering == "-published_date"
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_article_search_fields_include_summary():
|
||||
"""ArticlePage.search_fields should index the summary field."""
|
||||
field_names = [
|
||||
f.field_name for f in ArticlePage.search_fields if hasattr(f, "field_name")
|
||||
]
|
||||
assert "summary" in field_names
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_article_admin_form_relaxes_initial_required_fields(article_index, django_user_model):
|
||||
"""Slug/author/category/summary should not block initial draft validation."""
|
||||
user = django_user_model.objects.create_user(
|
||||
username="writer",
|
||||
email="writer@example.com",
|
||||
password="writer-pass",
|
||||
)
|
||||
form_class = ArticlePage.get_edit_handler().get_form_class()
|
||||
form = form_class(parent_page=article_index, for_user=user)
|
||||
|
||||
assert form.fields["slug"].required is False
|
||||
assert form.fields["author"].required is False
|
||||
assert form.fields["category"].required is False
|
||||
assert form.fields["summary"].required is False
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_article_admin_form_clean_applies_defaults(article_index, django_user_model, monkeypatch):
|
||||
"""Form clean should populate defaults before parent validation runs."""
|
||||
user = django_user_model.objects.create_user(
|
||||
username="writer",
|
||||
email="writer@example.com",
|
||||
password="writer-pass",
|
||||
first_name="Writer",
|
||||
last_name="User",
|
||||
)
|
||||
form_class = ArticlePage.get_edit_handler().get_form_class()
|
||||
form = form_class(parent_page=article_index, for_user=user)
|
||||
|
||||
body = [
|
||||
SimpleNamespace(block_type="code", value=SimpleNamespace(raw_code="print('ignore')")),
|
||||
SimpleNamespace(block_type="rich_text", value=SimpleNamespace(source="<p>Hello world body text.</p>")),
|
||||
]
|
||||
form.cleaned_data = {
|
||||
"title": "Auto Defaults Title",
|
||||
"slug": "",
|
||||
"author": None,
|
||||
"category": None,
|
||||
"summary": "",
|
||||
"body": body,
|
||||
}
|
||||
observed = {}
|
||||
|
||||
def fake_super_clean(_self):
|
||||
observed["slug_before_parent_clean"] = _self.cleaned_data.get("slug")
|
||||
return _self.cleaned_data
|
||||
|
||||
mro = form.__class__.__mro__
|
||||
super_form_class = mro[mro.index(ArticlePageAdminForm) + 1]
|
||||
monkeypatch.setattr(super_form_class, "clean", fake_super_clean)
|
||||
cleaned = form.clean()
|
||||
|
||||
assert observed["slug_before_parent_clean"] == "auto-defaults-title"
|
||||
assert cleaned["slug"] == "auto-defaults-title"
|
||||
assert cleaned["author"] is not None
|
||||
assert cleaned["author"].user_id == user.id
|
||||
assert cleaned["category"] is not None
|
||||
assert cleaned["category"].slug == "general"
|
||||
assert cleaned["summary"] == "Hello world body text."
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_article_seo_tab_fields_not_duplicated():
|
||||
"""SEO tab should include each promote/SEO field only once."""
|
||||
handler = ArticlePage.get_edit_handler()
|
||||
seo_tab = next(panel for panel in handler.children if panel.heading == "SEO")
|
||||
|
||||
def flatten_field_names(panel):
|
||||
names = []
|
||||
for child in panel.children:
|
||||
if hasattr(child, "field_name"):
|
||||
names.append(child.field_name)
|
||||
else:
|
||||
names.extend(flatten_field_names(child))
|
||||
return names
|
||||
|
||||
field_names = flatten_field_names(seo_tab)
|
||||
assert field_names.count("slug") == 1
|
||||
assert field_names.count("seo_title") == 1
|
||||
assert field_names.count("search_description") == 1
|
||||
assert field_names.count("show_in_menus") == 1
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_article_save_autogenerates_summary_when_missing(article_index):
|
||||
"""Model save fallback should generate summary from prose blocks."""
|
||||
category = Category.objects.create(name="Guides", slug="guides")
|
||||
author = AuthorFactory()
|
||||
article = ArticlePage(
|
||||
title="Summary Auto",
|
||||
slug="summary-auto",
|
||||
author=author,
|
||||
category=category,
|
||||
summary="",
|
||||
body=[
|
||||
("code", {"language": "python", "filename": "", "raw_code": "print('skip')"}),
|
||||
("rich_text", "<p>This should become the summary text.</p>"),
|
||||
],
|
||||
)
|
||||
|
||||
article_index.add_child(instance=article)
|
||||
article.save()
|
||||
|
||||
assert article.summary == "This should become the summary text."
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_category_verbose_name_plural():
|
||||
"""Category Meta should define verbose_name_plural as 'categories'."""
|
||||
assert Category._meta.verbose_name_plural == "categories"
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@override_settings(ALLOWED_HOSTS=["testserver", "localhost", "127.0.0.1"])
|
||||
def test_snippet_category_listing_shows_categories(client, django_user_model):
|
||||
"""Categories created in the database should appear in the Snippets listing."""
|
||||
Category.objects.create(name="Reviews", slug="reviews")
|
||||
Category.objects.create(name="Tutorials", slug="tutorials")
|
||||
|
||||
admin = django_user_model.objects.create_superuser(
|
||||
username="admin-cat", email="admin-cat@example.com", password="admin-pass"
|
||||
)
|
||||
client.force_login(admin)
|
||||
response = client.get("/cms/snippets/blog/category/")
|
||||
content = response.content.decode()
|
||||
|
||||
assert response.status_code == 200
|
||||
assert "Reviews" in content
|
||||
assert "Tutorials" in content
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_article_admin_form_clean_auto_populates_search_description(article_index, django_user_model, monkeypatch):
|
||||
"""Form clean should auto-populate search_description from summary."""
|
||||
user = django_user_model.objects.create_user(
|
||||
username="writer",
|
||||
email="writer@example.com",
|
||||
password="writer-pass",
|
||||
first_name="Writer",
|
||||
last_name="User",
|
||||
)
|
||||
form_class = ArticlePage.get_edit_handler().get_form_class()
|
||||
form = form_class(parent_page=article_index, for_user=user)
|
||||
|
||||
body = [
|
||||
SimpleNamespace(block_type="rich_text", value=SimpleNamespace(source="<p>Article body text.</p>")),
|
||||
]
|
||||
form.cleaned_data = {
|
||||
"title": "SEO Test",
|
||||
"slug": "",
|
||||
"author": None,
|
||||
"category": None,
|
||||
"summary": "",
|
||||
"search_description": "",
|
||||
"body": body,
|
||||
}
|
||||
|
||||
mro = form.__class__.__mro__
|
||||
super_form_class = mro[mro.index(ArticlePageAdminForm) + 1]
|
||||
monkeypatch.setattr(super_form_class, "clean", lambda _self: _self.cleaned_data)
|
||||
cleaned = form.clean()
|
||||
|
||||
assert cleaned["summary"] == "Article body text."
|
||||
assert cleaned["search_description"] == "Article body text."
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_article_admin_form_preserves_explicit_search_description(article_index, django_user_model, monkeypatch):
|
||||
"""Form clean should not overwrite an explicit search_description."""
|
||||
user = django_user_model.objects.create_user(
|
||||
username="writer2",
|
||||
email="writer2@example.com",
|
||||
password="writer-pass",
|
||||
)
|
||||
form_class = ArticlePage.get_edit_handler().get_form_class()
|
||||
form = form_class(parent_page=article_index, for_user=user)
|
||||
|
||||
body = [
|
||||
SimpleNamespace(block_type="rich_text", value=SimpleNamespace(source="<p>Body.</p>")),
|
||||
]
|
||||
form.cleaned_data = {
|
||||
"title": "SEO Explicit Test",
|
||||
"slug": "seo-explicit-test",
|
||||
"author": None,
|
||||
"category": None,
|
||||
"summary": "My summary.",
|
||||
"search_description": "Custom SEO text.",
|
||||
"body": body,
|
||||
}
|
||||
|
||||
mro = form.__class__.__mro__
|
||||
super_form_class = mro[mro.index(ArticlePageAdminForm) + 1]
|
||||
monkeypatch.setattr(super_form_class, "clean", lambda _self: _self.cleaned_data)
|
||||
cleaned = form.clean()
|
||||
|
||||
assert cleaned["search_description"] == "Custom SEO text."
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_article_page_omits_admin_messages_on_frontend(article_page, rf):
|
||||
"""Frontend templates should not render admin session messages."""
|
||||
request = rf.get(article_page.url)
|
||||
SessionMiddleware(lambda req: None).process_request(request)
|
||||
request.session.save()
|
||||
setattr(request, "_messages", FallbackStorage(request))
|
||||
messages.success(request, "Page 'Test' has been published.")
|
||||
|
||||
response = article_page.serve(request)
|
||||
response.render()
|
||||
content = response.content.decode()
|
||||
|
||||
assert "Page 'Test' has been published." not in content
|
||||
assert 'aria-label="Messages"' not in content
|
||||
@@ -1,4 +1,8 @@
|
||||
import pytest
|
||||
from django.test import override_settings
|
||||
|
||||
from apps.blog.models import ArticleIndexPage, ArticlePage
|
||||
from apps.blog.tests.factories import AuthorFactory
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@@ -6,3 +10,25 @@ def test_feed_endpoint(client):
|
||||
resp = client.get("/feed/")
|
||||
assert resp.status_code == 200
|
||||
assert resp["Content-Type"].startswith("application/rss+xml")
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@override_settings(WAGTAILADMIN_BASE_URL="http://wrong-host.example")
|
||||
def test_feed_uses_request_host_for_item_links(client, home_page):
|
||||
index = ArticleIndexPage(title="Articles", slug="articles")
|
||||
home_page.add_child(instance=index)
|
||||
author = AuthorFactory()
|
||||
article = ArticlePage(
|
||||
title="Feed Article",
|
||||
slug="feed-article",
|
||||
author=author,
|
||||
summary="summary",
|
||||
body=[("rich_text", "<p>Body</p>")],
|
||||
)
|
||||
index.add_child(instance=article)
|
||||
article.save_revision().publish()
|
||||
|
||||
resp = client.get("/feed/")
|
||||
body = resp.content.decode()
|
||||
assert resp.status_code == 200
|
||||
assert "http://localhost/articles/feed-article/" in body
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
import pytest
|
||||
|
||||
from apps.blog.feeds import AllArticlesFeed
|
||||
from apps.blog.models import ArticleIndexPage, ArticlePage, Category
|
||||
from apps.blog.tests.factories import AuthorFactory
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@@ -16,3 +18,32 @@ def test_all_feed_methods(article_page):
|
||||
def test_tag_feed_not_found(client):
|
||||
resp = client.get("/feed/tag/does-not-exist/")
|
||||
assert resp.status_code == 404
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_category_feed_endpoint(client, home_page):
|
||||
index = ArticleIndexPage(title="Articles", slug="articles")
|
||||
home_page.add_child(instance=index)
|
||||
category = Category.objects.create(name="Reviews", slug="reviews")
|
||||
author = AuthorFactory()
|
||||
article = ArticlePage(
|
||||
title="Feed Review",
|
||||
slug="feed-review",
|
||||
author=author,
|
||||
summary="summary",
|
||||
body=[("rich_text", "<p>Body</p>")],
|
||||
category=category,
|
||||
)
|
||||
index.add_child(instance=article)
|
||||
article.save_revision().publish()
|
||||
|
||||
resp = client.get("/feed/category/reviews/")
|
||||
assert resp.status_code == 200
|
||||
assert resp["Content-Type"].startswith("application/rss+xml")
|
||||
assert "Feed Review" in resp.content.decode()
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_category_feed_not_found(client):
|
||||
resp = client.get("/feed/category/does-not-exist/")
|
||||
assert resp.status_code == 404
|
||||
|
||||
@@ -2,7 +2,15 @@ import pytest
|
||||
from django.db import IntegrityError
|
||||
from taggit.models import Tag
|
||||
|
||||
from apps.blog.models import ArticleIndexPage, ArticlePage, HomePage, TagMetadata
|
||||
from apps.blog.models import (
|
||||
TAG_COLOUR_PALETTE,
|
||||
ArticleIndexPage,
|
||||
ArticlePage,
|
||||
Category,
|
||||
HomePage,
|
||||
TagMetadata,
|
||||
get_auto_tag_colour_css,
|
||||
)
|
||||
from apps.blog.tests.factories import AuthorFactory
|
||||
|
||||
|
||||
@@ -37,6 +45,163 @@ def test_article_compute_read_time_excludes_code(home_page):
|
||||
def test_tag_metadata_css_and_uniqueness():
|
||||
tag = Tag.objects.create(name="llms", slug="llms")
|
||||
meta = TagMetadata.objects.create(tag=tag, colour="cyan")
|
||||
assert meta.get_css_classes()["bg"].startswith("bg-cyan")
|
||||
assert meta.get_css_classes()["bg"] == "bg-brand-cyan/10"
|
||||
with pytest.raises(IntegrityError):
|
||||
TagMetadata.objects.create(tag=tag, colour="pink")
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_article_default_category_is_assigned(home_page):
|
||||
index = ArticleIndexPage(title="Articles", slug="articles")
|
||||
home_page.add_child(instance=index)
|
||||
author = AuthorFactory()
|
||||
article = ArticlePage(
|
||||
title="Categorised",
|
||||
slug="categorised",
|
||||
author=author,
|
||||
summary="s",
|
||||
body=[("rich_text", "<p>body</p>")],
|
||||
)
|
||||
index.add_child(instance=article)
|
||||
article.save()
|
||||
assert article.category.slug == "general"
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_article_read_time_is_not_recomputed_when_body_text_is_unchanged(home_page, monkeypatch):
|
||||
index = ArticleIndexPage(title="Articles", slug="articles")
|
||||
home_page.add_child(instance=index)
|
||||
author = AuthorFactory()
|
||||
article = ArticlePage(
|
||||
title="Stable read time",
|
||||
slug="stable-read-time",
|
||||
author=author,
|
||||
summary="s",
|
||||
body=[("rich_text", "<p>body words</p>")],
|
||||
)
|
||||
index.add_child(instance=article)
|
||||
article.save()
|
||||
|
||||
def fail_compute():
|
||||
raise AssertionError("read time should not be recomputed when body text is unchanged")
|
||||
|
||||
monkeypatch.setattr(article, "_compute_read_time", fail_compute)
|
||||
article.title = "Retitled"
|
||||
article.save()
|
||||
article.refresh_from_db()
|
||||
|
||||
assert article.read_time_mins == 1
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_category_ordering():
|
||||
Category.objects.get_or_create(name="General", slug="general")
|
||||
Category.objects.create(name="Z", slug="z", sort_order=2)
|
||||
Category.objects.create(name="A", slug="a", sort_order=1)
|
||||
names = list(Category.objects.values_list("name", flat=True))
|
||||
assert names == ["General", "A", "Z"]
|
||||
|
||||
|
||||
# ── Auto tag colour tests ────────────────────────────────────────────────────
|
||||
|
||||
|
||||
def test_auto_tag_colour_is_deterministic():
|
||||
"""Same tag name always produces the same colour."""
|
||||
css1 = get_auto_tag_colour_css("python")
|
||||
css2 = get_auto_tag_colour_css("python")
|
||||
assert css1 == css2
|
||||
|
||||
|
||||
def test_auto_tag_colour_is_case_insensitive():
|
||||
"""Tag colour assignment is case-insensitive."""
|
||||
assert get_auto_tag_colour_css("Python") == get_auto_tag_colour_css("python")
|
||||
|
||||
|
||||
def test_auto_tag_colour_returns_valid_palette_entry():
|
||||
"""Returned CSS dict must be from the palette."""
|
||||
css = get_auto_tag_colour_css("llms")
|
||||
assert css in TAG_COLOUR_PALETTE
|
||||
|
||||
|
||||
def test_auto_tag_colour_distributes_across_palette():
|
||||
"""Different tag names should map to multiple palette entries."""
|
||||
sample_tags = ["python", "javascript", "rust", "go", "ruby", "java",
|
||||
"typescript", "css", "html", "sql", "llms", "mlops"]
|
||||
colours = {get_auto_tag_colour_css(t)["text"] for t in sample_tags}
|
||||
assert len(colours) >= 3, "Tags should spread across at least 3 palette colours"
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_tag_without_metadata_uses_auto_colour():
|
||||
"""Tags without TagMetadata should get auto-assigned colour, not neutral."""
|
||||
tag = Tag.objects.create(name="fastapi", slug="fastapi")
|
||||
expected = get_auto_tag_colour_css("fastapi")
|
||||
# Verify no metadata exists
|
||||
assert not TagMetadata.objects.filter(tag=tag).exists()
|
||||
# The template tag helper should fall back to auto colour
|
||||
from apps.core.templatetags.core_tags import _resolve_tag_css
|
||||
assert _resolve_tag_css(tag) == expected
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_tag_with_metadata_overrides_auto_colour():
|
||||
"""Tags with explicit TagMetadata should use that colour."""
|
||||
tag = Tag.objects.create(name="django", slug="django")
|
||||
TagMetadata.objects.create(tag=tag, colour="pink")
|
||||
from apps.core.templatetags.core_tags import _resolve_tag_css
|
||||
css = _resolve_tag_css(tag)
|
||||
assert css["text"] == "text-brand-pink"
|
||||
|
||||
|
||||
# ── Auto slug tests ──────────────────────────────────────────────────────────
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_article_save_auto_generates_slug_from_title(home_page):
|
||||
"""Model save should auto-generate slug from title when slug is empty."""
|
||||
index = ArticleIndexPage(title="Articles", slug="articles")
|
||||
home_page.add_child(instance=index)
|
||||
author = AuthorFactory()
|
||||
article = ArticlePage(
|
||||
title="My Great Article",
|
||||
slug="",
|
||||
author=author,
|
||||
summary="summary",
|
||||
body=[("rich_text", "<p>body</p>")],
|
||||
)
|
||||
index.add_child(instance=article)
|
||||
article.refresh_from_db()
|
||||
assert article.slug == "my-great-article"
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_article_save_auto_generates_search_description(article_index):
|
||||
"""Model save should populate search_description from summary."""
|
||||
author = AuthorFactory()
|
||||
article = ArticlePage(
|
||||
title="SEO Auto",
|
||||
slug="seo-auto",
|
||||
author=author,
|
||||
summary="This is the article summary.",
|
||||
body=[("rich_text", "<p>body</p>")],
|
||||
)
|
||||
article_index.add_child(instance=article)
|
||||
article.save()
|
||||
assert article.search_description == "This is the article summary."
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_article_save_preserves_explicit_search_description(article_index):
|
||||
"""Explicit search_description should not be overwritten."""
|
||||
author = AuthorFactory()
|
||||
article = ArticlePage(
|
||||
title="SEO Explicit",
|
||||
slug="seo-explicit",
|
||||
author=author,
|
||||
summary="Generated summary.",
|
||||
search_description="Custom SEO description.",
|
||||
body=[("rich_text", "<p>body</p>")],
|
||||
)
|
||||
article_index.add_child(instance=article)
|
||||
article.save()
|
||||
assert article.search_description == "Custom SEO description."
|
||||
|
||||
@@ -1,7 +1,5 @@
|
||||
import pytest
|
||||
|
||||
from apps.blog.models import TagMetadata
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_home_context_lists_articles(home_page, article_page):
|
||||
@@ -22,6 +20,7 @@ def test_get_related_articles_fallback(article_page, article_index):
|
||||
assert isinstance(related, list)
|
||||
|
||||
|
||||
def test_tag_metadata_fallback_classes():
|
||||
css = TagMetadata.get_fallback_css()
|
||||
def test_auto_tag_colour_returns_valid_css():
|
||||
from apps.blog.models import get_auto_tag_colour_css
|
||||
css = get_auto_tag_colour_css("test-tag")
|
||||
assert css["bg"].startswith("bg-")
|
||||
|
||||
140
apps/blog/tests/test_search.py
Normal file
140
apps/blog/tests/test_search.py
Normal file
@@ -0,0 +1,140 @@
|
||||
import pytest
|
||||
|
||||
from apps.blog.models import ArticleIndexPage, ArticlePage
|
||||
from apps.blog.tests.factories import AuthorFactory
|
||||
from apps.blog.views import MAX_QUERY_LENGTH
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def search_articles(home_page):
|
||||
"""Create an article index with searchable articles."""
|
||||
index = ArticleIndexPage(title="Articles", slug="articles")
|
||||
home_page.add_child(instance=index)
|
||||
author = AuthorFactory()
|
||||
articles = []
|
||||
for title, summary in [
|
||||
("Understanding LLM Benchmarks", "A deep dive into how language models are evaluated"),
|
||||
("Local Models on Apple Silicon", "Running open-source models on your MacBook"),
|
||||
("Agent Frameworks Compared", "Comparing LangChain, CrewAI, and AutoGen"),
|
||||
]:
|
||||
a = ArticlePage(
|
||||
title=title,
|
||||
slug=title.lower().replace(" ", "-"),
|
||||
author=author,
|
||||
summary=summary,
|
||||
body=[("rich_text", f"<p>{summary} in detail.</p>")],
|
||||
)
|
||||
index.add_child(instance=a)
|
||||
a.save_revision().publish()
|
||||
articles.append(a)
|
||||
return articles
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
class TestSearchView:
|
||||
def test_empty_query_returns_no_results(self, client, home_page):
|
||||
resp = client.get("/search/")
|
||||
assert resp.status_code == 200
|
||||
assert resp.context["query"] == ""
|
||||
assert resp.context["results"] is None
|
||||
|
||||
def test_whitespace_query_returns_no_results(self, client, home_page):
|
||||
resp = client.get("/search/?q= ")
|
||||
assert resp.status_code == 200
|
||||
assert resp.context["query"] == ""
|
||||
assert resp.context["results"] is None
|
||||
|
||||
def test_search_returns_matching_articles(self, client, search_articles):
|
||||
resp = client.get("/search/?q=benchmarks")
|
||||
assert resp.status_code == 200
|
||||
assert resp.context["query"] == "benchmarks"
|
||||
assert resp.context["results"] is not None
|
||||
|
||||
def test_search_no_match_returns_empty_page(self, client, search_articles):
|
||||
resp = client.get("/search/?q=zzzznonexistent")
|
||||
assert resp.status_code == 200
|
||||
assert resp.context["query"] == "zzzznonexistent"
|
||||
# Either None or empty page object
|
||||
results = resp.context["results"]
|
||||
if results is not None:
|
||||
assert len(list(results)) == 0
|
||||
|
||||
def test_query_is_truncated_to_max_length(self, client, home_page):
|
||||
long_query = "a" * 500
|
||||
resp = client.get(f"/search/?q={long_query}")
|
||||
assert resp.status_code == 200
|
||||
assert len(resp.context["query"]) <= MAX_QUERY_LENGTH
|
||||
|
||||
def test_query_preserved_in_template(self, client, search_articles):
|
||||
resp = client.get("/search/?q=LLM")
|
||||
html = resp.content.decode()
|
||||
assert 'value="LLM"' in html
|
||||
|
||||
def test_search_results_page_renders(self, client, search_articles):
|
||||
resp = client.get("/search/?q=models")
|
||||
assert resp.status_code == 200
|
||||
html = resp.content.decode()
|
||||
assert "Search" in html
|
||||
|
||||
def test_search_url_resolves(self, client, home_page):
|
||||
from django.urls import reverse
|
||||
assert reverse("search") == "/search/"
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
class TestSearchFields:
|
||||
def test_search_fields_include_summary(self):
|
||||
field_names = [
|
||||
f.field_name for f in ArticlePage.search_fields if hasattr(f, "field_name")
|
||||
]
|
||||
assert "summary" in field_names
|
||||
|
||||
def test_search_fields_include_body_text(self):
|
||||
field_names = [
|
||||
f.field_name for f in ArticlePage.search_fields if hasattr(f, "field_name")
|
||||
]
|
||||
assert "body_text" in field_names
|
||||
|
||||
def test_search_fields_include_autocomplete_title(self):
|
||||
from wagtail.search.index import AutocompleteField
|
||||
autocomplete_fields = [
|
||||
f for f in ArticlePage.search_fields if isinstance(f, AutocompleteField)
|
||||
]
|
||||
assert any(f.field_name == "title" for f in autocomplete_fields)
|
||||
|
||||
def test_search_fields_include_related_tags(self):
|
||||
from wagtail.search.index import RelatedFields
|
||||
related = [f for f in ArticlePage.search_fields if isinstance(f, RelatedFields)]
|
||||
assert any(f.field_name == "tags" for f in related)
|
||||
|
||||
def test_body_text_excludes_code_blocks(self):
|
||||
author = AuthorFactory()
|
||||
article = ArticlePage(
|
||||
title="Test",
|
||||
slug="test",
|
||||
author=author,
|
||||
summary="summary",
|
||||
body=[
|
||||
("rich_text", "<p>prose content here</p>"),
|
||||
("code", {"language": "python", "filename": "", "raw_code": "def secret(): pass"}),
|
||||
],
|
||||
)
|
||||
assert "prose content here" in article.body_text
|
||||
assert "secret" not in article.body_text
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
class TestSearchNavIntegration:
|
||||
def test_nav_contains_search_form(self, client, home_page):
|
||||
resp = client.get("/")
|
||||
html = resp.content.decode()
|
||||
assert 'role="search"' in html
|
||||
assert 'name="q"' in html
|
||||
assert 'placeholder="Search articles..."' in html
|
||||
|
||||
def test_article_index_contains_search_form(self, client, home_page):
|
||||
index = ArticleIndexPage(title="Articles", slug="articles")
|
||||
home_page.add_child(instance=index)
|
||||
resp = client.get("/articles/")
|
||||
html = resp.content.decode()
|
||||
assert 'name="q"' in html
|
||||
35
apps/blog/tests/test_seo.py
Normal file
35
apps/blog/tests/test_seo.py
Normal file
@@ -0,0 +1,35 @@
|
||||
import pytest
|
||||
|
||||
from apps.blog.models import ArticleIndexPage, ArticlePage
|
||||
from apps.blog.tests.factories import AuthorFactory
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_article_page_renders_core_seo_meta(client, home_page):
|
||||
index = ArticleIndexPage(title="Articles", slug="articles")
|
||||
home_page.add_child(instance=index)
|
||||
author = AuthorFactory()
|
||||
article = ArticlePage(
|
||||
title="SEO Article",
|
||||
slug="seo-article",
|
||||
author=author,
|
||||
summary="Summary content",
|
||||
body=[("rich_text", "<p>Body</p>")],
|
||||
)
|
||||
index.add_child(instance=article)
|
||||
article.save_revision().publish()
|
||||
|
||||
resp = client.get("/articles/seo-article/")
|
||||
html = resp.content.decode()
|
||||
assert resp.status_code == 200
|
||||
assert '<link rel="canonical" href="http' in html
|
||||
assert 'property="og:type" content="article"' in html
|
||||
assert 'name="twitter:card" content="summary_large_image"' in html
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_homepage_renders_website_og_type(client, home_page):
|
||||
resp = client.get("/")
|
||||
html = resp.content.decode()
|
||||
assert resp.status_code == 200
|
||||
assert 'property="og:type" content="website"' in html
|
||||
@@ -1,7 +1,11 @@
|
||||
import pytest
|
||||
import re
|
||||
|
||||
from apps.blog.models import ArticleIndexPage, ArticlePage
|
||||
import pytest
|
||||
from taggit.models import Tag
|
||||
|
||||
from apps.blog.models import ArticleIndexPage, ArticlePage, Category
|
||||
from apps.blog.tests.factories import AuthorFactory
|
||||
from apps.comments.models import Comment
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@@ -29,6 +33,7 @@ def test_article_index_pagination_and_tag_filter(client, home_page):
|
||||
resp = client.get("/articles/?page=2")
|
||||
assert resp.status_code == 200
|
||||
assert resp.context["articles"].number == 2
|
||||
assert "Pagination" in resp.content.decode()
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@@ -59,3 +64,234 @@ def test_article_page_related_context(client, home_page):
|
||||
resp = client.get("/articles/main/")
|
||||
assert resp.status_code == 200
|
||||
assert "related_articles" in resp.context
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_newsletter_forms_render_in_nav_and_footer(client, home_page):
|
||||
resp = client.get("/")
|
||||
html = resp.content.decode()
|
||||
assert resp.status_code == 200
|
||||
# Nav has a search form instead of Subscribe CTA
|
||||
assert 'role="search"' in html
|
||||
assert 'name="q"' in html
|
||||
# Footer has Connect section with social/RSS links (no newsletter form)
|
||||
assert "Connect" in html
|
||||
assert 'name="source" value="nav"' not in html
|
||||
assert 'name="source" value="footer"' not in html
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_article_page_renders_share_links_and_newsletter_form(client, home_page):
|
||||
index = ArticleIndexPage(title="Articles", slug="articles")
|
||||
home_page.add_child(instance=index)
|
||||
author = AuthorFactory()
|
||||
article = ArticlePage(
|
||||
title="Main",
|
||||
slug="main",
|
||||
author=author,
|
||||
summary="summary",
|
||||
body=[("rich_text", "<p>body</p>")],
|
||||
)
|
||||
index.add_child(instance=article)
|
||||
article.save_revision().publish()
|
||||
|
||||
resp = client.get("/articles/main/")
|
||||
html = resp.content.decode()
|
||||
assert resp.status_code == 200
|
||||
assert "Share on X" in html
|
||||
assert "Share on LinkedIn" in html
|
||||
assert 'data-copy-link' in html
|
||||
assert 'name="source" value="article"' in html
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_article_page_renders_approved_comments_and_reply_form(client, home_page):
|
||||
index = ArticleIndexPage(title="Articles", slug="articles")
|
||||
home_page.add_child(instance=index)
|
||||
author = AuthorFactory()
|
||||
article = ArticlePage(
|
||||
title="Main",
|
||||
slug="main",
|
||||
author=author,
|
||||
summary="summary",
|
||||
body=[("rich_text", "<p>body</p>")],
|
||||
)
|
||||
index.add_child(instance=article)
|
||||
article.save_revision().publish()
|
||||
comment = Comment.objects.create(
|
||||
article=article,
|
||||
author_name="A",
|
||||
author_email="a@example.com",
|
||||
body="Top level",
|
||||
is_approved=True,
|
||||
)
|
||||
Comment.objects.create(
|
||||
article=article,
|
||||
parent=comment,
|
||||
author_name="B",
|
||||
author_email="b@example.com",
|
||||
body="Reply",
|
||||
is_approved=True,
|
||||
)
|
||||
|
||||
resp = client.get("/articles/main/")
|
||||
html = resp.content.decode()
|
||||
assert resp.status_code == 200
|
||||
assert "Top level" in html
|
||||
assert "Reply" in html
|
||||
assert f'name="parent_id" value="{comment.id}"' in html
|
||||
match = re.search(r'id="comments-empty-state"[^>]*class="([^"]+)"', html)
|
||||
assert match is not None
|
||||
assert "hidden" in match.group(1).split()
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_article_page_shows_empty_state_when_no_approved_comments(client, home_page):
|
||||
index = ArticleIndexPage(title="Articles", slug="articles")
|
||||
home_page.add_child(instance=index)
|
||||
author = AuthorFactory()
|
||||
article = ArticlePage(
|
||||
title="Main",
|
||||
slug="main",
|
||||
author=author,
|
||||
summary="summary",
|
||||
body=[("rich_text", "<p>body</p>")],
|
||||
)
|
||||
index.add_child(instance=article)
|
||||
article.save_revision().publish()
|
||||
|
||||
resp = client.get("/articles/main/")
|
||||
html = resp.content.decode()
|
||||
|
||||
assert resp.status_code == 200
|
||||
assert 'id="comments-empty-state"' in html
|
||||
assert "No comments yet. Be the first to comment." in html
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_article_page_loads_comment_client_script(client, home_page):
|
||||
index = ArticleIndexPage(title="Articles", slug="articles")
|
||||
home_page.add_child(instance=index)
|
||||
author = AuthorFactory()
|
||||
article = ArticlePage(
|
||||
title="Main",
|
||||
slug="main",
|
||||
author=author,
|
||||
summary="summary",
|
||||
body=[("rich_text", "<p>body</p>")],
|
||||
)
|
||||
index.add_child(instance=article)
|
||||
article.save_revision().publish()
|
||||
|
||||
resp = client.get("/articles/main/")
|
||||
html = resp.content.decode()
|
||||
|
||||
assert resp.status_code == 200
|
||||
assert 'src="/static/js/comments.js"' in html
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_article_index_renders_tag_filter_controls(client, home_page):
|
||||
index = ArticleIndexPage(title="Articles", slug="articles")
|
||||
home_page.add_child(instance=index)
|
||||
author = AuthorFactory()
|
||||
article = ArticlePage(
|
||||
title="Main",
|
||||
slug="main",
|
||||
author=author,
|
||||
summary="summary",
|
||||
body=[("rich_text", "<p>body</p>")],
|
||||
)
|
||||
index.add_child(instance=article)
|
||||
article.save_revision().publish()
|
||||
tag = Tag.objects.create(name="TagOne", slug="tag-one")
|
||||
article.tags.add(tag)
|
||||
article.save_revision().publish()
|
||||
|
||||
resp = client.get("/articles/")
|
||||
html = resp.content.decode()
|
||||
assert resp.status_code == 200
|
||||
assert "/articles/?tag=tag-one" in html
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_article_index_category_route_filters_articles(client, home_page):
|
||||
index = ArticleIndexPage(title="Articles", slug="articles")
|
||||
home_page.add_child(instance=index)
|
||||
author = AuthorFactory()
|
||||
reviews = Category.objects.create(name="Reviews", slug="reviews")
|
||||
tutorials = Category.objects.create(name="Tutorials", slug="tutorials")
|
||||
review_article = ArticlePage(
|
||||
title="Review A",
|
||||
slug="review-a",
|
||||
author=author,
|
||||
summary="summary",
|
||||
body=[("rich_text", "<p>body</p>")],
|
||||
category=reviews,
|
||||
)
|
||||
tutorial_article = ArticlePage(
|
||||
title="Tutorial A",
|
||||
slug="tutorial-a",
|
||||
author=author,
|
||||
summary="summary",
|
||||
body=[("rich_text", "<p>body</p>")],
|
||||
category=tutorials,
|
||||
)
|
||||
index.add_child(instance=review_article)
|
||||
review_article.save_revision().publish()
|
||||
index.add_child(instance=tutorial_article)
|
||||
tutorial_article.save_revision().publish()
|
||||
|
||||
resp = client.get("/articles/category/reviews/")
|
||||
html = resp.content.decode()
|
||||
assert resp.status_code == 200
|
||||
assert "Review A" in html
|
||||
assert "Tutorial A" not in html
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_article_index_category_route_supports_tag_filter(client, home_page):
|
||||
index = ArticleIndexPage(title="Articles", slug="articles")
|
||||
home_page.add_child(instance=index)
|
||||
author = AuthorFactory()
|
||||
reviews = Category.objects.create(name="Reviews", slug="reviews")
|
||||
keep = ArticlePage(
|
||||
title="Keep Me",
|
||||
slug="keep-me",
|
||||
author=author,
|
||||
summary="summary",
|
||||
body=[("rich_text", "<p>body</p>")],
|
||||
category=reviews,
|
||||
)
|
||||
drop = ArticlePage(
|
||||
title="Drop Me",
|
||||
slug="drop-me",
|
||||
author=author,
|
||||
summary="summary",
|
||||
body=[("rich_text", "<p>body</p>")],
|
||||
category=reviews,
|
||||
)
|
||||
index.add_child(instance=keep)
|
||||
keep.save_revision().publish()
|
||||
index.add_child(instance=drop)
|
||||
drop.save_revision().publish()
|
||||
target_tag = Tag.objects.create(name="Python", slug="python")
|
||||
keep.tags.add(target_tag)
|
||||
keep.save_revision().publish()
|
||||
|
||||
resp = client.get("/articles/category/reviews/?tag=python")
|
||||
html = resp.content.decode()
|
||||
assert resp.status_code == 200
|
||||
assert "Keep Me" in html
|
||||
assert "Drop Me" not in html
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_article_index_category_route_allows_empty_existing_category(client, home_page):
|
||||
index = ArticleIndexPage(title="Articles", slug="articles")
|
||||
home_page.add_child(instance=index)
|
||||
Category.objects.create(name="Opinion", slug="opinion")
|
||||
|
||||
resp = client.get("/articles/category/opinion/")
|
||||
assert resp.status_code == 200
|
||||
assert "No articles found." in resp.content.decode()
|
||||
|
||||
43
apps/blog/views.py
Normal file
43
apps/blog/views.py
Normal file
@@ -0,0 +1,43 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from django.core.paginator import EmptyPage, PageNotAnInteger, Paginator
|
||||
from django.http import HttpRequest, HttpResponse
|
||||
from django.template.response import TemplateResponse
|
||||
|
||||
from apps.blog.models import ArticlePage
|
||||
|
||||
RESULTS_PER_PAGE = 12
|
||||
MAX_QUERY_LENGTH = 200
|
||||
|
||||
|
||||
def search(request: HttpRequest) -> HttpResponse:
|
||||
query = request.GET.get("q", "").strip()[:MAX_QUERY_LENGTH]
|
||||
results_page = None
|
||||
paginator = None
|
||||
|
||||
if query:
|
||||
results = (
|
||||
ArticlePage.objects.live()
|
||||
.public()
|
||||
.select_related("author", "category")
|
||||
.prefetch_related("tags__metadata")
|
||||
.search(query)
|
||||
)
|
||||
paginator = Paginator(results, RESULTS_PER_PAGE)
|
||||
page_num = request.GET.get("page")
|
||||
try:
|
||||
results_page = paginator.page(page_num)
|
||||
except PageNotAnInteger:
|
||||
results_page = paginator.page(1)
|
||||
except EmptyPage:
|
||||
results_page = paginator.page(paginator.num_pages)
|
||||
|
||||
return TemplateResponse(
|
||||
request,
|
||||
"blog/search_results.html",
|
||||
{
|
||||
"query": query,
|
||||
"results": results_page,
|
||||
"paginator": paginator,
|
||||
},
|
||||
)
|
||||
@@ -1,7 +1,22 @@
|
||||
import django_filters
|
||||
from taggit.models import Tag
|
||||
from wagtail import hooks
|
||||
from wagtail.admin.filters import WagtailFilterSet
|
||||
from wagtail.admin.ui.components import Component
|
||||
from wagtail.admin.ui.tables import Column, DateColumn
|
||||
from wagtail.admin.ui.tables.pages import BulkActionsColumn, PageStatusColumn, PageTitleColumn
|
||||
from wagtail.admin.viewsets.pages import PageListingViewSet
|
||||
from wagtail.snippets.models import register_snippet
|
||||
from wagtail.snippets.views.snippets import SnippetViewSet
|
||||
|
||||
from apps.blog.models import TagMetadata
|
||||
from apps.authors.models import Author
|
||||
from apps.blog.models import ArticlePage, Category, TagMetadata
|
||||
|
||||
STATUS_CHOICES = [
|
||||
("live", "Published"),
|
||||
("draft", "Draft"),
|
||||
("scheduled", "Scheduled"),
|
||||
]
|
||||
|
||||
|
||||
class TagMetadataViewSet(SnippetViewSet):
|
||||
@@ -11,3 +26,106 @@ class TagMetadataViewSet(SnippetViewSet):
|
||||
|
||||
|
||||
register_snippet(TagMetadataViewSet)
|
||||
|
||||
|
||||
class CategoryViewSet(SnippetViewSet):
|
||||
model = Category
|
||||
icon = "folder-open-inverse"
|
||||
list_display = ["name", "slug", "show_in_nav", "sort_order"]
|
||||
list_filter = ["show_in_nav"]
|
||||
ordering = ["sort_order", "name"]
|
||||
|
||||
|
||||
register_snippet(CategoryViewSet)
|
||||
|
||||
|
||||
# ── Articles page listing ────────────────────────────────────────────────────
|
||||
|
||||
|
||||
class StatusFilter(django_filters.ChoiceFilter):
|
||||
def filter(self, qs, value): # noqa: A003
|
||||
if value == "live":
|
||||
return qs.filter(live=True)
|
||||
if value == "draft":
|
||||
return qs.filter(live=False, go_live_at__isnull=True)
|
||||
if value == "scheduled":
|
||||
return qs.filter(live=False, go_live_at__isnull=False)
|
||||
return qs
|
||||
|
||||
|
||||
class ArticleFilterSet(WagtailFilterSet):
|
||||
category = django_filters.ModelChoiceFilter(
|
||||
queryset=Category.objects.all(),
|
||||
empty_label="All categories",
|
||||
)
|
||||
author = django_filters.ModelChoiceFilter(
|
||||
queryset=Author.objects.all(),
|
||||
empty_label="All authors",
|
||||
)
|
||||
status = StatusFilter(
|
||||
choices=STATUS_CHOICES,
|
||||
empty_label="All statuses",
|
||||
)
|
||||
tag = django_filters.ModelChoiceFilter(
|
||||
field_name="tags",
|
||||
queryset=Tag.objects.all(),
|
||||
empty_label="All tags",
|
||||
)
|
||||
|
||||
class Meta:
|
||||
model = ArticlePage
|
||||
fields = []
|
||||
|
||||
|
||||
class ArticlePageListingViewSet(PageListingViewSet):
|
||||
model = ArticlePage
|
||||
icon = "doc-full"
|
||||
menu_label = "Articles"
|
||||
menu_order = 200
|
||||
add_to_admin_menu = True
|
||||
name = "articles"
|
||||
columns = [
|
||||
BulkActionsColumn("bulk_actions"),
|
||||
PageTitleColumn("title", classname="title"),
|
||||
Column("author", label="Author", sort_key="author__name"),
|
||||
Column("category", label="Category"),
|
||||
DateColumn("published_date", label="Published", sort_key="published_date"),
|
||||
PageStatusColumn("status", sort_key="live"),
|
||||
]
|
||||
filterset_class = ArticleFilterSet
|
||||
default_ordering = "-published_date"
|
||||
|
||||
|
||||
@hooks.register("register_admin_viewset")
|
||||
def register_article_listing():
|
||||
return ArticlePageListingViewSet("articles")
|
||||
|
||||
|
||||
# ── Dashboard panel ──────────────────────────────────────────────────────────
|
||||
|
||||
|
||||
class ArticlesSummaryPanel(Component):
|
||||
name = "articles_summary"
|
||||
template_name = "blog/panels/articles_summary.html"
|
||||
order = 110
|
||||
|
||||
def get_context_data(self, parent_context):
|
||||
context = super().get_context_data(parent_context)
|
||||
context["drafts"] = (
|
||||
ArticlePage.objects.not_live()
|
||||
.order_by("-latest_revision_created_at")[:5]
|
||||
)
|
||||
context["scheduled"] = (
|
||||
ArticlePage.objects.filter(go_live_at__isnull=False, live=False)
|
||||
.order_by("go_live_at")[:5]
|
||||
)
|
||||
context["recent"] = (
|
||||
ArticlePage.objects.live()
|
||||
.order_by("-published_date")[:5]
|
||||
)
|
||||
return context
|
||||
|
||||
|
||||
@hooks.register("construct_homepage_panels")
|
||||
def add_articles_summary_panel(request, panels):
|
||||
panels.append(ArticlesSummaryPanel())
|
||||
|
||||
1
apps/comments/management/__init__.py
Normal file
1
apps/comments/management/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
|
||||
1
apps/comments/management/commands/__init__.py
Normal file
1
apps/comments/management/commands/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
|
||||
38
apps/comments/management/commands/purge_old_comment_data.py
Normal file
38
apps/comments/management/commands/purge_old_comment_data.py
Normal file
@@ -0,0 +1,38 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import timedelta
|
||||
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.utils import timezone
|
||||
|
||||
from apps.comments.models import Comment, CommentReaction
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = "Nullify comment personal data for comments older than the retention window."
|
||||
|
||||
def add_arguments(self, parser):
|
||||
parser.add_argument(
|
||||
"--months",
|
||||
type=int,
|
||||
default=24,
|
||||
help="Retention window in months before personal data is purged (default: 24).",
|
||||
)
|
||||
|
||||
def handle(self, *args, **options):
|
||||
months = options["months"]
|
||||
cutoff = timezone.now() - timedelta(days=30 * months)
|
||||
|
||||
purged = (
|
||||
Comment.objects.filter(created_at__lt=cutoff)
|
||||
.exclude(author_email="")
|
||||
.update(author_email="", ip_address=None)
|
||||
)
|
||||
self.stdout.write(self.style.SUCCESS(f"Purged personal data for {purged} comment(s)."))
|
||||
|
||||
reactions_purged = (
|
||||
CommentReaction.objects.filter(created_at__lt=cutoff)
|
||||
.exclude(session_key="")
|
||||
.update(session_key="")
|
||||
)
|
||||
self.stdout.write(self.style.SUCCESS(f"Purged session keys for {reactions_purged} reaction(s)."))
|
||||
27
apps/comments/migrations/0002_commentreaction.py
Normal file
27
apps/comments/migrations/0002_commentreaction.py
Normal file
@@ -0,0 +1,27 @@
|
||||
# Generated by Django 5.2.11 on 2026-03-03 22:49
|
||||
|
||||
import django.db.models.deletion
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('comments', '0001_initial'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name='CommentReaction',
|
||||
fields=[
|
||||
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||
('reaction_type', models.CharField(choices=[('heart', '❤️'), ('plus_one', '👍')], max_length=20)),
|
||||
('session_key', models.CharField(max_length=64)),
|
||||
('created_at', models.DateTimeField(auto_now_add=True)),
|
||||
('comment', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='reactions', to='comments.comment')),
|
||||
],
|
||||
options={
|
||||
'constraints': [models.UniqueConstraint(fields=('comment', 'reaction_type', 'session_key'), name='unique_comment_reaction_per_session')],
|
||||
},
|
||||
),
|
||||
]
|
||||
@@ -23,3 +23,21 @@ class Comment(models.Model):
|
||||
|
||||
def __str__(self) -> str:
|
||||
return f"Comment by {self.author_name}"
|
||||
|
||||
|
||||
class CommentReaction(models.Model):
|
||||
comment = models.ForeignKey(Comment, on_delete=models.CASCADE, related_name="reactions")
|
||||
reaction_type = models.CharField(max_length=20, choices=[("heart", "❤️"), ("plus_one", "👍")])
|
||||
session_key = models.CharField(max_length=64)
|
||||
created_at = models.DateTimeField(auto_now_add=True)
|
||||
|
||||
class Meta:
|
||||
constraints = [
|
||||
models.UniqueConstraint(
|
||||
fields=["comment", "reaction_type", "session_key"],
|
||||
name="unique_comment_reaction_per_session",
|
||||
)
|
||||
]
|
||||
|
||||
def __str__(self) -> str:
|
||||
return f"{self.reaction_type} on comment {self.comment_id}"
|
||||
|
||||
97
apps/comments/tests/test_admin.py
Normal file
97
apps/comments/tests/test_admin.py
Normal file
@@ -0,0 +1,97 @@
|
||||
import pytest
|
||||
from django.test import override_settings
|
||||
|
||||
from apps.blog.models import ArticleIndexPage, ArticlePage
|
||||
from apps.blog.tests.factories import AuthorFactory
|
||||
from apps.comments.models import Comment
|
||||
from apps.comments.wagtail_hooks import ApproveCommentBulkAction, CommentViewSet
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_comment_viewset_annotates_pending_in_article(rf, home_page):
|
||||
index = ArticleIndexPage(title="Articles", slug="articles")
|
||||
home_page.add_child(instance=index)
|
||||
author = AuthorFactory()
|
||||
article = ArticlePage(title="A", slug="a", author=author, summary="s", body=[("rich_text", "<p>body</p>")])
|
||||
index.add_child(instance=article)
|
||||
article.save_revision().publish()
|
||||
|
||||
pending = Comment.objects.create(
|
||||
article=article,
|
||||
author_name="Pending",
|
||||
author_email="pending@example.com",
|
||||
body="Awaiting moderation",
|
||||
is_approved=False,
|
||||
)
|
||||
Comment.objects.create(
|
||||
article=article,
|
||||
author_name="Pending2",
|
||||
author_email="pending2@example.com",
|
||||
body="Awaiting moderation too",
|
||||
is_approved=False,
|
||||
)
|
||||
Comment.objects.create(
|
||||
article=article,
|
||||
author_name="Approved",
|
||||
author_email="approved@example.com",
|
||||
body="Already approved",
|
||||
is_approved=True,
|
||||
)
|
||||
|
||||
viewset = CommentViewSet()
|
||||
qs = viewset.get_queryset(rf.get("/cms/snippets/comments/comment/"))
|
||||
annotated = qs.get(pk=pending.pk)
|
||||
|
||||
assert annotated.pending_in_article == 2
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_bulk_approve_action_marks_selected_pending_comments_as_approved(home_page):
|
||||
index = ArticleIndexPage(title="Articles", slug="articles")
|
||||
home_page.add_child(instance=index)
|
||||
author = AuthorFactory()
|
||||
article = ArticlePage(title="A", slug="a", author=author, summary="s", body=[("rich_text", "<p>body</p>")])
|
||||
index.add_child(instance=article)
|
||||
article.save_revision().publish()
|
||||
|
||||
pending = Comment.objects.create(
|
||||
article=article,
|
||||
author_name="Pending",
|
||||
author_email="pending@example.com",
|
||||
body="Awaiting moderation",
|
||||
is_approved=False,
|
||||
)
|
||||
approved = Comment.objects.create(
|
||||
article=article,
|
||||
author_name="Approved",
|
||||
author_email="approved@example.com",
|
||||
body="Already approved",
|
||||
is_approved=True,
|
||||
)
|
||||
|
||||
class _Context:
|
||||
model = Comment
|
||||
|
||||
updated, child_updates = ApproveCommentBulkAction.execute_action([pending, approved], self=_Context())
|
||||
pending.refresh_from_db()
|
||||
approved.refresh_from_db()
|
||||
|
||||
assert updated == 1
|
||||
assert child_updates == 0
|
||||
assert pending.is_approved is True
|
||||
assert approved.is_approved is True
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@override_settings(ALLOWED_HOSTS=["testserver", "localhost", "127.0.0.1"])
|
||||
def test_comments_snippet_index_page_loads(client, django_user_model, home_page):
|
||||
admin = django_user_model.objects.create_superuser(
|
||||
username="admin",
|
||||
email="admin@example.com",
|
||||
password="admin-pass",
|
||||
)
|
||||
client.force_login(admin)
|
||||
|
||||
response = client.get("/cms/snippets/comments/comment/")
|
||||
|
||||
assert response.status_code == 200
|
||||
40
apps/comments/tests/test_commands.py
Normal file
40
apps/comments/tests/test_commands.py
Normal file
@@ -0,0 +1,40 @@
|
||||
from datetime import timedelta
|
||||
|
||||
import pytest
|
||||
from django.core.management import call_command
|
||||
from django.utils import timezone
|
||||
|
||||
from apps.blog.models import ArticleIndexPage, ArticlePage
|
||||
from apps.blog.tests.factories import AuthorFactory
|
||||
from apps.comments.models import Comment
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_purge_old_comment_data_clears_personal_fields(home_page):
|
||||
index = ArticleIndexPage(title="Articles", slug="articles")
|
||||
home_page.add_child(instance=index)
|
||||
author = AuthorFactory()
|
||||
article = ArticlePage(
|
||||
title="Article",
|
||||
slug="article",
|
||||
author=author,
|
||||
summary="summary",
|
||||
body=[("rich_text", "<p>body</p>")],
|
||||
)
|
||||
index.add_child(instance=article)
|
||||
article.save_revision().publish()
|
||||
|
||||
old_comment = Comment.objects.create(
|
||||
article=article,
|
||||
author_name="Old",
|
||||
author_email="old@example.com",
|
||||
body="legacy",
|
||||
ip_address="127.0.0.1",
|
||||
)
|
||||
Comment.objects.filter(pk=old_comment.pk).update(created_at=timezone.now() - timedelta(days=800))
|
||||
|
||||
call_command("purge_old_comment_data")
|
||||
|
||||
old_comment.refresh_from_db()
|
||||
assert old_comment.author_email == ""
|
||||
assert old_comment.ip_address is None
|
||||
@@ -1,5 +1,6 @@
|
||||
import pytest
|
||||
from django.core.cache import cache
|
||||
from django.test import override_settings
|
||||
|
||||
from apps.comments.forms import CommentForm
|
||||
|
||||
@@ -11,6 +12,7 @@ def test_comment_form_rejects_blank_body():
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@override_settings(COMMENT_RATE_LIMIT_PER_MINUTE=3)
|
||||
def test_comment_rate_limit(client, article_page):
|
||||
cache.clear()
|
||||
payload = {
|
||||
|
||||
350
apps/comments/tests/test_v2.py
Normal file
350
apps/comments/tests/test_v2.py
Normal file
@@ -0,0 +1,350 @@
|
||||
"""Tests for Comments v2: HTMX, Turnstile, reactions, polling, CSP."""
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import timedelta
|
||||
from unittest.mock import patch
|
||||
|
||||
import pytest
|
||||
from django.core.cache import cache
|
||||
from django.core.management import call_command
|
||||
from django.test import override_settings
|
||||
from django.utils import timezone
|
||||
|
||||
from apps.blog.models import ArticleIndexPage, ArticlePage
|
||||
from apps.blog.tests.factories import AuthorFactory
|
||||
from apps.comments.models import Comment, CommentReaction
|
||||
|
||||
# ── Fixtures ──────────────────────────────────────────────────────────────────
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def _article(home_page):
|
||||
"""Create a published article with comments enabled."""
|
||||
index = ArticleIndexPage(title="Articles", slug="articles")
|
||||
home_page.add_child(instance=index)
|
||||
author = AuthorFactory()
|
||||
article = ArticlePage(
|
||||
title="Test Article",
|
||||
slug="test-article",
|
||||
author=author,
|
||||
summary="summary",
|
||||
body=[("rich_text", "<p>body</p>")],
|
||||
)
|
||||
index.add_child(instance=article)
|
||||
article.save_revision().publish()
|
||||
return article
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def approved_comment(_article):
|
||||
return Comment.objects.create(
|
||||
article=_article,
|
||||
author_name="Alice",
|
||||
author_email="alice@example.com",
|
||||
body="Great article!",
|
||||
is_approved=True,
|
||||
)
|
||||
|
||||
|
||||
def _post_comment(client, article, extra=None, htmx=False):
|
||||
cache.clear()
|
||||
payload = {
|
||||
"article_id": article.id,
|
||||
"author_name": "Test",
|
||||
"author_email": "test@example.com",
|
||||
"body": "Hello world",
|
||||
"honeypot": "",
|
||||
}
|
||||
if extra:
|
||||
payload.update(extra)
|
||||
headers = {}
|
||||
if htmx:
|
||||
headers["HTTP_HX_REQUEST"] = "true"
|
||||
return client.post("/comments/post/", payload, **headers)
|
||||
|
||||
|
||||
# ── HTMX Response Contracts ──────────────────────────────────────────────────
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_htmx_post_returns_form_with_moderation_on_success(client, _article):
|
||||
"""HTMX POST with Turnstile disabled returns fresh form + moderation message."""
|
||||
resp = _post_comment(client, _article, htmx=True)
|
||||
assert resp.status_code == 200
|
||||
assert b"awaiting moderation" in resp.content
|
||||
# Response swaps the form container (contains form + success message)
|
||||
assert b"comment-form-container" in resp.content
|
||||
assert "HX-Request" in resp["Vary"]
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@override_settings(TURNSTILE_SECRET_KEY="test-secret")
|
||||
def test_htmx_post_returns_form_plus_oob_comment_when_approved(client, _article):
|
||||
"""HTMX POST with successful Turnstile returns fresh form + OOB comment."""
|
||||
with patch("apps.comments.views._verify_turnstile", return_value=True):
|
||||
resp = _post_comment(client, _article, extra={"cf-turnstile-response": "tok"}, htmx=True)
|
||||
assert resp.status_code == 200
|
||||
content = resp.content.decode()
|
||||
# Fresh form container is the primary response
|
||||
assert "comment-form-container" in content
|
||||
assert "Comment posted!" in content
|
||||
# OOB swap appends the comment to #comments-list
|
||||
assert "hx-swap-oob" in content
|
||||
assert "Hello world" in content
|
||||
assert 'id="comments-empty-state" hx-swap-oob="delete"' in content
|
||||
comment = Comment.objects.get()
|
||||
assert comment.is_approved is True
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_htmx_post_returns_form_with_errors_on_invalid(client, _article):
|
||||
"""HTMX POST with invalid data returns form with errors (HTTP 200)."""
|
||||
cache.clear()
|
||||
resp = client.post(
|
||||
"/comments/post/",
|
||||
{"article_id": _article.id, "author_name": "T", "author_email": "t@t.com", "body": " ", "honeypot": ""},
|
||||
HTTP_HX_REQUEST="true",
|
||||
)
|
||||
assert resp.status_code == 200
|
||||
assert b"comment-form-container" in resp.content
|
||||
assert b"Comment form errors" in resp.content
|
||||
assert "HX-Request" in resp["Vary"]
|
||||
assert Comment.objects.count() == 0
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@override_settings(TURNSTILE_SECRET_KEY="test-secret")
|
||||
def test_htmx_reply_returns_oob_reply_when_approved(client, _article, approved_comment):
|
||||
"""Approved reply via HTMX returns compact reply partial via OOB swap."""
|
||||
cache.clear()
|
||||
with patch("apps.comments.views._verify_turnstile", return_value=True):
|
||||
resp = client.post(
|
||||
"/comments/post/",
|
||||
{
|
||||
"article_id": _article.id,
|
||||
"parent_id": approved_comment.id,
|
||||
"author_name": "Replier",
|
||||
"author_email": "r@r.com",
|
||||
"body": "Nice reply",
|
||||
"honeypot": "",
|
||||
"cf-turnstile-response": "tok",
|
||||
},
|
||||
HTTP_HX_REQUEST="true",
|
||||
)
|
||||
content = resp.content.decode()
|
||||
assert resp.status_code == 200
|
||||
# OOB targets a stable, explicit replies container for the parent comment.
|
||||
assert f'hx-swap-oob="beforeend:#replies-for-{approved_comment.id}"' in content
|
||||
# Verify content is rendered (not empty due to context mismatch)
|
||||
assert "Replier" in content
|
||||
assert "Nice reply" in content
|
||||
reply = Comment.objects.exclude(pk=approved_comment.pk).get()
|
||||
assert f"comment-{reply.id}" in content
|
||||
assert reply.parent_id == approved_comment.id
|
||||
assert reply.is_approved is True
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_non_htmx_post_still_redirects(client, _article):
|
||||
"""Non-HTMX POST continues to redirect (progressive enhancement)."""
|
||||
resp = _post_comment(client, _article)
|
||||
assert resp.status_code == 302
|
||||
assert resp["Location"].endswith("?commented=pending")
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_htmx_error_with_tampered_parent_id_falls_back_to_main_form(client, _article):
|
||||
"""Tampered/non-numeric parent_id falls back to main form error response."""
|
||||
cache.clear()
|
||||
resp = client.post(
|
||||
"/comments/post/",
|
||||
{"article_id": _article.id, "parent_id": "not-a-number", "author_name": "T",
|
||||
"author_email": "t@t.com", "body": " ", "honeypot": ""},
|
||||
HTTP_HX_REQUEST="true",
|
||||
)
|
||||
assert resp.status_code == 200
|
||||
assert b"comment-form-container" in resp.content
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_htmx_invalid_reply_rerenders_reply_form_with_values(client, _article, approved_comment):
|
||||
"""Invalid reply keeps user input and returns the reply form container."""
|
||||
cache.clear()
|
||||
resp = client.post(
|
||||
"/comments/post/",
|
||||
{
|
||||
"article_id": _article.id,
|
||||
"parent_id": approved_comment.id,
|
||||
"author_name": "Reply User",
|
||||
"author_email": "reply@example.com",
|
||||
"body": " ",
|
||||
"honeypot": "",
|
||||
},
|
||||
HTTP_HX_REQUEST="true",
|
||||
)
|
||||
assert resp.status_code == 200
|
||||
content = resp.content.decode()
|
||||
assert f'id="reply-form-container-{approved_comment.id}"' in content
|
||||
assert "Comment form errors" in content
|
||||
assert 'value="Reply User"' in content
|
||||
assert "reply@example.com" in content
|
||||
|
||||
|
||||
# ── Turnstile Integration ────────────────────────────────────────────────────
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@override_settings(TURNSTILE_SECRET_KEY="test-secret")
|
||||
def test_turnstile_failure_keeps_comment_unapproved(client, _article):
|
||||
"""When Turnstile verification fails, comment stays unapproved."""
|
||||
with patch("apps.comments.views._verify_turnstile", return_value=False):
|
||||
_post_comment(client, _article, extra={"cf-turnstile-response": "bad-tok"})
|
||||
comment = Comment.objects.get()
|
||||
assert comment.is_approved is False
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_turnstile_disabled_keeps_comment_unapproved(client, _article):
|
||||
"""When TURNSTILE_SECRET_KEY is empty, comment stays unapproved."""
|
||||
_post_comment(client, _article)
|
||||
comment = Comment.objects.get()
|
||||
assert comment.is_approved is False
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@override_settings(TURNSTILE_SECRET_KEY="test-secret", TURNSTILE_EXPECTED_HOSTNAME="nohypeai.com")
|
||||
def test_turnstile_hostname_mismatch_rejects(client, _article):
|
||||
"""Turnstile hostname mismatch keeps comment unapproved."""
|
||||
mock_resp = type("R", (), {"json": lambda self: {"success": True, "hostname": "evil.com"}})()
|
||||
with patch("apps.comments.views.http_requests.post", return_value=mock_resp):
|
||||
_post_comment(client, _article, extra={"cf-turnstile-response": "tok"})
|
||||
comment = Comment.objects.get()
|
||||
assert comment.is_approved is False
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@override_settings(TURNSTILE_SECRET_KEY="test-secret")
|
||||
def test_turnstile_timeout_fails_closed(client, _article):
|
||||
"""Network error during Turnstile verification fails closed."""
|
||||
with patch("apps.comments.views.http_requests.post", side_effect=Exception("timeout")):
|
||||
_post_comment(client, _article, extra={"cf-turnstile-response": "tok"})
|
||||
comment = Comment.objects.get()
|
||||
assert comment.is_approved is False
|
||||
|
||||
|
||||
# ── Polling ───────────────────────────────────────────────────────────────────
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_comment_poll_returns_new_comments(_article, client, approved_comment):
|
||||
"""Poll endpoint returns only comments after the given ID."""
|
||||
resp = client.get(f"/comments/poll/{_article.id}/?after_id=0")
|
||||
assert resp.status_code == 200
|
||||
assert b"Alice" in resp.content
|
||||
|
||||
resp2 = client.get(f"/comments/poll/{_article.id}/?after_id={approved_comment.id}")
|
||||
assert resp2.status_code == 200
|
||||
assert b"Alice" not in resp2.content
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_comment_poll_no_duplicates(_article, client, approved_comment):
|
||||
"""Polling with current latest ID returns empty."""
|
||||
resp = client.get(f"/comments/poll/{_article.id}/?after_id={approved_comment.id}")
|
||||
assert b"comment-" not in resp.content
|
||||
|
||||
|
||||
# ── Reactions ─────────────────────────────────────────────────────────────────
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_react_creates_reaction(client, approved_comment):
|
||||
cache.clear()
|
||||
resp = client.post(
|
||||
f"/comments/{approved_comment.id}/react/",
|
||||
{"reaction_type": "heart"},
|
||||
HTTP_HX_REQUEST="true",
|
||||
)
|
||||
assert resp.status_code == 200
|
||||
assert CommentReaction.objects.count() == 1
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_react_toggle_removes_reaction(client, approved_comment):
|
||||
"""Second reaction of same type removes it (toggle)."""
|
||||
cache.clear()
|
||||
client.post(f"/comments/{approved_comment.id}/react/", {"reaction_type": "heart"})
|
||||
client.post(f"/comments/{approved_comment.id}/react/", {"reaction_type": "heart"})
|
||||
assert CommentReaction.objects.count() == 0
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_react_different_types_coexist(client, approved_comment):
|
||||
cache.clear()
|
||||
client.post(f"/comments/{approved_comment.id}/react/", {"reaction_type": "heart"})
|
||||
client.post(f"/comments/{approved_comment.id}/react/", {"reaction_type": "plus_one"})
|
||||
assert CommentReaction.objects.count() == 2
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_react_invalid_type_returns_400(client, approved_comment):
|
||||
cache.clear()
|
||||
resp = client.post(f"/comments/{approved_comment.id}/react/", {"reaction_type": "invalid"})
|
||||
assert resp.status_code == 400
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_react_on_unapproved_comment_returns_404(client, _article):
|
||||
cache.clear()
|
||||
comment = Comment.objects.create(
|
||||
article=_article, author_name="B", author_email="b@b.com", body="x", is_approved=False,
|
||||
)
|
||||
resp = client.post(f"/comments/{comment.id}/react/", {"reaction_type": "heart"})
|
||||
assert resp.status_code == 404
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@override_settings(REACTION_RATE_LIMIT_PER_MINUTE=2)
|
||||
def test_react_rate_limit(client, approved_comment):
|
||||
cache.clear()
|
||||
for _ in range(2):
|
||||
client.post(f"/comments/{approved_comment.id}/react/", {"reaction_type": "heart"})
|
||||
resp = client.post(f"/comments/{approved_comment.id}/react/", {"reaction_type": "plus_one"})
|
||||
assert resp.status_code == 429
|
||||
|
||||
|
||||
# ── CSP ───────────────────────────────────────────────────────────────────────
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_csp_allows_turnstile(client, _article):
|
||||
"""CSP header includes Cloudflare Turnstile domains."""
|
||||
resp = client.get(_article.url)
|
||||
csp = resp.get("Content-Security-Policy", "")
|
||||
assert "challenges.cloudflare.com" in csp
|
||||
assert "frame-src" in csp
|
||||
|
||||
|
||||
# ── Purge Command Extension ──────────────────────────────────────────────────
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_purge_clears_reaction_session_keys(home_page):
|
||||
index = ArticleIndexPage(title="Articles", slug="articles")
|
||||
home_page.add_child(instance=index)
|
||||
author = AuthorFactory()
|
||||
article = ArticlePage(title="A", slug="a", author=author, summary="s", body=[("rich_text", "<p>b</p>")])
|
||||
index.add_child(instance=article)
|
||||
article.save_revision().publish()
|
||||
|
||||
comment = Comment.objects.create(
|
||||
article=article, author_name="X", author_email="x@x.com", body="y", is_approved=True,
|
||||
)
|
||||
reaction = CommentReaction.objects.create(
|
||||
comment=comment, reaction_type="heart", session_key="abc123",
|
||||
)
|
||||
CommentReaction.objects.filter(pk=reaction.pk).update(created_at=timezone.now() - timedelta(days=800))
|
||||
|
||||
call_command("purge_old_comment_data")
|
||||
reaction.refresh_from_db()
|
||||
assert reaction.session_key == ""
|
||||
@@ -1,5 +1,8 @@
|
||||
from unittest.mock import patch
|
||||
|
||||
import pytest
|
||||
from django.core.cache import cache
|
||||
from django.test import override_settings
|
||||
|
||||
from apps.blog.models import ArticleIndexPage, ArticlePage
|
||||
from apps.blog.tests.factories import AuthorFactory
|
||||
@@ -27,9 +30,64 @@ def test_comment_post_flow(client, home_page):
|
||||
},
|
||||
)
|
||||
assert resp.status_code == 302
|
||||
assert resp["Location"].endswith("?commented=pending")
|
||||
assert Comment.objects.count() == 1
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_comment_post_redirect_banner_renders_on_article_page(client, home_page):
|
||||
cache.clear()
|
||||
index = ArticleIndexPage(title="Articles", slug="articles")
|
||||
home_page.add_child(instance=index)
|
||||
author = AuthorFactory()
|
||||
article = ArticlePage(title="A", slug="a", author=author, summary="s", body=[("rich_text", "<p>body</p>")])
|
||||
index.add_child(instance=article)
|
||||
article.save_revision().publish()
|
||||
|
||||
resp = client.post(
|
||||
"/comments/post/",
|
||||
{
|
||||
"article_id": article.id,
|
||||
"author_name": "Test",
|
||||
"author_email": "test@example.com",
|
||||
"body": "Hello",
|
||||
"honeypot": "",
|
||||
},
|
||||
follow=True,
|
||||
)
|
||||
assert resp.status_code == 200
|
||||
assert b"Your comment has been posted and is awaiting moderation." in resp.content
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@override_settings(TURNSTILE_SECRET_KEY="test-secret")
|
||||
def test_comment_post_redirect_banner_renders_approved_state(client, home_page):
|
||||
cache.clear()
|
||||
index = ArticleIndexPage(title="Articles", slug="articles")
|
||||
home_page.add_child(instance=index)
|
||||
author = AuthorFactory()
|
||||
article = ArticlePage(title="A", slug="a", author=author, summary="s", body=[("rich_text", "<p>body</p>")])
|
||||
index.add_child(instance=article)
|
||||
article.save_revision().publish()
|
||||
|
||||
with patch("apps.comments.views._verify_turnstile", return_value=True):
|
||||
resp = client.post(
|
||||
"/comments/post/",
|
||||
{
|
||||
"article_id": article.id,
|
||||
"author_name": "Test",
|
||||
"author_email": "test@example.com",
|
||||
"body": "Hello",
|
||||
"honeypot": "",
|
||||
"cf-turnstile-response": "tok",
|
||||
},
|
||||
follow=True,
|
||||
)
|
||||
|
||||
assert resp.status_code == 200
|
||||
assert b"Comment posted!" in resp.content
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_comment_post_rejected_when_comments_disabled(client, home_page):
|
||||
cache.clear()
|
||||
@@ -59,3 +117,100 @@ def test_comment_post_rejected_when_comments_disabled(client, home_page):
|
||||
)
|
||||
assert resp.status_code == 404
|
||||
assert Comment.objects.count() == 0
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_invalid_comment_post_rerenders_form_with_errors(client, home_page):
|
||||
cache.clear()
|
||||
index = ArticleIndexPage(title="Articles", slug="articles")
|
||||
home_page.add_child(instance=index)
|
||||
author = AuthorFactory()
|
||||
article = ArticlePage(title="A", slug="a", author=author, summary="s", body=[("rich_text", "<p>body</p>")])
|
||||
index.add_child(instance=article)
|
||||
article.save_revision().publish()
|
||||
|
||||
resp = client.post(
|
||||
"/comments/post/",
|
||||
{
|
||||
"article_id": article.id,
|
||||
"author_name": "Test",
|
||||
"author_email": "test@example.com",
|
||||
"body": " ",
|
||||
"honeypot": "",
|
||||
},
|
||||
)
|
||||
|
||||
assert resp.status_code == 200
|
||||
assert b'aria-label="Comment form errors"' in resp.content
|
||||
assert b'value="Test"' in resp.content
|
||||
assert b"test@example.com" in resp.content
|
||||
assert Comment.objects.count() == 0
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_comment_reply_depth_is_enforced(client, home_page):
|
||||
cache.clear()
|
||||
index = ArticleIndexPage(title="Articles", slug="articles")
|
||||
home_page.add_child(instance=index)
|
||||
author = AuthorFactory()
|
||||
article = ArticlePage(title="A", slug="a", author=author, summary="s", body=[("rich_text", "<p>body</p>")])
|
||||
index.add_child(instance=article)
|
||||
article.save_revision().publish()
|
||||
|
||||
parent = Comment.objects.create(
|
||||
article=article,
|
||||
author_name="Parent",
|
||||
author_email="p@example.com",
|
||||
body="Parent",
|
||||
is_approved=True,
|
||||
)
|
||||
child = Comment.objects.create(
|
||||
article=article,
|
||||
parent=parent,
|
||||
author_name="Child",
|
||||
author_email="c@example.com",
|
||||
body="Child",
|
||||
is_approved=True,
|
||||
)
|
||||
|
||||
resp = client.post(
|
||||
"/comments/post/",
|
||||
{
|
||||
"article_id": article.id,
|
||||
"parent_id": child.id,
|
||||
"author_name": "TooDeep",
|
||||
"author_email": "deep@example.com",
|
||||
"body": "Nope",
|
||||
"honeypot": "",
|
||||
},
|
||||
)
|
||||
assert resp.status_code == 200
|
||||
assert b"Reply depth exceeds the allowed limit" in resp.content
|
||||
assert Comment.objects.count() == 2
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@override_settings(TRUSTED_PROXY_IPS=[])
|
||||
def test_comment_uses_remote_addr_when_proxy_untrusted(client, home_page):
|
||||
cache.clear()
|
||||
index = ArticleIndexPage(title="Articles", slug="articles")
|
||||
home_page.add_child(instance=index)
|
||||
author = AuthorFactory()
|
||||
article = ArticlePage(title="A", slug="a", author=author, summary="s", body=[("rich_text", "<p>body</p>")])
|
||||
index.add_child(instance=article)
|
||||
article.save_revision().publish()
|
||||
|
||||
client.post(
|
||||
"/comments/post/",
|
||||
{
|
||||
"article_id": article.id,
|
||||
"author_name": "Test",
|
||||
"author_email": "test@example.com",
|
||||
"body": "Hello",
|
||||
"honeypot": "",
|
||||
},
|
||||
REMOTE_ADDR="10.0.0.1",
|
||||
HTTP_X_FORWARDED_FOR="203.0.113.7",
|
||||
)
|
||||
comment = Comment.objects.get()
|
||||
assert comment.ip_address == "10.0.0.1"
|
||||
|
||||
@@ -1,7 +1,9 @@
|
||||
from django.urls import path
|
||||
|
||||
from apps.comments.views import CommentCreateView
|
||||
from apps.comments.views import CommentCreateView, comment_poll, comment_react
|
||||
|
||||
urlpatterns = [
|
||||
path("post/", CommentCreateView.as_view(), name="comment_post"),
|
||||
path("poll/<int:article_id>/", comment_poll, name="comment_poll"),
|
||||
path("<int:comment_id>/react/", comment_react, name="comment_react"),
|
||||
]
|
||||
|
||||
@@ -1,22 +1,196 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from django.contrib import messages
|
||||
import logging
|
||||
|
||||
import requests as http_requests
|
||||
from django.conf import settings
|
||||
from django.core.cache import cache
|
||||
from django.http import HttpResponse
|
||||
from django.shortcuts import get_object_or_404, redirect
|
||||
from django.core.exceptions import ValidationError
|
||||
from django.db import IntegrityError
|
||||
from django.db.models import Count, Prefetch
|
||||
from django.http import HttpResponse, JsonResponse
|
||||
from django.shortcuts import get_object_or_404, redirect, render
|
||||
from django.template.loader import render_to_string
|
||||
from django.utils.cache import patch_vary_headers
|
||||
from django.views import View
|
||||
from django.views.decorators.http import require_GET, require_POST
|
||||
|
||||
from apps.blog.models import ArticlePage
|
||||
from apps.comments.forms import CommentForm
|
||||
from apps.comments.models import Comment
|
||||
from apps.comments.models import Comment, CommentReaction
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def client_ip_from_request(request) -> str:
|
||||
remote_addr = request.META.get("REMOTE_ADDR", "").strip()
|
||||
trusted_proxies = getattr(settings, "TRUSTED_PROXY_IPS", [])
|
||||
x_forwarded_for = request.META.get("HTTP_X_FORWARDED_FOR", "")
|
||||
if remote_addr in trusted_proxies and x_forwarded_for:
|
||||
return x_forwarded_for.split(",")[0].strip()
|
||||
return remote_addr
|
||||
|
||||
|
||||
def _is_htmx(request) -> bool:
|
||||
return request.headers.get("HX-Request") == "true"
|
||||
|
||||
|
||||
def _add_vary_header(response):
|
||||
patch_vary_headers(response, ["HX-Request"])
|
||||
return response
|
||||
|
||||
|
||||
def _comment_redirect(article: ArticlePage, *, approved: bool):
|
||||
state = "approved" if approved else "pending"
|
||||
return redirect(f"{article.url}?commented={state}")
|
||||
|
||||
|
||||
def _verify_turnstile(token: str, ip: str) -> bool:
|
||||
secret = getattr(settings, "TURNSTILE_SECRET_KEY", "")
|
||||
if not secret:
|
||||
return False
|
||||
try:
|
||||
resp = http_requests.post(
|
||||
"https://challenges.cloudflare.com/turnstile/v0/siteverify",
|
||||
data={"secret": secret, "response": token, "remoteip": ip},
|
||||
timeout=5,
|
||||
)
|
||||
result = resp.json()
|
||||
if not result.get("success"):
|
||||
return False
|
||||
expected_hostname = getattr(settings, "TURNSTILE_EXPECTED_HOSTNAME", "")
|
||||
if expected_hostname and result.get("hostname") != expected_hostname:
|
||||
logger.warning("Turnstile hostname mismatch: %s", result.get("hostname"))
|
||||
return False
|
||||
return True
|
||||
except Exception:
|
||||
logger.exception("Turnstile verification failed")
|
||||
return False
|
||||
|
||||
|
||||
def _turnstile_enabled() -> bool:
|
||||
return bool(getattr(settings, "TURNSTILE_SECRET_KEY", ""))
|
||||
|
||||
|
||||
def _get_session_key(request) -> str:
|
||||
session = getattr(request, "session", None)
|
||||
return (session.session_key or "") if session else ""
|
||||
|
||||
|
||||
def _turnstile_site_key():
|
||||
return getattr(settings, "TURNSTILE_SITE_KEY", "")
|
||||
|
||||
|
||||
def _annotate_reaction_counts(comments, session_key=""):
|
||||
"""Hydrate each comment with reaction_counts dict and user_reacted set."""
|
||||
comment_ids = [c.id for c in comments]
|
||||
if not comment_ids:
|
||||
return comments
|
||||
|
||||
counts_qs = (
|
||||
CommentReaction.objects.filter(comment_id__in=comment_ids)
|
||||
.values("comment_id", "reaction_type")
|
||||
.annotate(count=Count("id"))
|
||||
)
|
||||
counts_map = {}
|
||||
for row in counts_qs:
|
||||
counts_map.setdefault(row["comment_id"], {"heart": 0, "plus_one": 0})
|
||||
counts_map[row["comment_id"]][row["reaction_type"]] = row["count"]
|
||||
|
||||
user_map = {}
|
||||
if session_key:
|
||||
user_qs = CommentReaction.objects.filter(
|
||||
comment_id__in=comment_ids, session_key=session_key
|
||||
).values_list("comment_id", "reaction_type")
|
||||
for cid, rtype in user_qs:
|
||||
user_map.setdefault(cid, set()).add(rtype)
|
||||
|
||||
for comment in comments:
|
||||
comment.reaction_counts = counts_map.get(comment.id, {"heart": 0, "plus_one": 0})
|
||||
comment.user_reacted = user_map.get(comment.id, set())
|
||||
|
||||
return comments
|
||||
|
||||
|
||||
def _comment_template_context(comment, article, request):
|
||||
"""Build template context for a single comment partial."""
|
||||
_annotate_reaction_counts([comment], _get_session_key(request))
|
||||
return {
|
||||
"comment": comment,
|
||||
"page": article,
|
||||
"turnstile_site_key": _turnstile_site_key(),
|
||||
}
|
||||
|
||||
|
||||
class CommentCreateView(View):
|
||||
def _render_htmx_error(self, request, article, form):
|
||||
"""Return error form partial for HTMX — swaps the form container itself."""
|
||||
raw_parent_id = request.POST.get("parent_id")
|
||||
if raw_parent_id:
|
||||
try:
|
||||
parent_id = int(raw_parent_id)
|
||||
except (ValueError, TypeError):
|
||||
parent_id = None
|
||||
parent = Comment.objects.filter(pk=parent_id, article=article).first() if parent_id else None
|
||||
if parent:
|
||||
ctx = {
|
||||
"comment": parent, "page": article,
|
||||
"turnstile_site_key": _turnstile_site_key(),
|
||||
"reply_form_errors": form.errors,
|
||||
"reply_form": form,
|
||||
}
|
||||
return _add_vary_header(render(request, "comments/_reply_form.html", ctx))
|
||||
ctx = {
|
||||
"comment_form": form, "page": article,
|
||||
"turnstile_site_key": _turnstile_site_key(),
|
||||
}
|
||||
return _add_vary_header(render(request, "comments/_comment_form.html", ctx))
|
||||
|
||||
def _render_htmx_success(self, request, article, comment):
|
||||
"""Return fresh form + OOB-appended comment (if approved)."""
|
||||
tsk = _turnstile_site_key()
|
||||
oob_parts = []
|
||||
if comment.is_approved:
|
||||
ctx = _comment_template_context(comment, article, request)
|
||||
if comment.parent_id:
|
||||
# _reply.html expects 'reply' context key
|
||||
reply_ctx = ctx.copy()
|
||||
reply_ctx["reply"] = reply_ctx.pop("comment")
|
||||
comment_html = render_to_string("comments/_reply.html", reply_ctx, request)
|
||||
oob_parts.append(
|
||||
f'<div hx-swap-oob="beforeend:#replies-for-{comment.parent_id}">{comment_html}</div>'
|
||||
)
|
||||
else:
|
||||
comment_html = render_to_string("comments/_comment.html", ctx, request)
|
||||
oob_parts.append(f'<div hx-swap-oob="beforeend:#comments-list">{comment_html}</div>')
|
||||
# Ensure stale empty-state copy is removed when the first approved comment appears.
|
||||
oob_parts.append('<div id="comments-empty-state" hx-swap-oob="delete"></div>')
|
||||
|
||||
if comment.parent_id:
|
||||
parent = Comment.objects.filter(pk=comment.parent_id, article=article).first()
|
||||
msg = "Reply posted!" if comment.is_approved else "Your reply is awaiting moderation."
|
||||
form_html = render_to_string("comments/_reply_form.html", {
|
||||
"comment": parent, "page": article,
|
||||
"turnstile_site_key": tsk, "reply_success_message": msg,
|
||||
}, request)
|
||||
else:
|
||||
msg = (
|
||||
"Comment posted!" if comment.is_approved
|
||||
else "Your comment has been posted and is awaiting moderation."
|
||||
)
|
||||
form_html = render_to_string("comments/_comment_form.html", {
|
||||
"page": article, "turnstile_site_key": tsk, "success_message": msg,
|
||||
}, request)
|
||||
|
||||
resp = HttpResponse(form_html + "".join(oob_parts))
|
||||
return _add_vary_header(resp)
|
||||
|
||||
def post(self, request):
|
||||
ip = (request.META.get("HTTP_X_FORWARDED_FOR") or request.META.get("REMOTE_ADDR", "")).split(",")[0].strip()
|
||||
ip = client_ip_from_request(request)
|
||||
key = f"comment-rate:{ip}"
|
||||
count = cache.get(key, 0)
|
||||
if count >= 3:
|
||||
rate_limit = getattr(settings, "COMMENT_RATE_LIMIT_PER_MINUTE", 3)
|
||||
if count >= rate_limit:
|
||||
return HttpResponse(status=429)
|
||||
cache.set(key, count + 1, timeout=60)
|
||||
|
||||
@@ -27,16 +201,119 @@ class CommentCreateView(View):
|
||||
|
||||
if form.is_valid():
|
||||
if form.cleaned_data.get("honeypot"):
|
||||
return redirect(f"{article.url}?commented=1")
|
||||
if _is_htmx(request):
|
||||
return _add_vary_header(
|
||||
render(request, "comments/_comment_success.html", {"message": "Comment posted!"})
|
||||
)
|
||||
return _comment_redirect(article, approved=True)
|
||||
|
||||
# Turnstile verification
|
||||
turnstile_ok = False
|
||||
if _turnstile_enabled():
|
||||
token = request.POST.get("cf-turnstile-response", "")
|
||||
turnstile_ok = _verify_turnstile(token, ip)
|
||||
|
||||
comment = form.save(commit=False)
|
||||
comment.article = article
|
||||
comment.is_approved = turnstile_ok
|
||||
parent_id = form.cleaned_data.get("parent_id")
|
||||
if parent_id:
|
||||
comment.parent = Comment.objects.filter(pk=parent_id, article=article).first()
|
||||
comment.ip_address = ip or None
|
||||
try:
|
||||
comment.full_clean()
|
||||
except ValidationError:
|
||||
form.add_error(None, "Reply depth exceeds the allowed limit")
|
||||
if _is_htmx(request):
|
||||
return self._render_htmx_error(request, article, form)
|
||||
context = article.get_context(request)
|
||||
context.update({"page": article, "comment_form": form})
|
||||
return render(request, "blog/article_page.html", context, status=200)
|
||||
comment.save()
|
||||
messages.success(request, "Your comment is awaiting moderation")
|
||||
return redirect(f"{article.url}?commented=1")
|
||||
|
||||
messages.error(request, "Please correct the form errors")
|
||||
return redirect(article.url)
|
||||
if _is_htmx(request):
|
||||
return self._render_htmx_success(request, article, comment)
|
||||
|
||||
return _comment_redirect(article, approved=comment.is_approved)
|
||||
|
||||
if _is_htmx(request):
|
||||
return self._render_htmx_error(request, article, form)
|
||||
context = article.get_context(request)
|
||||
context.update({"page": article, "comment_form": form})
|
||||
return render(request, "blog/article_page.html", context, status=200)
|
||||
|
||||
|
||||
@require_GET
|
||||
def comment_poll(request, article_id):
|
||||
"""Return comments newer than after_id for HTMX polling."""
|
||||
article = get_object_or_404(ArticlePage, pk=article_id)
|
||||
after_id = request.GET.get("after_id", "0")
|
||||
try:
|
||||
after_id = int(after_id)
|
||||
except (ValueError, TypeError):
|
||||
after_id = 0
|
||||
|
||||
approved_replies = Comment.objects.filter(is_approved=True).select_related("parent")
|
||||
comments = list(
|
||||
article.comments.filter(is_approved=True, parent__isnull=True, id__gt=after_id)
|
||||
.prefetch_related(Prefetch("replies", queryset=approved_replies))
|
||||
.order_by("created_at", "id")
|
||||
)
|
||||
|
||||
_annotate_reaction_counts(comments, _get_session_key(request))
|
||||
|
||||
resp = render(request, "comments/_comment_list_inner.html", {
|
||||
"approved_comments": comments,
|
||||
"page": article,
|
||||
"turnstile_site_key": _turnstile_site_key(),
|
||||
})
|
||||
return _add_vary_header(resp)
|
||||
|
||||
|
||||
@require_POST
|
||||
def comment_react(request, comment_id):
|
||||
"""Toggle a reaction on a comment."""
|
||||
ip = client_ip_from_request(request)
|
||||
key = f"reaction-rate:{ip}"
|
||||
count = cache.get(key, 0)
|
||||
rate_limit = getattr(settings, "REACTION_RATE_LIMIT_PER_MINUTE", 20)
|
||||
if count >= rate_limit:
|
||||
return HttpResponse(status=429)
|
||||
cache.set(key, count + 1, timeout=60)
|
||||
|
||||
comment = get_object_or_404(Comment, pk=comment_id, is_approved=True)
|
||||
reaction_type = request.POST.get("reaction_type", "heart")
|
||||
if reaction_type not in ("heart", "plus_one"):
|
||||
return HttpResponse(status=400)
|
||||
|
||||
if not request.session.session_key:
|
||||
request.session.create()
|
||||
session_key = request.session.session_key
|
||||
|
||||
try:
|
||||
existing = CommentReaction.objects.filter(
|
||||
comment=comment, reaction_type=reaction_type, session_key=session_key
|
||||
).first()
|
||||
if existing:
|
||||
existing.delete()
|
||||
else:
|
||||
CommentReaction.objects.create(
|
||||
comment=comment, reaction_type=reaction_type, session_key=session_key
|
||||
)
|
||||
except IntegrityError:
|
||||
pass
|
||||
|
||||
counts = {}
|
||||
for rt in ("heart", "plus_one"):
|
||||
counts[rt] = comment.reactions.filter(reaction_type=rt).count()
|
||||
user_reacted = set(
|
||||
comment.reactions.filter(session_key=session_key).values_list("reaction_type", flat=True)
|
||||
)
|
||||
|
||||
if _is_htmx(request):
|
||||
resp = render(request, "comments/_reactions.html", {
|
||||
"comment": comment, "counts": counts, "user_reacted": user_reacted,
|
||||
})
|
||||
return _add_vary_header(resp)
|
||||
|
||||
return JsonResponse({"counts": counts, "user_reacted": list(user_reacted)})
|
||||
|
||||
@@ -1,20 +1,101 @@
|
||||
from wagtail.admin.ui.tables import BooleanColumn
|
||||
from typing import Any, cast
|
||||
|
||||
from django.db.models import Count, Q
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from django.utils.translation import ngettext
|
||||
from wagtail import hooks
|
||||
from wagtail.admin.ui.tables import BooleanColumn, Column
|
||||
from wagtail.snippets.bulk_actions.snippet_bulk_action import SnippetBulkAction
|
||||
from wagtail.snippets.models import register_snippet
|
||||
from wagtail.snippets.permissions import get_permission_name
|
||||
from wagtail.snippets.views.snippets import SnippetViewSet
|
||||
|
||||
from apps.comments.models import Comment
|
||||
|
||||
|
||||
class ApproveCommentBulkAction(SnippetBulkAction):
|
||||
display_name = _("Approve")
|
||||
action_type = "approve"
|
||||
aria_label = _("Approve selected comments")
|
||||
template_name = "comments/confirm_bulk_approve.html"
|
||||
action_priority = 20
|
||||
models = [Comment]
|
||||
|
||||
def check_perm(self, snippet):
|
||||
if getattr(self, "can_change_items", None) is None:
|
||||
self.can_change_items = self.request.user.has_perm(get_permission_name("change", self.model))
|
||||
return self.can_change_items
|
||||
|
||||
@classmethod
|
||||
def execute_action(cls, objects, **kwargs):
|
||||
updated = kwargs["self"].model.objects.filter(pk__in=[obj.pk for obj in objects], is_approved=False).update(
|
||||
is_approved=True
|
||||
)
|
||||
return updated, 0
|
||||
|
||||
def get_success_message(self, num_parent_objects, num_child_objects):
|
||||
return ngettext(
|
||||
"%(count)d comment approved.",
|
||||
"%(count)d comments approved.",
|
||||
num_parent_objects,
|
||||
) % {"count": num_parent_objects}
|
||||
|
||||
|
||||
class UnapproveCommentBulkAction(SnippetBulkAction):
|
||||
display_name = _("Unapprove")
|
||||
action_type = "unapprove"
|
||||
aria_label = _("Unapprove selected comments")
|
||||
template_name = "comments/confirm_bulk_unapprove.html"
|
||||
action_priority = 30
|
||||
models = [Comment]
|
||||
|
||||
def check_perm(self, snippet):
|
||||
if getattr(self, "can_change_items", None) is None:
|
||||
self.can_change_items = self.request.user.has_perm(get_permission_name("change", self.model))
|
||||
return self.can_change_items
|
||||
|
||||
@classmethod
|
||||
def execute_action(cls, objects, **kwargs):
|
||||
updated = kwargs["self"].model.objects.filter(pk__in=[obj.pk for obj in objects], is_approved=True).update(
|
||||
is_approved=False
|
||||
)
|
||||
return updated, 0
|
||||
|
||||
def get_success_message(self, num_parent_objects, num_child_objects):
|
||||
return ngettext(
|
||||
"%(count)d comment unapproved.",
|
||||
"%(count)d comments unapproved.",
|
||||
num_parent_objects,
|
||||
) % {"count": num_parent_objects}
|
||||
|
||||
|
||||
class CommentViewSet(SnippetViewSet):
|
||||
model = Comment
|
||||
queryset = Comment.objects.all()
|
||||
icon = "comment"
|
||||
list_display = ["author_name", "article", BooleanColumn("is_approved"), "created_at"]
|
||||
list_display = [
|
||||
"author_name",
|
||||
"article",
|
||||
BooleanColumn("is_approved"),
|
||||
Column("pending_in_article", label="Pending (article)"),
|
||||
"created_at",
|
||||
]
|
||||
list_filter = ["is_approved"]
|
||||
search_fields = ["author_name", "body"]
|
||||
add_to_admin_menu = True
|
||||
|
||||
def get_queryset(self, request):
|
||||
return super().get_queryset(request).select_related("article", "parent")
|
||||
|
||||
base_qs = self.model.objects.all().select_related("article", "parent")
|
||||
# mypy-django-plugin currently crashes on QuerySet.annotate() in this file.
|
||||
typed_qs = cast(Any, base_qs)
|
||||
return typed_qs.annotate(
|
||||
pending_in_article=Count(
|
||||
"article__comments",
|
||||
filter=Q(article__comments__is_approved=False),
|
||||
distinct=True,
|
||||
)
|
||||
)
|
||||
|
||||
register_snippet(CommentViewSet)
|
||||
hooks.register("register_bulk_action", ApproveCommentBulkAction)
|
||||
hooks.register("register_bulk_action", UnapproveCommentBulkAction)
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
from django.conf import settings as django_settings
|
||||
from wagtail.models import Site
|
||||
|
||||
from apps.core.models import SiteSettings
|
||||
@@ -6,4 +7,7 @@ from apps.core.models import SiteSettings
|
||||
def site_settings(request):
|
||||
site = Site.find_for_request(request)
|
||||
settings_obj = SiteSettings.for_site(site) if site else None
|
||||
return {"site_settings": settings_obj}
|
||||
return {
|
||||
"site_settings": settings_obj,
|
||||
"turnstile_site_key": getattr(django_settings, "TURNSTILE_SITE_KEY", ""),
|
||||
}
|
||||
|
||||
1
apps/core/management/__init__.py
Normal file
1
apps/core/management/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
|
||||
1
apps/core/management/commands/__init__.py
Normal file
1
apps/core/management/commands/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
|
||||
42
apps/core/management/commands/check_content_integrity.py
Normal file
42
apps/core/management/commands/check_content_integrity.py
Normal file
@@ -0,0 +1,42 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from django.core.management.base import BaseCommand, CommandError
|
||||
from django.db.models.functions import Trim
|
||||
from wagtail.models import Site
|
||||
|
||||
from apps.blog.models import ArticlePage
|
||||
from apps.core.models import SiteSettings
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = "Validate content-integrity constraints for live article pages."
|
||||
|
||||
def handle(self, *args, **options):
|
||||
errors: list[str] = []
|
||||
|
||||
missing_summary = ArticlePage.objects.live().annotate(summary_trimmed=Trim("summary")).filter(
|
||||
summary_trimmed=""
|
||||
)
|
||||
if missing_summary.exists():
|
||||
errors.append(f"{missing_summary.count()} live article(s) have an empty summary.")
|
||||
|
||||
missing_author = ArticlePage.objects.live().filter(author__isnull=True)
|
||||
if missing_author.exists():
|
||||
errors.append(f"{missing_author.count()} live article(s) have no author.")
|
||||
|
||||
default_site = Site.objects.filter(is_default_site=True).first()
|
||||
default_og_image = None
|
||||
if default_site:
|
||||
default_og_image = SiteSettings.for_site(default_site).default_og_image
|
||||
|
||||
if default_og_image is None:
|
||||
missing_hero = ArticlePage.objects.live().filter(hero_image__isnull=True)
|
||||
if missing_hero.exists():
|
||||
errors.append(
|
||||
f"{missing_hero.count()} live article(s) have no hero image and no site default OG image is set."
|
||||
)
|
||||
|
||||
if errors:
|
||||
raise CommandError("Content integrity check failed: " + " ".join(errors))
|
||||
|
||||
self.stdout.write(self.style.SUCCESS("Content integrity check passed."))
|
||||
213
apps/core/management/commands/seed_e2e_content.py
Normal file
213
apps/core/management/commands/seed_e2e_content.py
Normal file
@@ -0,0 +1,213 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
|
||||
from django.contrib.auth import get_user_model
|
||||
from django.core.management.base import BaseCommand
|
||||
from taggit.models import Tag
|
||||
from wagtail.models import Page, Site
|
||||
|
||||
from apps.authors.models import Author
|
||||
from apps.blog.models import AboutPage, ArticleIndexPage, ArticlePage, HomePage, TagMetadata
|
||||
from apps.comments.models import Comment
|
||||
from apps.core.models import NavigationMenuItem, SiteSettings, SocialMediaLink
|
||||
from apps.legal.models import LegalIndexPage, LegalPage
|
||||
|
||||
User = get_user_model()
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = "Seed deterministic content for E2E checks."
|
||||
|
||||
def handle(self, *args, **options):
|
||||
import datetime
|
||||
|
||||
from django.utils import timezone
|
||||
|
||||
root = Page.get_first_root_node()
|
||||
|
||||
home = HomePage.objects.child_of(root).first()
|
||||
if home is None:
|
||||
home = HomePage(title="No Hype AI", slug="nohype-home")
|
||||
root.add_child(instance=home)
|
||||
home.save_revision().publish()
|
||||
|
||||
article_index = ArticleIndexPage.objects.child_of(home).filter(slug="articles").first()
|
||||
if article_index is None:
|
||||
article_index = ArticleIndexPage(title="Articles", slug="articles")
|
||||
home.add_child(instance=article_index)
|
||||
article_index.save_revision().publish()
|
||||
|
||||
author, _ = Author.objects.get_or_create(
|
||||
slug="e2e-author",
|
||||
defaults={
|
||||
"name": "E2E Author",
|
||||
"bio": "Seeded nightly test author.",
|
||||
},
|
||||
)
|
||||
|
||||
# Primary article — comments enabled, used by nightly journey test
|
||||
# published_date is set explicitly to ensure deterministic ordering
|
||||
# (most recent first) so this article appears at the top of listings.
|
||||
now = timezone.now()
|
||||
article = ArticlePage.objects.child_of(article_index).filter(slug="nightly-playwright-journey").first()
|
||||
if article is None:
|
||||
article = ArticlePage(
|
||||
title="Nightly Playwright Journey",
|
||||
slug="nightly-playwright-journey",
|
||||
author=author,
|
||||
summary="Seeded article for nightly browser journey.",
|
||||
body=[("rich_text", "<p>Seeded article body for nightly browser checks.</p>")],
|
||||
comments_enabled=True,
|
||||
published_date=now,
|
||||
)
|
||||
article_index.add_child(instance=article)
|
||||
article.save_revision().publish()
|
||||
# Ensure deterministic ordering — primary article always newest
|
||||
ArticlePage.objects.filter(pk=article.pk).update(published_date=now)
|
||||
|
||||
# Seed one approved top-level comment on the primary article for reply E2E tests
|
||||
if not Comment.objects.filter(article=article, author_name="E2E Approved Commenter").exists():
|
||||
Comment.objects.create(
|
||||
article=article,
|
||||
author_name="E2E Approved Commenter",
|
||||
author_email="approved@example.com",
|
||||
body="This is a seeded approved comment for reply testing.",
|
||||
is_approved=True,
|
||||
)
|
||||
|
||||
|
||||
tag, _ = Tag.objects.get_or_create(name="AI Tools", slug="ai-tools")
|
||||
TagMetadata.objects.get_or_create(tag=tag, defaults={"colour": "cyan"})
|
||||
tagged_article = ArticlePage.objects.child_of(article_index).filter(slug="e2e-tagged-article").first()
|
||||
if tagged_article is None:
|
||||
tagged_article = ArticlePage(
|
||||
title="Tagged Article",
|
||||
slug="e2e-tagged-article",
|
||||
author=author,
|
||||
summary="An article with tags for E2E filter tests.",
|
||||
body=[("rich_text", "<p>This article is tagged with AI Tools.</p>")],
|
||||
comments_enabled=True,
|
||||
published_date=now - datetime.timedelta(hours=1),
|
||||
)
|
||||
article_index.add_child(instance=tagged_article)
|
||||
tagged_article.save_revision().publish()
|
||||
ArticlePage.objects.filter(pk=tagged_article.pk).update(
|
||||
published_date=now - datetime.timedelta(hours=1)
|
||||
)
|
||||
tagged_article.tags.add(tag)
|
||||
tagged_article.save()
|
||||
|
||||
# Third article — comments disabled
|
||||
no_comments_article = ArticlePage.objects.child_of(article_index).filter(slug="e2e-no-comments").first()
|
||||
if no_comments_article is None:
|
||||
no_comments_article = ArticlePage(
|
||||
title="No Comments Article",
|
||||
slug="e2e-no-comments",
|
||||
author=author,
|
||||
summary="An article with comments disabled.",
|
||||
body=[("rich_text", "<p>Comments are disabled on this one.</p>")],
|
||||
comments_enabled=False,
|
||||
published_date=now - datetime.timedelta(hours=2),
|
||||
)
|
||||
article_index.add_child(instance=no_comments_article)
|
||||
# Explicitly persist False after add_child (which internally calls save())
|
||||
# to guard against any field reset in the page tree insertion path.
|
||||
ArticlePage.objects.filter(pk=no_comments_article.pk).update(comments_enabled=False)
|
||||
no_comments_article.comments_enabled = False
|
||||
no_comments_article.save_revision().publish()
|
||||
ArticlePage.objects.filter(pk=no_comments_article.pk).update(
|
||||
published_date=now - datetime.timedelta(hours=2)
|
||||
)
|
||||
|
||||
# About page
|
||||
if not AboutPage.objects.child_of(home).filter(slug="about").exists():
|
||||
about = AboutPage(
|
||||
title="About",
|
||||
slug="about",
|
||||
mission_statement="Honest AI coding tool reviews for developers.",
|
||||
body="<p>We benchmark, so you don't have to.</p>",
|
||||
)
|
||||
home.add_child(instance=about)
|
||||
about.save_revision().publish()
|
||||
|
||||
# Legal pages
|
||||
legal_index = LegalIndexPage.objects.child_of(home).filter(slug="legal").first()
|
||||
if legal_index is None:
|
||||
legal_index = LegalIndexPage(title="Legal", slug="legal")
|
||||
home.add_child(instance=legal_index)
|
||||
legal_index.save_revision().publish()
|
||||
|
||||
if not LegalPage.objects.child_of(legal_index).filter(slug="privacy-policy").exists():
|
||||
privacy = LegalPage(
|
||||
title="Privacy Policy",
|
||||
slug="privacy-policy",
|
||||
body="<p>We take your privacy seriously.</p>",
|
||||
last_updated=datetime.date.today(),
|
||||
show_in_footer=True,
|
||||
)
|
||||
legal_index.add_child(instance=privacy)
|
||||
privacy.save_revision().publish()
|
||||
|
||||
# Point every existing Site at the real home page and mark exactly one
|
||||
# as the default. Wagtail's initial migration creates a localhost:80
|
||||
# site that matches incoming requests by hostname before the
|
||||
# is_default_site fallback is ever reached, so we must update *all*
|
||||
# sites, not just the is_default_site one.
|
||||
Site.objects.all().update(root_page=home, site_name="No Hype AI", is_default_site=False)
|
||||
site = Site.objects.first()
|
||||
if site is None:
|
||||
site = Site(hostname="localhost", port=80)
|
||||
site.is_default_site = True
|
||||
site.save()
|
||||
|
||||
# Navigation menu items and social links — always reconcile to
|
||||
# match the pages we just created (the data migration may have
|
||||
# seeded partial items before these pages existed).
|
||||
settings, _ = SiteSettings.objects.get_or_create(site=site)
|
||||
NavigationMenuItem.objects.filter(settings=settings).delete()
|
||||
article_index_page = ArticleIndexPage.objects.child_of(home).filter(slug="articles").first()
|
||||
about_page = AboutPage.objects.child_of(home).filter(slug="about").first()
|
||||
nav_items = [
|
||||
NavigationMenuItem(settings=settings, link_page=home, link_title="Home", sort_order=0),
|
||||
]
|
||||
if article_index_page:
|
||||
nav_items.append(
|
||||
NavigationMenuItem(
|
||||
settings=settings, link_page=article_index_page,
|
||||
link_title="Articles", sort_order=1,
|
||||
)
|
||||
)
|
||||
if about_page:
|
||||
nav_items.append(
|
||||
NavigationMenuItem(
|
||||
settings=settings, link_page=about_page,
|
||||
link_title="About", sort_order=2,
|
||||
)
|
||||
)
|
||||
NavigationMenuItem.objects.bulk_create(nav_items)
|
||||
|
||||
SocialMediaLink.objects.filter(settings=settings).delete()
|
||||
SocialMediaLink.objects.bulk_create(
|
||||
[
|
||||
SocialMediaLink(
|
||||
settings=settings, platform="twitter",
|
||||
url="https://twitter.com/nohypeai",
|
||||
label="Twitter (X)", sort_order=0,
|
||||
),
|
||||
SocialMediaLink(
|
||||
settings=settings, platform="rss",
|
||||
url="/feed/", label="RSS Feed", sort_order=1,
|
||||
),
|
||||
]
|
||||
)
|
||||
|
||||
# Admin user for E2E admin tests — only when E2E_MODE is set
|
||||
if os.environ.get("E2E_MODE") and not User.objects.filter(username="e2e-admin").exists():
|
||||
User.objects.create_superuser(
|
||||
username="e2e-admin",
|
||||
email="e2e-admin@example.com",
|
||||
password="e2e-admin-pass",
|
||||
)
|
||||
|
||||
self.stdout.write(self.style.SUCCESS("Seeded E2E content."))
|
||||
@@ -1,5 +1,10 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import secrets
|
||||
from typing import Any, cast
|
||||
|
||||
from django.contrib.messages import get_messages
|
||||
|
||||
from .consent import ConsentService
|
||||
|
||||
|
||||
@@ -10,3 +15,53 @@ class ConsentMiddleware:
|
||||
def __call__(self, request):
|
||||
request.consent = ConsentService.get_consent(request)
|
||||
return self.get_response(request)
|
||||
|
||||
|
||||
class SecurityHeadersMiddleware:
|
||||
def __init__(self, get_response):
|
||||
self.get_response = get_response
|
||||
|
||||
ADMIN_PREFIXES = ("/cms/", "/django-admin/")
|
||||
|
||||
def __call__(self, request):
|
||||
nonce = secrets.token_urlsafe(16)
|
||||
request.csp_nonce = nonce
|
||||
response = self.get_response(request)
|
||||
if request.path.startswith(self.ADMIN_PREFIXES):
|
||||
return response
|
||||
response["Content-Security-Policy"] = (
|
||||
f"default-src 'self'; "
|
||||
f"script-src 'self' 'nonce-{nonce}' https://challenges.cloudflare.com; "
|
||||
"style-src 'self' https://fonts.googleapis.com; "
|
||||
"img-src 'self' data: blob:; "
|
||||
"font-src 'self' https://fonts.gstatic.com; "
|
||||
"connect-src 'self' https://challenges.cloudflare.com; "
|
||||
"frame-src https://challenges.cloudflare.com; "
|
||||
"object-src 'none'; "
|
||||
"base-uri 'self'; "
|
||||
"frame-ancestors 'self'"
|
||||
)
|
||||
response["Permissions-Policy"] = "camera=(), microphone=(), geolocation=()"
|
||||
return response
|
||||
|
||||
|
||||
class AdminMessageGuardMiddleware:
|
||||
ADMIN_PREFIXES = ("/cms/", "/django-admin/")
|
||||
|
||||
def __init__(self, get_response):
|
||||
self.get_response = get_response
|
||||
|
||||
def __call__(self, request):
|
||||
# The public site has no legitimate use of Django's shared flash queue.
|
||||
# Drain any stale admin messages before frontend rendering can see them.
|
||||
if not request.path.startswith(self.ADMIN_PREFIXES):
|
||||
storage = cast(Any, get_messages(request))
|
||||
list(storage)
|
||||
storage._queued_messages = []
|
||||
storage._loaded_data = []
|
||||
for sub_storage in getattr(storage, "storages", []):
|
||||
sub_storage._queued_messages = []
|
||||
sub_storage._loaded_data = []
|
||||
sub_storage.used = True
|
||||
storage.used = True
|
||||
return self.get_response(request)
|
||||
|
||||
@@ -0,0 +1,69 @@
|
||||
# Generated by Django 5.2.11 on 2026-03-02 18:39
|
||||
|
||||
import django.db.models.deletion
|
||||
import modelcluster.fields
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('core', '0001_initial'),
|
||||
('wagtailcore', '0094_alter_page_locale'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='sitesettings',
|
||||
name='copyright_text',
|
||||
field=models.CharField(default='No Hype AI. All rights reserved.', max_length=200),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='sitesettings',
|
||||
name='footer_description',
|
||||
field=models.TextField(blank=True, default='In-depth reviews and benchmarks of the latest AI coding tools.\nHonest analysis for developers.'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='sitesettings',
|
||||
name='site_name',
|
||||
field=models.CharField(default='NO HYPE AI', max_length=100),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='sitesettings',
|
||||
name='tagline',
|
||||
field=models.CharField(default='Honest AI tool reviews for developers.', max_length=200),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='NavigationMenuItem',
|
||||
fields=[
|
||||
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||
('sort_order', models.IntegerField(blank=True, editable=False, null=True)),
|
||||
('link_url', models.URLField(blank=True, default='', help_text='External URL (used only when no page is selected).')),
|
||||
('link_title', models.CharField(blank=True, default='', help_text='Override the display text. If blank, the page title is used.', max_length=100)),
|
||||
('open_in_new_tab', models.BooleanField(default=False)),
|
||||
('show_in_header', models.BooleanField(default=True)),
|
||||
('show_in_footer', models.BooleanField(default=True)),
|
||||
('link_page', models.ForeignKey(blank=True, help_text='Link to an internal page. If unpublished, the link is hidden automatically.', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailcore.page')),
|
||||
('settings', modelcluster.fields.ParentalKey(on_delete=django.db.models.deletion.CASCADE, related_name='navigation_items', to='core.sitesettings')),
|
||||
],
|
||||
options={
|
||||
'ordering': ['sort_order'],
|
||||
'abstract': False,
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='SocialMediaLink',
|
||||
fields=[
|
||||
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||
('sort_order', models.IntegerField(blank=True, editable=False, null=True)),
|
||||
('platform', models.CharField(choices=[('twitter', 'Twitter / X'), ('github', 'GitHub'), ('rss', 'RSS Feed'), ('linkedin', 'LinkedIn'), ('youtube', 'YouTube'), ('mastodon', 'Mastodon'), ('bluesky', 'Bluesky')], max_length=30)),
|
||||
('url', models.URLField()),
|
||||
('label', models.CharField(blank=True, default='', help_text='Display label. If blank, the platform name is used.', max_length=100)),
|
||||
('settings', modelcluster.fields.ParentalKey(on_delete=django.db.models.deletion.CASCADE, related_name='social_links', to='core.sitesettings')),
|
||||
],
|
||||
options={
|
||||
'ordering': ['sort_order'],
|
||||
'abstract': False,
|
||||
},
|
||||
),
|
||||
]
|
||||
105
apps/core/migrations/0003_seed_navigation_data.py
Normal file
105
apps/core/migrations/0003_seed_navigation_data.py
Normal file
@@ -0,0 +1,105 @@
|
||||
# Generated by Django 5.2.11 on 2026-03-02 18:39
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
def seed_navigation_data(apps, schema_editor):
|
||||
Site = apps.get_model("wagtailcore", "Site")
|
||||
SiteSettings = apps.get_model("core", "SiteSettings")
|
||||
NavigationMenuItem = apps.get_model("core", "NavigationMenuItem")
|
||||
SocialMediaLink = apps.get_model("core", "SocialMediaLink")
|
||||
Page = apps.get_model("wagtailcore", "Page")
|
||||
|
||||
for site in Site.objects.all():
|
||||
settings, _ = SiteSettings.objects.get_or_create(site=site)
|
||||
|
||||
# Only seed if no nav items exist yet
|
||||
if NavigationMenuItem.objects.filter(settings=settings).exists():
|
||||
continue
|
||||
|
||||
root_page = site.root_page
|
||||
if not root_page:
|
||||
continue
|
||||
|
||||
# Find pages by slug under the site root using tree path
|
||||
home_page = root_page
|
||||
# In Wagtail's treebeard, direct children share the root's path prefix
|
||||
articles_page = Page.objects.filter(
|
||||
depth=root_page.depth + 1,
|
||||
path__startswith=root_page.path,
|
||||
slug__startswith="articles",
|
||||
).first()
|
||||
about_page = Page.objects.filter(
|
||||
depth=root_page.depth + 1,
|
||||
path__startswith=root_page.path,
|
||||
slug__startswith="about",
|
||||
).first()
|
||||
|
||||
nav_items = []
|
||||
if home_page:
|
||||
nav_items.append(
|
||||
NavigationMenuItem(
|
||||
settings=settings,
|
||||
link_page=home_page,
|
||||
link_title="Home",
|
||||
show_in_header=True,
|
||||
show_in_footer=True,
|
||||
sort_order=0,
|
||||
)
|
||||
)
|
||||
if articles_page:
|
||||
nav_items.append(
|
||||
NavigationMenuItem(
|
||||
settings=settings,
|
||||
link_page=articles_page,
|
||||
link_title="Articles",
|
||||
show_in_header=True,
|
||||
show_in_footer=True,
|
||||
sort_order=1,
|
||||
)
|
||||
)
|
||||
if about_page:
|
||||
nav_items.append(
|
||||
NavigationMenuItem(
|
||||
settings=settings,
|
||||
link_page=about_page,
|
||||
link_title="About",
|
||||
show_in_header=True,
|
||||
show_in_footer=True,
|
||||
sort_order=2,
|
||||
)
|
||||
)
|
||||
NavigationMenuItem.objects.bulk_create(nav_items)
|
||||
|
||||
# Social links
|
||||
if not SocialMediaLink.objects.filter(settings=settings).exists():
|
||||
SocialMediaLink.objects.bulk_create(
|
||||
[
|
||||
SocialMediaLink(
|
||||
settings=settings,
|
||||
platform="twitter",
|
||||
url="https://twitter.com/nohypeai",
|
||||
label="Twitter (X)",
|
||||
sort_order=0,
|
||||
),
|
||||
SocialMediaLink(
|
||||
settings=settings,
|
||||
platform="rss",
|
||||
url="/feed/",
|
||||
label="RSS Feed",
|
||||
sort_order=1,
|
||||
),
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('core', '0002_sitesettings_copyright_text_and_more'),
|
||||
('wagtailcore', '0094_alter_page_locale'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(seed_navigation_data, migrations.RunPython.noop),
|
||||
]
|
||||
@@ -0,0 +1,23 @@
|
||||
# Generated by Django 5.2.11 on 2026-03-02 19:35
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('core', '0003_seed_navigation_data'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='navigationmenuitem',
|
||||
name='link_url',
|
||||
field=models.CharField(blank=True, default='', help_text='URL or path (used only when no page is selected).', max_length=500),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='socialmedialink',
|
||||
name='url',
|
||||
field=models.CharField(help_text='URL or path (e.g. https://twitter.com/… or /feed/).', max_length=500),
|
||||
),
|
||||
]
|
||||
@@ -1,10 +1,24 @@
|
||||
from django.db import models
|
||||
from django.db.models import SET_NULL
|
||||
from modelcluster.fields import ParentalKey
|
||||
from modelcluster.models import ClusterableModel
|
||||
from wagtail.admin.panels import FieldPanel, InlinePanel, MultiFieldPanel
|
||||
from wagtail.contrib.settings.models import BaseSiteSetting, register_setting
|
||||
from wagtail.models import Orderable
|
||||
|
||||
SOCIAL_ICON_CHOICES = [
|
||||
("twitter", "Twitter / X"),
|
||||
("github", "GitHub"),
|
||||
("rss", "RSS Feed"),
|
||||
("linkedin", "LinkedIn"),
|
||||
("youtube", "YouTube"),
|
||||
("mastodon", "Mastodon"),
|
||||
("bluesky", "Bluesky"),
|
||||
]
|
||||
|
||||
|
||||
@register_setting
|
||||
class SiteSettings(BaseSiteSetting):
|
||||
class SiteSettings(ClusterableModel, BaseSiteSetting):
|
||||
default_og_image = models.ForeignKey(
|
||||
"wagtailimages.Image",
|
||||
null=True,
|
||||
@@ -19,3 +33,141 @@ class SiteSettings(BaseSiteSetting):
|
||||
on_delete=SET_NULL,
|
||||
related_name="+",
|
||||
)
|
||||
|
||||
# Branding
|
||||
site_name = models.CharField(max_length=100, default="NO HYPE AI")
|
||||
tagline = models.CharField(
|
||||
max_length=200,
|
||||
default="Honest AI tool reviews for developers.",
|
||||
)
|
||||
footer_description = models.TextField(
|
||||
default="In-depth reviews and benchmarks of the latest AI coding tools.\nHonest analysis for developers.",
|
||||
blank=True,
|
||||
)
|
||||
copyright_text = models.CharField(
|
||||
max_length=200,
|
||||
default="No Hype AI. All rights reserved.",
|
||||
)
|
||||
|
||||
panels = [
|
||||
MultiFieldPanel(
|
||||
[
|
||||
FieldPanel("site_name"),
|
||||
FieldPanel("tagline"),
|
||||
FieldPanel("footer_description"),
|
||||
FieldPanel("copyright_text"),
|
||||
],
|
||||
heading="Branding",
|
||||
),
|
||||
MultiFieldPanel(
|
||||
[
|
||||
FieldPanel("default_og_image"),
|
||||
FieldPanel("privacy_policy_page"),
|
||||
],
|
||||
heading="SEO & Legal",
|
||||
),
|
||||
InlinePanel("navigation_items", label="Navigation Menu Items"),
|
||||
InlinePanel("social_links", label="Social Media Links"),
|
||||
]
|
||||
|
||||
|
||||
class NavigationMenuItem(Orderable):
|
||||
settings = ParentalKey(
|
||||
SiteSettings,
|
||||
on_delete=models.CASCADE,
|
||||
related_name="navigation_items",
|
||||
)
|
||||
link_page = models.ForeignKey(
|
||||
"wagtailcore.Page",
|
||||
null=True,
|
||||
blank=True,
|
||||
on_delete=SET_NULL,
|
||||
related_name="+",
|
||||
help_text="Link to an internal page. If unpublished, the link is hidden automatically.",
|
||||
)
|
||||
link_url = models.CharField(
|
||||
max_length=500,
|
||||
blank=True,
|
||||
default="",
|
||||
help_text="URL or path (used only when no page is selected).",
|
||||
)
|
||||
link_title = models.CharField(
|
||||
max_length=100,
|
||||
blank=True,
|
||||
default="",
|
||||
help_text="Override the display text. If blank, the page title is used.",
|
||||
)
|
||||
open_in_new_tab = models.BooleanField(default=False)
|
||||
show_in_header = models.BooleanField(default=True)
|
||||
show_in_footer = models.BooleanField(default=True)
|
||||
|
||||
panels = [
|
||||
FieldPanel("link_page"),
|
||||
FieldPanel("link_url"),
|
||||
FieldPanel("link_title"),
|
||||
FieldPanel("open_in_new_tab"),
|
||||
FieldPanel("show_in_header"),
|
||||
FieldPanel("show_in_footer"),
|
||||
]
|
||||
|
||||
@property
|
||||
def title(self):
|
||||
if self.link_title:
|
||||
return self.link_title
|
||||
if self.link_page:
|
||||
return self.link_page.title
|
||||
return ""
|
||||
|
||||
@property
|
||||
def url(self):
|
||||
if self.link_page:
|
||||
return self.link_page.url
|
||||
return self.link_url
|
||||
|
||||
@property
|
||||
def is_live(self):
|
||||
"""Return False if linked to an unpublished/non-live page."""
|
||||
if self.link_page_id:
|
||||
return self.link_page.live
|
||||
return bool(self.link_url)
|
||||
|
||||
class Meta(Orderable.Meta):
|
||||
pass
|
||||
|
||||
|
||||
class SocialMediaLink(Orderable):
|
||||
settings = ParentalKey(
|
||||
SiteSettings,
|
||||
on_delete=models.CASCADE,
|
||||
related_name="social_links",
|
||||
)
|
||||
platform = models.CharField(
|
||||
max_length=30,
|
||||
choices=SOCIAL_ICON_CHOICES,
|
||||
)
|
||||
url = models.CharField(max_length=500, help_text="URL or path (e.g. https://twitter.com/… or /feed/).")
|
||||
label = models.CharField(
|
||||
max_length=100,
|
||||
blank=True,
|
||||
default="",
|
||||
help_text="Display label. If blank, the platform name is used.",
|
||||
)
|
||||
|
||||
panels = [
|
||||
FieldPanel("platform"),
|
||||
FieldPanel("url"),
|
||||
FieldPanel("label"),
|
||||
]
|
||||
|
||||
@property
|
||||
def display_label(self):
|
||||
if self.label:
|
||||
return self.label
|
||||
return dict(SOCIAL_ICON_CHOICES).get(self.platform, self.platform)
|
||||
|
||||
@property
|
||||
def icon_template(self):
|
||||
return f"components/icons/{self.platform}.html"
|
||||
|
||||
class Meta(Orderable.Meta):
|
||||
pass
|
||||
|
||||
@@ -4,7 +4,8 @@ from django import template
|
||||
from django.utils.safestring import mark_safe
|
||||
from wagtail.models import Site
|
||||
|
||||
from apps.blog.models import TagMetadata
|
||||
from apps.blog.models import ArticleIndexPage, Category, TagMetadata, get_auto_tag_colour_css
|
||||
from apps.core.models import SiteSettings
|
||||
from apps.legal.models import LegalPage
|
||||
|
||||
register = template.Library()
|
||||
@@ -20,11 +21,73 @@ def get_legal_pages(context):
|
||||
return pages
|
||||
|
||||
|
||||
@register.simple_tag
|
||||
@register.filter
|
||||
def get_tag_css(tag):
|
||||
@register.simple_tag(takes_context=True)
|
||||
def get_nav_items(context, location="header"):
|
||||
request = context.get("request")
|
||||
site = Site.find_for_request(request) if request else None
|
||||
settings = SiteSettings.for_site(site) if site else None
|
||||
if not settings:
|
||||
return []
|
||||
items = settings.navigation_items.all()
|
||||
if location == "header":
|
||||
items = items.filter(show_in_header=True)
|
||||
elif location == "footer":
|
||||
items = items.filter(show_in_footer=True)
|
||||
return [item for item in items if item.is_live]
|
||||
|
||||
|
||||
@register.simple_tag(takes_context=True)
|
||||
def get_social_links(context):
|
||||
request = context.get("request")
|
||||
site = Site.find_for_request(request) if request else None
|
||||
settings = SiteSettings.for_site(site) if site else None
|
||||
if not settings:
|
||||
return []
|
||||
return list(settings.social_links.all())
|
||||
|
||||
|
||||
@register.simple_tag(takes_context=True)
|
||||
def get_categories_nav(context):
|
||||
request = context.get("request")
|
||||
if not request:
|
||||
return []
|
||||
site = Site.find_for_request(request) if request else None
|
||||
index_qs = ArticleIndexPage.objects.live().public()
|
||||
if site:
|
||||
index_qs = index_qs.in_site(site)
|
||||
index_page = index_qs.first()
|
||||
if not index_page:
|
||||
return []
|
||||
categories = Category.objects.filter(show_in_nav=True).order_by("sort_order", "name")
|
||||
return [
|
||||
{
|
||||
"name": category.name,
|
||||
"slug": category.slug,
|
||||
"url": index_page.get_category_url(category),
|
||||
"article_count": index_page.get_articles().filter(category=category).count(),
|
||||
}
|
||||
for category in categories
|
||||
]
|
||||
|
||||
|
||||
def _resolve_tag_css(tag) -> dict[str, str]:
|
||||
"""Return CSS classes for a tag, using TagMetadata if set, else auto-colour."""
|
||||
meta = getattr(tag, "metadata", None)
|
||||
if meta is None:
|
||||
meta = TagMetadata.objects.filter(tag=tag).first()
|
||||
classes = meta.get_css_classes() if meta else TagMetadata.get_fallback_css()
|
||||
if meta:
|
||||
return meta.get_css_classes()
|
||||
return get_auto_tag_colour_css(tag.name)
|
||||
|
||||
|
||||
@register.simple_tag
|
||||
@register.filter
|
||||
def get_tag_css(tag):
|
||||
classes = _resolve_tag_css(tag)
|
||||
return mark_safe(f"{classes['bg']} {classes['text']}")
|
||||
|
||||
|
||||
@register.filter
|
||||
def get_tag_border_css(tag):
|
||||
classes = _resolve_tag_css(tag)
|
||||
return mark_safe(classes.get("border", ""))
|
||||
|
||||
@@ -11,16 +11,33 @@ from apps.core.models import SiteSettings
|
||||
register = template.Library()
|
||||
|
||||
|
||||
def _article_image_url(request, article) -> str:
|
||||
site_settings = SiteSettings.for_request(request)
|
||||
image = article.hero_image or site_settings.default_og_image
|
||||
if isinstance(image, Image):
|
||||
rendition = image.get_rendition("fill-1200x630")
|
||||
return request.build_absolute_uri(rendition.url)
|
||||
return ""
|
||||
|
||||
|
||||
@register.simple_tag(takes_context=True)
|
||||
def canonical_url(context, page=None) -> str:
|
||||
request = context["request"]
|
||||
target = page or context.get("page")
|
||||
if target and hasattr(target, "get_full_url"):
|
||||
return target.get_full_url(request)
|
||||
return request.build_absolute_uri()
|
||||
|
||||
|
||||
@register.simple_tag(takes_context=True)
|
||||
def article_og_image_url(context, article) -> str:
|
||||
return _article_image_url(context["request"], article)
|
||||
|
||||
|
||||
@register.simple_tag(takes_context=True)
|
||||
def article_json_ld(context, article):
|
||||
request = context["request"]
|
||||
site_settings = SiteSettings.for_request(request)
|
||||
image = article.hero_image or site_settings.default_og_image
|
||||
image_url = ""
|
||||
if isinstance(image, Image):
|
||||
rendition = image.get_rendition("fill-1200x630")
|
||||
image_url = request.build_absolute_uri(rendition.url)
|
||||
|
||||
nonce = getattr(request, "csp_nonce", "")
|
||||
data = {
|
||||
"@context": "https://schema.org",
|
||||
"@type": "Article",
|
||||
@@ -30,8 +47,12 @@ def article_json_ld(context, article):
|
||||
"dateModified": article.last_published_at.isoformat() if article.last_published_at else "",
|
||||
"description": article.search_description or article.summary,
|
||||
"url": article.get_full_url(request),
|
||||
"image": image_url,
|
||||
"image": _article_image_url(request, article),
|
||||
}
|
||||
return mark_safe(
|
||||
'<script type="application/ld+json">' + json.dumps(data, ensure_ascii=True) + "</script>"
|
||||
'<script type="application/ld+json" nonce="'
|
||||
+ nonce
|
||||
+ '">'
|
||||
+ json.dumps(data, ensure_ascii=True)
|
||||
+ "</script>"
|
||||
)
|
||||
|
||||
58
apps/core/tests/test_commands.py
Normal file
58
apps/core/tests/test_commands.py
Normal file
@@ -0,0 +1,58 @@
|
||||
import pytest
|
||||
from django.core.management import call_command
|
||||
from django.core.management.base import CommandError
|
||||
|
||||
from apps.blog.models import AboutPage, ArticleIndexPage, ArticlePage
|
||||
from apps.blog.tests.factories import AuthorFactory
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_check_content_integrity_passes_when_requirements_met(home_page):
|
||||
call_command("check_content_integrity")
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_check_content_integrity_fails_for_blank_summary(home_page):
|
||||
index = ArticleIndexPage(title="Articles", slug="articles")
|
||||
home_page.add_child(instance=index)
|
||||
author = AuthorFactory()
|
||||
article = ArticlePage(
|
||||
title="Article",
|
||||
slug="article",
|
||||
author=author,
|
||||
summary=" ",
|
||||
body=[("rich_text", "<p>body</p>")],
|
||||
)
|
||||
index.add_child(instance=article)
|
||||
article.save_revision().publish()
|
||||
# Simulate legacy/bad data by bypassing model save() auto-summary fallback.
|
||||
ArticlePage.objects.filter(pk=article.pk).update(summary=" ")
|
||||
|
||||
with pytest.raises(CommandError, match="empty summary"):
|
||||
call_command("check_content_integrity")
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_seed_e2e_content_creates_expected_pages():
|
||||
call_command("seed_e2e_content")
|
||||
|
||||
assert ArticlePage.objects.filter(slug="nightly-playwright-journey").exists()
|
||||
assert ArticlePage.objects.filter(slug="e2e-tagged-article").exists()
|
||||
assert ArticlePage.objects.filter(slug="e2e-no-comments").exists()
|
||||
assert AboutPage.objects.filter(slug="about").exists()
|
||||
|
||||
# Tagged article must carry the seeded tag
|
||||
tagged = ArticlePage.objects.get(slug="e2e-tagged-article")
|
||||
assert tagged.tags.filter(slug="ai-tools").exists()
|
||||
|
||||
# No-comments article must have comments disabled
|
||||
no_comments = ArticlePage.objects.get(slug="e2e-no-comments")
|
||||
assert no_comments.comments_enabled is False
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_seed_e2e_content_is_idempotent():
|
||||
"""Running the command twice must not raise or create duplicates."""
|
||||
call_command("seed_e2e_content")
|
||||
call_command("seed_e2e_content")
|
||||
assert ArticlePage.objects.filter(slug="nightly-playwright-journey").count() == 1
|
||||
@@ -21,3 +21,47 @@ def test_consent_post_view(client):
|
||||
resp = client.post("/consent/", {"accept_all": "1"}, follow=False)
|
||||
assert resp.status_code == 302
|
||||
assert CONSENT_COOKIE_NAME in resp.cookies
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_consent_get_without_cookie_defaults_false():
|
||||
request = HttpRequest()
|
||||
state = ConsentService.get_consent(request)
|
||||
assert state.analytics is False
|
||||
assert state.advertising is False
|
||||
assert state.requires_prompt is True
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_consent_malformed_cookie_returns_safe_default():
|
||||
request = HttpRequest()
|
||||
request.COOKIES[CONSENT_COOKIE_NAME] = "not=a=valid%%%cookie"
|
||||
state = ConsentService.get_consent(request)
|
||||
assert state.analytics is False
|
||||
assert state.advertising is False
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_consent_post_preferences(client):
|
||||
resp = client.post("/consent/", {"analytics": "1", "advertising": ""})
|
||||
assert resp.status_code == 302
|
||||
value = resp.cookies[CONSENT_COOKIE_NAME].value
|
||||
assert "a=1" in value
|
||||
assert "d=0" in value
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_consent_get_method_not_allowed(client):
|
||||
resp = client.get("/consent/")
|
||||
assert resp.status_code == 405
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_cookie_banner_hides_after_consent(client, home_page):
|
||||
first = client.get("/")
|
||||
assert "id=\"cookie-banner\"" in first.content.decode()
|
||||
consented = client.post("/consent/", {"accept_all": "1"})
|
||||
cookie_value = consented.cookies[CONSENT_COOKIE_NAME].value
|
||||
client.cookies[CONSENT_COOKIE_NAME] = cookie_value
|
||||
second = client.get("/")
|
||||
assert "id=\"cookie-banner\"" not in second.content.decode()
|
||||
|
||||
120
apps/core/tests/test_message_handling.py
Normal file
120
apps/core/tests/test_message_handling.py
Normal file
@@ -0,0 +1,120 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import pytest
|
||||
from django.contrib import messages
|
||||
from django.contrib.auth.models import AnonymousUser
|
||||
from django.contrib.messages import get_messages
|
||||
from django.contrib.messages.storage.fallback import FallbackStorage
|
||||
from django.contrib.sessions.middleware import SessionMiddleware
|
||||
from django.http import HttpResponse
|
||||
from django.shortcuts import render
|
||||
from django.test import RequestFactory, override_settings
|
||||
from django.urls import include, path
|
||||
|
||||
from apps.core.middleware import AdminMessageGuardMiddleware
|
||||
|
||||
|
||||
def admin_message_test_view(request):
|
||||
messages.success(request, "Page 'Test page' has been updated.")
|
||||
messages.success(request, "Page 'Test page' has been published.")
|
||||
return render(request, "wagtailadmin/base.html", {})
|
||||
|
||||
|
||||
urlpatterns = [
|
||||
path("cms/__tests__/admin-messages/", admin_message_test_view),
|
||||
path("", include("config.urls")),
|
||||
]
|
||||
|
||||
|
||||
def _build_request(rf: RequestFactory, path: str):
|
||||
request = rf.get(path)
|
||||
SessionMiddleware(lambda req: None).process_request(request)
|
||||
request.session.save()
|
||||
request.user = AnonymousUser()
|
||||
setattr(request, "_messages", FallbackStorage(request))
|
||||
return request
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_admin_message_guard_clears_stale_messages_on_frontend(rf):
|
||||
request = _build_request(rf, "/articles/test/")
|
||||
messages.success(request, "Page 'Test page' has been updated.")
|
||||
|
||||
response = AdminMessageGuardMiddleware(lambda req: HttpResponse("ok"))(request)
|
||||
|
||||
assert response.status_code == 200
|
||||
assert list(get_messages(request)) == []
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_admin_message_guard_preserves_admin_messages(rf):
|
||||
request = _build_request(rf, "/cms/pages/1/edit/")
|
||||
messages.success(request, "Page 'Test page' has been updated.")
|
||||
|
||||
response = AdminMessageGuardMiddleware(lambda req: HttpResponse("ok"))(request)
|
||||
remaining = list(get_messages(request))
|
||||
|
||||
assert response.status_code == 200
|
||||
assert len(remaining) == 1
|
||||
assert remaining[0].message == "Page 'Test page' has been updated."
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@override_settings(ROOT_URLCONF="apps.core.tests.test_message_handling")
|
||||
def test_admin_messages_have_auto_clear(client, django_user_model):
|
||||
"""The messages container must set auto-clear so messages dismiss themselves."""
|
||||
admin = django_user_model.objects.create_superuser(
|
||||
username="admin-autoclear",
|
||||
email="admin-autoclear@example.com",
|
||||
password="admin-pass",
|
||||
)
|
||||
client.force_login(admin)
|
||||
|
||||
response = client.get("/cms/__tests__/admin-messages/")
|
||||
content = response.content.decode()
|
||||
|
||||
assert response.status_code == 200
|
||||
assert "data-w-messages-auto-clear-value" in content
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@override_settings(ROOT_URLCONF="apps.core.tests.test_message_handling")
|
||||
def test_server_rendered_messages_have_auto_dismiss_script(client, django_user_model):
|
||||
"""Server-rendered messages must include an inline script that removes them
|
||||
after a timeout, because the w-messages Stimulus controller only auto-clears
|
||||
messages added via JavaScript — not ones already in the HTML."""
|
||||
admin = django_user_model.objects.create_superuser(
|
||||
username="admin-dismiss",
|
||||
email="admin-dismiss@example.com",
|
||||
password="admin-pass",
|
||||
)
|
||||
client.force_login(admin)
|
||||
|
||||
response = client.get("/cms/__tests__/admin-messages/")
|
||||
content = response.content.decode()
|
||||
|
||||
assert response.status_code == 200
|
||||
# Messages are rendered with the data-server-rendered marker
|
||||
assert "data-server-rendered" in content
|
||||
# The auto-dismiss script targets those markers
|
||||
assert "querySelectorAll" in content
|
||||
assert "[data-server-rendered]" in content
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@override_settings(ROOT_URLCONF="apps.core.tests.test_message_handling")
|
||||
def test_admin_messages_render_all_messages(client, django_user_model):
|
||||
"""All messages should be rendered (no de-duplication filtering)."""
|
||||
admin = django_user_model.objects.create_superuser(
|
||||
username="admin-render",
|
||||
email="admin-render@example.com",
|
||||
password="admin-pass",
|
||||
)
|
||||
client.force_login(admin)
|
||||
|
||||
response = client.get("/cms/__tests__/admin-messages/")
|
||||
content = response.content.decode()
|
||||
|
||||
assert response.status_code == 200
|
||||
assert "has been updated." in content
|
||||
assert "has been published." in content
|
||||
@@ -21,10 +21,20 @@ def test_context_processor_returns_sitesettings(home_page):
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_get_tag_css_fallback():
|
||||
def test_get_tag_css_auto_colour():
|
||||
"""Tags without metadata get a deterministic auto-assigned colour."""
|
||||
tag = Tag.objects.create(name="x", slug="x")
|
||||
value = core_tags.get_tag_css(tag)
|
||||
assert "bg-zinc" in value
|
||||
assert "bg-" in value
|
||||
assert "text-" in value
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_get_tag_border_css_auto_colour():
|
||||
"""Tags without metadata get a deterministic auto-assigned border colour."""
|
||||
tag = Tag.objects.create(name="y", slug="y")
|
||||
value = core_tags.get_tag_border_css(tag)
|
||||
assert "border-" in value
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
|
||||
191
apps/core/tests/test_navigation.py
Normal file
191
apps/core/tests/test_navigation.py
Normal file
@@ -0,0 +1,191 @@
|
||||
import pytest
|
||||
from wagtail.models import Site
|
||||
|
||||
from apps.blog.models import AboutPage, ArticleIndexPage
|
||||
from apps.core.models import NavigationMenuItem, SiteSettings, SocialMediaLink
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def site_with_nav(home_page):
|
||||
"""Create SiteSettings with nav items and social links for testing."""
|
||||
site = Site.objects.get(is_default_site=True)
|
||||
settings, _ = SiteSettings.objects.get_or_create(site=site)
|
||||
|
||||
# Clear any items seeded by the data migration
|
||||
settings.navigation_items.all().delete()
|
||||
settings.social_links.all().delete()
|
||||
|
||||
# Create article index and about page
|
||||
article_index = ArticleIndexPage(title="Articles", slug="articles")
|
||||
home_page.add_child(instance=article_index)
|
||||
article_index.save_revision().publish()
|
||||
|
||||
about = AboutPage(
|
||||
title="About",
|
||||
slug="about",
|
||||
mission_statement="Test mission",
|
||||
body="<p>About page</p>",
|
||||
)
|
||||
home_page.add_child(instance=about)
|
||||
about.save_revision().publish()
|
||||
|
||||
# Create nav items
|
||||
NavigationMenuItem.objects.create(
|
||||
settings=settings,
|
||||
link_page=home_page,
|
||||
link_title="Home",
|
||||
show_in_header=True,
|
||||
show_in_footer=True,
|
||||
sort_order=0,
|
||||
)
|
||||
NavigationMenuItem.objects.create(
|
||||
settings=settings,
|
||||
link_page=article_index,
|
||||
link_title="Articles",
|
||||
show_in_header=True,
|
||||
show_in_footer=True,
|
||||
sort_order=1,
|
||||
)
|
||||
NavigationMenuItem.objects.create(
|
||||
settings=settings,
|
||||
link_page=about,
|
||||
link_title="About",
|
||||
show_in_header=True,
|
||||
show_in_footer=False,
|
||||
sort_order=2,
|
||||
)
|
||||
|
||||
# Social links
|
||||
SocialMediaLink.objects.create(
|
||||
settings=settings,
|
||||
platform="twitter",
|
||||
url="https://twitter.com/nohypeai",
|
||||
label="Twitter (X)",
|
||||
sort_order=0,
|
||||
)
|
||||
SocialMediaLink.objects.create(
|
||||
settings=settings,
|
||||
platform="rss",
|
||||
url="/feed/",
|
||||
label="RSS Feed",
|
||||
sort_order=1,
|
||||
)
|
||||
|
||||
return settings
|
||||
|
||||
|
||||
@pytest.mark.django_db
class TestNavigationMenuItem:
    """Navigation menu items: liveness, URLs, title fallback, placement, ordering."""

    def test_live_page_is_rendered(self, site_with_nav):
        live_entries = [entry for entry in site_with_nav.navigation_items.all() if entry.is_live]
        assert len(live_entries) == 3

    def test_unpublished_page_excluded(self, site_with_nav):
        entry = site_with_nav.navigation_items.get(link_title="About")
        entry.link_page.unpublish()
        live_entries = [e for e in site_with_nav.navigation_items.all() if e.is_live]
        assert len(live_entries) == 2
        # The unpublished About page must have dropped out of the live set.
        assert not any(e.link_title == "About" for e in live_entries)

    def test_external_url_item(self, site_with_nav):
        NavigationMenuItem.objects.create(
            settings=site_with_nav,
            link_url="https://example.com",
            link_title="External",
            sort_order=10,
        )
        external = site_with_nav.navigation_items.get(link_title="External")
        # URL-only items are always live and expose the raw URL/title.
        assert external.is_live is True
        assert external.url == "https://example.com"
        assert external.title == "External"

    def test_title_falls_back_to_page_title(self, site_with_nav):
        entry = site_with_nav.navigation_items.get(sort_order=0)
        entry.link_title = ""
        entry.save()
        assert entry.title == entry.link_page.title

    def test_header_footer_filtering(self, site_with_nav):
        nav = site_with_nav.navigation_items
        assert nav.filter(show_in_header=True).count() == 3
        # About is configured header-only, so the footer loses one item.
        assert nav.filter(show_in_footer=True).count() == 2

    def test_sort_order_respected(self, site_with_nav):
        ordered = site_with_nav.navigation_items.all().order_by("sort_order")
        assert [entry.link_title for entry in ordered] == ["Home", "Articles", "About"]
|
||||
|
||||
|
||||
@pytest.mark.django_db
class TestSocialMediaLink:
    """Social link labels, icon template resolution, and ordering."""

    def test_display_label_from_field(self, site_with_nav):
        twitter = site_with_nav.social_links.get(platform="twitter")
        assert twitter.display_label == "Twitter (X)"

    def test_display_label_fallback(self, site_with_nav):
        twitter = site_with_nav.social_links.get(platform="twitter")
        twitter.label = ""
        # With no explicit label, the platform's default display name is used.
        assert twitter.display_label == "Twitter / X"

    def test_icon_template_path(self, site_with_nav):
        rss = site_with_nav.social_links.get(platform="rss")
        assert rss.icon_template == "components/icons/rss.html"

    def test_ordering(self, site_with_nav):
        ordered = site_with_nav.social_links.all().order_by("sort_order")
        assert [entry.platform for entry in ordered] == ["twitter", "rss"]
|
||||
|
||||
|
||||
@pytest.mark.django_db
class TestSiteSettingsDefaults:
    """Defaults applied when SiteSettings is first created for the default site.

    The three tests previously triplicated the site/settings lookup; that
    shared setup now lives in the `_settings` helper.
    """

    @staticmethod
    def _settings():
        # Fetch-or-create the settings row for the default Wagtail site.
        site = Site.objects.get(is_default_site=True)
        settings, _ = SiteSettings.objects.get_or_create(site=site)
        return settings

    def test_default_site_name(self, home_page):
        assert self._settings().site_name == "NO HYPE AI"

    def test_default_copyright(self, home_page):
        assert self._settings().copyright_text == "No Hype AI. All rights reserved."

    def test_default_tagline(self, home_page):
        assert self._settings().tagline == "Honest AI tool reviews for developers."
|
||||
|
||||
|
||||
@pytest.mark.django_db
class TestNavRendering:
    """End-to-end template rendering of header/footer navigation and branding."""

    def test_header_shows_nav_items(self, client, site_with_nav):
        # All three fixture nav titles should appear on the rendered home page.
        resp = client.get("/")
        content = resp.content.decode()
        assert "Home" in content
        assert "Articles" in content
        assert "About" in content

    def test_unpublished_page_not_in_header(self, client, site_with_nav):
        about_item = site_with_nav.navigation_items.get(link_title="About")
        about_item.link_page.unpublish()
        resp = client.get("/")
        content = resp.content.decode()
        # About should not appear as a nav link (but might appear elsewhere on page)
        assert 'href="/about/"' not in content

    def test_footer_shows_nav_items(self, client, site_with_nav):
        resp = client.get("/")
        content = resp.content.decode()
        # Footer should have social links
        assert "Twitter (X)" in content
        assert "RSS Feed" in content

    def test_footer_shows_branding(self, client, site_with_nav):
        # Changing site_name in settings must be reflected in the rendered page.
        site_with_nav.site_name = "TEST SITE"
        site_with_nav.save()
        resp = client.get("/")
        content = resp.content.decode()
        assert "TEST SITE" in content

    def test_footer_shows_copyright(self, client, site_with_nav):
        # Default copyright text (see TestSiteSettingsDefaults) appears in footer.
        resp = client.get("/")
        content = resp.content.decode()
        assert "No Hype AI. All rights reserved." in content
|
||||
47
apps/core/tests/test_nightly_e2e_playwright.py
Normal file
47
apps/core/tests/test_nightly_e2e_playwright.py
Normal file
@@ -0,0 +1,47 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
|
||||
import pytest
|
||||
from playwright.sync_api import expect, sync_playwright
|
||||
|
||||
|
||||
@pytest.mark.e2e
def test_nightly_playwright_journey() -> None:
    """Nightly smoke journey against a deployed site.

    Skipped unless E2E_BASE_URL is set. Walks: home page (cookie banner,
    theme toggle, consent), first article (comments UI), and the feed.

    Fixes over the previous version:
    - the feed assertion repeated both ``"<rss"`` and ``"<feed"`` checks
      twice; the duplicates are removed.
    - ``browser.close()`` now runs in a ``finally`` block so the browser
      is released even when an expectation fails mid-journey.
    """
    base_url = os.getenv("E2E_BASE_URL")
    if not base_url:
        pytest.skip("E2E_BASE_URL is not set")

    base_url = base_url.rstrip("/")

    with sync_playwright() as pw:
        browser = pw.chromium.launch()
        try:
            page = browser.new_page()

            # Home: banner visible, theme toggles, accepting consent hides banner.
            page.goto(f"{base_url}/", wait_until="networkidle")
            expect(page.locator("#cookie-banner")).to_be_visible()
            page.get_by_role("button", name="Toggle theme").click()
            page.get_by_role("button", name="Accept all").first.click()
            expect(page.locator("#cookie-banner")).to_have_count(0)

            # Article listing -> first article detail with comments UI.
            page.goto(f"{base_url}/articles/", wait_until="networkidle")
            first_article_link = page.locator("main article a").first
            expect(first_article_link).to_be_visible()
            article_href = first_article_link.get_attribute("href")
            assert article_href

            article_url = article_href if article_href.startswith("http") else f"{base_url}{article_href}"
            page.goto(article_url, wait_until="networkidle")
            expect(page.get_by_role("heading", name="Comments", exact=True)).to_be_visible()
            expect(page.get_by_role("button", name="Post comment")).to_be_visible()

            # Feed endpoint serves RSS or Atom XML.
            page.goto(f"{base_url}/feed/", wait_until="networkidle")
            feed_content = page.content()
            assert "<rss" in feed_content or "<feed" in feed_content
        finally:
            browser.close()
|
||||
75
apps/core/tests/test_performance.py
Normal file
75
apps/core/tests/test_performance.py
Normal file
@@ -0,0 +1,75 @@
|
||||
import pytest
|
||||
from taggit.models import Tag
|
||||
from wagtail.models import Site
|
||||
|
||||
from apps.blog.models import ArticleIndexPage, ArticlePage, HomePage, TagMetadata
|
||||
from apps.blog.tests.factories import AuthorFactory
|
||||
|
||||
|
||||
def _build_article_tree(home_page: HomePage, count: int = 12):
    """Create an ArticleIndexPage under ``home_page`` with ``count`` published articles.

    All articles share a single author and a single "Bench" tag (with
    metadata), so query-budget tests exercise the tag/author joins.
    Returns the index page.
    """
    index = ArticleIndexPage(title="Articles", slug="articles")
    home_page.add_child(instance=index)
    author = AuthorFactory()
    tag = Tag.objects.create(name="Bench", slug="bench")
    TagMetadata.objects.create(tag=tag, colour="cyan")

    for n in range(count):
        article = ArticlePage(
            title=f"Article {n}",
            slug=f"article-{n}",
            author=author,
            summary="summary",
            body=[("rich_text", "<p>body words</p>")],
        )
        index.add_child(instance=article)
        article.tags.add(tag)
        article.save_revision().publish()
    return index
|
||||
|
||||
|
||||
@pytest.mark.django_db
def test_homepage_query_budget(rf, home_page, django_assert_num_queries):
    # Home page context must stay within ~10 queries with articles present.
    _build_article_tree(home_page, count=8)
    request = rf.get("/")
    request.site = Site.objects.get(is_default_site=True)
    with django_assert_num_queries(10, exact=False):
        context = home_page.get_context(request)
        list(context["latest_articles"])
        list(context["more_articles"])
    assert len(context["latest_articles"]) <= 5


@pytest.mark.django_db
def test_article_index_query_budget(rf, home_page, django_assert_num_queries):
    # Paginated article index (12 articles) budgeted at ~12 queries.
    index = _build_article_tree(home_page, count=12)
    request = rf.get("/articles/")
    request.site = Site.objects.get(is_default_site=True)
    with django_assert_num_queries(12, exact=False):
        context = index.get_context(request)
        list(context["articles"])
        list(context["available_tags"])
    assert context["paginator"].count == 12


@pytest.mark.django_db
def test_article_read_query_budget(rf, home_page, django_assert_num_queries):
    # Article detail (related articles + comments) budgeted at ~8 queries.
    index = _build_article_tree(home_page, count=4)
    article = ArticlePage.objects.child_of(index).live().first()
    assert article is not None
    request = rf.get(article.url)
    request.site = Site.objects.get(is_default_site=True)
    with django_assert_num_queries(8, exact=False):
        context = article.get_context(request)
        list(context["related_articles"])
        list(context["approved_comments"])
    assert context["related_articles"] is not None


def test_read_time_benchmark(benchmark):
    # Read-time computation over a ~1000-word body must average under 50 ms.
    author = AuthorFactory.build()
    body = [("rich_text", "<p>" + "word " * 1000 + "</p>")]
    article = ArticlePage(title="Bench", slug="bench", author=author, summary="summary", body=body)

    result = benchmark(article._compute_read_time)
    assert result >= 1
    assert benchmark.stats.stats.mean < 0.05
|
||||
101
apps/core/tests/test_security.py
Normal file
101
apps/core/tests/test_security.py
Normal file
@@ -0,0 +1,101 @@
|
||||
import re
|
||||
|
||||
import pytest
|
||||
|
||||
from apps.blog.models import ArticleIndexPage, ArticlePage
|
||||
from apps.blog.tests.factories import AuthorFactory
|
||||
|
||||
|
||||
@pytest.mark.django_db
def test_security_headers_present(client, home_page):
    # Baseline security headers on every response: a CSP without
    # unsafe-inline, Permissions-Policy, clickjacking and referrer policy.
    resp = client.get("/")
    assert resp.status_code == 200
    assert "Content-Security-Policy" in resp
    assert "Permissions-Policy" in resp
    assert "unsafe-inline" not in resp["Content-Security-Policy"]
    assert "script-src" in resp["Content-Security-Policy"]
    assert resp["X-Frame-Options"] == "SAMEORIGIN"
    assert "strict-origin-when-cross-origin" in resp["Referrer-Policy"]


@pytest.mark.django_db
def test_csp_nonce_applied_to_inline_script(client, home_page):
    # The nonce advertised in the CSP header must match the nonce attribute
    # rendered on inline <script> tags in the HTML.
    resp = client.get("/")
    csp = resp["Content-Security-Policy"]
    match = re.search(r"nonce-([^' ;]+)", csp)
    assert match
    nonce = match.group(1)
    html = resp.content.decode()
    assert f'nonce="{nonce}"' in html


@pytest.mark.django_db
def test_robots_disallows_cms_and_contains_sitemap(client):
    # robots.txt must hide the CMS admin and advertise the sitemap URL.
    resp = client.get("/robots.txt")
    body = resp.content.decode()
    assert resp.status_code == 200
    assert "Disallow: /cms/" in body
    assert "Sitemap:" in body


@pytest.mark.django_db
def test_admin_obscured_path_redirects_to_cms(client):
    # The default /admin/ path redirects to the real admin at /cms/.
    resp = client.get("/admin/")
    assert resp.status_code == 302
    assert resp["Location"] == "/cms/"
|
||||
|
||||
|
||||
@pytest.mark.django_db
def test_article_comment_form_contains_csrf_token(client, home_page):
    # The comment form on a published article must render Django's CSRF token.
    index = ArticleIndexPage(title="Articles", slug="articles")
    home_page.add_child(instance=index)
    author = AuthorFactory()
    article = ArticlePage(
        title="CSRF Article",
        slug="csrf-article",
        author=author,
        summary="summary",
        body=[("rich_text", "<p>Body</p>")],
    )
    index.add_child(instance=article)
    article.save_revision().publish()

    resp = client.get("/articles/csrf-article/")
    html = resp.content.decode()
    assert resp.status_code == 200
    assert "csrfmiddlewaretoken" in html


@pytest.mark.django_db
def test_consent_rejects_open_redirect(client, home_page):
    # A cross-origin Referer must not be used as the post-consent redirect
    # target; the view should fall back to "/".
    resp = client.post(
        "/consent/",
        {"reject_all": "1"},
        HTTP_REFERER="https://evil.example.com/phish",
    )
    assert resp.status_code == 302
    assert resp["Location"] == "/"


@pytest.mark.django_db
def test_article_json_ld_script_has_csp_nonce(client, home_page):
    # The structured-data <script type="application/ld+json"> tag must carry
    # the same CSP nonce that the response header advertises.
    index = ArticleIndexPage(title="Articles", slug="articles")
    home_page.add_child(instance=index)
    author = AuthorFactory()
    article = ArticlePage(
        title="Nonce Article",
        slug="nonce-article",
        author=author,
        summary="summary",
        body=[("rich_text", "<p>Body</p>")],
    )
    index.add_child(instance=article)
    article.save_revision().publish()

    resp = client.get("/articles/nonce-article/")
    csp = resp["Content-Security-Policy"]
    match = re.search(r"nonce-([^' ;]+)", csp)
    assert match
    nonce = match.group(1)
    html = resp.content.decode()
    assert f'type="application/ld+json" nonce="{nonce}"' in html
|
||||
@@ -1,5 +1,7 @@
|
||||
import pytest
|
||||
|
||||
from apps.blog.models import ArticleIndexPage, ArticlePage, Category
|
||||
from apps.blog.tests.factories import AuthorFactory
|
||||
from apps.legal.models import LegalIndexPage, LegalPage
|
||||
|
||||
|
||||
@@ -13,3 +15,36 @@ def test_get_legal_pages_tag(client, home_page):
|
||||
|
||||
resp = client.get("/")
|
||||
assert resp.status_code == 200
|
||||
|
||||
|
||||
@pytest.mark.django_db
def test_categories_nav_tag_renders_category_link(client, home_page):
    # A nav-enabled category with a published article renders its archive link.
    index = ArticleIndexPage(title="Articles", slug="articles")
    home_page.add_child(instance=index)
    category = Category.objects.create(name="Reviews", slug="reviews", show_in_nav=True)
    author = AuthorFactory()
    article = ArticlePage(
        title="R1",
        slug="r1",
        author=author,
        summary="summary",
        body=[("rich_text", "<p>body</p>")],
        category=category,
    )
    index.add_child(instance=article)
    article.save_revision().publish()

    resp = client.get("/")
    assert resp.status_code == 200
    assert "/articles/category/reviews/" in resp.content.decode()


@pytest.mark.django_db
def test_categories_nav_tag_includes_empty_nav_category(client, home_page):
    # Nav-enabled categories are linked even before they have any articles.
    index = ArticleIndexPage(title="Articles", slug="articles")
    home_page.add_child(instance=index)
    Category.objects.create(name="Benchmarks", slug="benchmarks", show_in_nav=True)

    resp = client.get("/")
    assert resp.status_code == 200
    assert "/articles/category/benchmarks/" in resp.content.decode()
|
||||
|
||||
@@ -2,6 +2,7 @@ from __future__ import annotations
|
||||
|
||||
from django.http import HttpRequest, HttpResponse, HttpResponseNotAllowed
|
||||
from django.shortcuts import redirect, render
|
||||
from django.utils.http import url_has_allowed_host_and_scheme
|
||||
|
||||
from apps.core.consent import ConsentService
|
||||
|
||||
@@ -24,6 +25,12 @@ def consent_view(request: HttpRequest) -> HttpResponse:
|
||||
advertising = request.POST.get("advertising") in {"true", "1", "on"}
|
||||
|
||||
target = request.META.get("HTTP_REFERER", "/")
|
||||
if not url_has_allowed_host_and_scheme(
|
||||
url=target,
|
||||
allowed_hosts={request.get_host()},
|
||||
require_https=request.is_secure(),
|
||||
):
|
||||
target = "/"
|
||||
response = redirect(target)
|
||||
ConsentService.set_consent(response, analytics=analytics, advertising=advertising)
|
||||
return response
|
||||
|
||||
1
apps/health/__init__.py
Normal file
1
apps/health/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
|
||||
6
apps/health/apps.py
Normal file
6
apps/health/apps.py
Normal file
@@ -0,0 +1,6 @@
|
||||
from django.apps import AppConfig
|
||||
|
||||
|
||||
class HealthConfig(AppConfig):
|
||||
default_auto_field = "django.db.models.BigAutoField"
|
||||
name = "apps.health"
|
||||
80
apps/health/checks.py
Normal file
80
apps/health/checks.py
Normal file
@@ -0,0 +1,80 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import importlib
|
||||
import os
|
||||
import time
|
||||
import uuid
|
||||
from pathlib import Path
|
||||
|
||||
from django.core.cache import cache
|
||||
from django.db import connection
|
||||
|
||||
BACKUP_MAX_AGE_SECONDS = 48 * 60 * 60
|
||||
|
||||
|
||||
def check_db() -> dict[str, float | str]:
    """Probe the default database with ``SELECT 1``.

    Returns ``{"status": "ok", "latency_ms": ...}`` on success, or
    ``{"status": "fail", "detail": ...}`` with the error text on failure.
    """
    start = time.perf_counter()
    try:
        with connection.cursor() as cursor:
            cursor.execute("SELECT 1")
    except Exception as exc:
        return {"status": "fail", "detail": str(exc)}
    elapsed_ms = (time.perf_counter() - start) * 1000
    return {"status": "ok", "latency_ms": elapsed_ms}
|
||||
|
||||
|
||||
def check_cache() -> dict[str, float | str]:
    """Round-trip a random probe value through the cache and time it.

    A unique key/value pair is written, read back, compared, then deleted.
    Any exception or a mismatched read yields a "fail" payload.
    """
    key = f"health:{uuid.uuid4().hex}"
    expected = uuid.uuid4().hex
    begin = time.perf_counter()
    try:
        cache.set(key, expected, timeout=5)
        if cache.get(key) != expected:
            return {"status": "fail", "detail": "Cache probe returned unexpected value"}
        cache.delete(key)
    except Exception as exc:
        return {"status": "fail", "detail": str(exc)}
    return {"status": "ok", "latency_ms": (time.perf_counter() - begin) * 1000}
|
||||
|
||||
|
||||
def check_celery() -> dict[str, str]:
    """Verify the Celery broker is reachable; soft-pass when unconfigured.

    Returns "ok" (with an explanatory detail) when CELERY_BROKER_URL is
    unset or kombu is unavailable, "fail" when the broker cannot be reached.
    """
    url = os.environ.get("CELERY_BROKER_URL")
    if not url:
        return {"status": "ok", "detail": "Celery not configured: CELERY_BROKER_URL is unset"}

    try:
        kombu_module = importlib.import_module("kombu")
    except ImportError:
        return {"status": "ok", "detail": "Celery broker check skipped: kombu is not installed"}

    try:
        with kombu_module.Connection(url, connect_timeout=3) as conn:
            conn.ensure_connection(max_retries=1)
    except Exception as exc:
        return {"status": "fail", "detail": str(exc)}
    return {"status": "ok"}
|
||||
|
||||
|
||||
def check_backup() -> dict[str, str]:
    """Confirm the most recent backup recorded in BACKUP_STATUS_FILE is fresh.

    The status file holds a Unix timestamp of the last backup; anything
    older than BACKUP_MAX_AGE_SECONDS (48 h), missing, or malformed fails.
    """
    status_path = os.environ.get("BACKUP_STATUS_FILE")
    if not status_path:
        return {"status": "fail", "detail": "Backup monitoring not configured: BACKUP_STATUS_FILE is unset"}

    try:
        contents = Path(status_path).read_text(encoding="utf-8").strip()
    except FileNotFoundError:
        return {"status": "fail", "detail": f"Backup status file not found: {status_path}"}
    except OSError as exc:
        return {"status": "fail", "detail": str(exc)}

    try:
        last_backup_at = float(contents)
    except ValueError:
        return {"status": "fail", "detail": "Invalid backup status file"}

    age_seconds = time.time() - last_backup_at
    if age_seconds > BACKUP_MAX_AGE_SECONDS:
        return {"status": "fail", "detail": f"Last backup is {age_seconds / 3600:.1f} hours old (> 48 h)"}

    return {"status": "ok"}
|
||||
1
apps/health/tests/__init__.py
Normal file
1
apps/health/tests/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
|
||||
205
apps/health/tests/test_checks.py
Normal file
205
apps/health/tests/test_checks.py
Normal file
@@ -0,0 +1,205 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import importlib
|
||||
import time
|
||||
from types import SimpleNamespace
|
||||
|
||||
import pytest
|
||||
from django.db.utils import OperationalError
|
||||
|
||||
from apps.health import checks
|
||||
|
||||
|
||||
class SuccessfulCursor:
    """Context-manager cursor stub whose execute() records the last query."""

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc, tb):
        # Returning False propagates any exception raised in the with-body.
        return False

    def execute(self, query):
        # Remember the statement so tests could inspect what was run.
        self.query = query


class FailingCursor:
    """Context-manager cursor stub whose execute() always raises OperationalError."""

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc, tb):
        return False

    def execute(self, query):
        raise OperationalError("database unavailable")
|
||||
|
||||
class FakeCache:
    """In-memory stand-in for Django's cache.

    When constructed with ``value_to_return``, every get() yields that value
    regardless of key, letting tests simulate a misbehaving cache backend.
    """

    def __init__(self, value_to_return=None):
        self.value_to_return = value_to_return
        self.stored = {}

    def set(self, key, value, timeout=None):
        # timeout is accepted for API parity; entries never expire here.
        self.stored[key] = value

    def get(self, key):
        if self.value_to_return is None:
            return self.stored.get(key)
        return self.value_to_return

    def delete(self, key):
        self.stored.pop(key, None)
|
||||
|
||||
|
||||
@pytest.mark.django_db
def test_db_ok(monkeypatch):
    # A working cursor yields status "ok" plus a measured latency.
    monkeypatch.setattr(checks.connection, "cursor", lambda: SuccessfulCursor())

    result = checks.check_db()

    assert result["status"] == "ok"
    assert "latency_ms" in result


@pytest.mark.django_db
def test_db_fail(monkeypatch):
    # An OperationalError from the cursor is surfaced as the failure detail.
    monkeypatch.setattr(checks.connection, "cursor", lambda: FailingCursor())

    result = checks.check_db()

    assert result == {"status": "fail", "detail": "database unavailable"}


@pytest.mark.django_db
def test_cache_ok(monkeypatch):
    # A well-behaved cache passes the probe round-trip with a latency.
    monkeypatch.setattr(checks, "cache", FakeCache())

    result = checks.check_cache()

    assert result["status"] == "ok"
    assert "latency_ms" in result


@pytest.mark.django_db
def test_cache_fail(monkeypatch):
    # A cache that returns the wrong value fails the probe round-trip.
    monkeypatch.setattr(checks, "cache", FakeCache(value_to_return="wrong-value"))

    result = checks.check_cache()

    assert result == {"status": "fail", "detail": "Cache probe returned unexpected value"}
|
||||
|
||||
|
||||
def test_celery_no_broker(monkeypatch):
    # With no broker configured the check soft-passes with an explanation.
    monkeypatch.delenv("CELERY_BROKER_URL", raising=False)

    result = checks.check_celery()

    assert result["status"] == "ok"
    assert "CELERY_BROKER_URL is unset" in result["detail"]


def test_celery_no_kombu(monkeypatch):
    # A missing kombu package also soft-passes (broker check skipped).
    monkeypatch.setenv("CELERY_BROKER_URL", "redis://broker")

    def raise_import_error(name):
        raise ImportError(name)

    monkeypatch.setattr(importlib, "import_module", raise_import_error)

    result = checks.check_celery()

    assert result["status"] == "ok"
    assert "kombu is not installed" in result["detail"]


def test_celery_ok(monkeypatch):
    # A connectable broker (faked via kombu.Connection) yields plain "ok".
    monkeypatch.setenv("CELERY_BROKER_URL", "redis://broker")

    class FakeBrokerConnection:
        def __init__(self, url, connect_timeout):
            self.url = url
            self.connect_timeout = connect_timeout

        def __enter__(self):
            return self

        def __exit__(self, exc_type, exc, tb):
            return False

        def ensure_connection(self, max_retries):
            self.max_retries = max_retries

    monkeypatch.setattr(importlib, "import_module", lambda name: SimpleNamespace(Connection=FakeBrokerConnection))

    result = checks.check_celery()

    assert result == {"status": "ok"}


def test_celery_fail(monkeypatch):
    # A broker connection error is reported as a failure with its message.
    monkeypatch.setenv("CELERY_BROKER_URL", "redis://broker")

    class BrokenBrokerConnection:
        def __init__(self, url, connect_timeout):
            self.url = url
            self.connect_timeout = connect_timeout

        def __enter__(self):
            raise OSError("broker down")

        def __exit__(self, exc_type, exc, tb):
            return False

    monkeypatch.setattr(importlib, "import_module", lambda name: SimpleNamespace(Connection=BrokenBrokerConnection))

    result = checks.check_celery()

    assert result == {"status": "fail", "detail": "broker down"}
|
||||
|
||||
|
||||
def test_backup_no_env(monkeypatch):
    # Backup monitoring must be explicitly configured; unset env fails.
    monkeypatch.delenv("BACKUP_STATUS_FILE", raising=False)

    result = checks.check_backup()

    assert result["status"] == "fail"
    assert "BACKUP_STATUS_FILE is unset" in result["detail"]


def test_backup_missing_file(monkeypatch, tmp_path):
    # A configured-but-absent status file is a failure naming the path.
    status_file = tmp_path / "missing-backup-status"
    monkeypatch.setenv("BACKUP_STATUS_FILE", str(status_file))

    result = checks.check_backup()

    assert result == {"status": "fail", "detail": f"Backup status file not found: {status_file}"}


def test_backup_fresh(monkeypatch, tmp_path):
    # A timestamp one minute old is well within the 48 h window.
    status_file = tmp_path / "backup-status"
    status_file.write_text(str(time.time() - 60), encoding="utf-8")
    monkeypatch.setenv("BACKUP_STATUS_FILE", str(status_file))

    result = checks.check_backup()

    assert result == {"status": "ok"}


def test_backup_stale(monkeypatch, tmp_path):
    # A timestamp just past the max age fails with an age message.
    status_file = tmp_path / "backup-status"
    stale_timestamp = time.time() - (checks.BACKUP_MAX_AGE_SECONDS + 1)
    status_file.write_text(str(stale_timestamp), encoding="utf-8")
    monkeypatch.setenv("BACKUP_STATUS_FILE", str(status_file))

    result = checks.check_backup()

    assert result["status"] == "fail"
    assert "Last backup is" in result["detail"]


def test_backup_invalid(monkeypatch, tmp_path):
    # Non-numeric file contents are rejected as an invalid status file.
    status_file = tmp_path / "backup-status"
    status_file.write_text("not-a-timestamp", encoding="utf-8")
    monkeypatch.setenv("BACKUP_STATUS_FILE", str(status_file))

    result = checks.check_backup()

    assert result == {"status": "fail", "detail": "Invalid backup status file"}
|
||||
103
apps/health/tests/test_views.py
Normal file
103
apps/health/tests/test_views.py
Normal file
@@ -0,0 +1,103 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import re
|
||||
|
||||
import pytest
|
||||
|
||||
|
||||
def _mock_checks(monkeypatch, **overrides):
|
||||
payloads = {
|
||||
"db": {"status": "ok", "latency_ms": 1.0},
|
||||
"cache": {"status": "ok", "latency_ms": 1.0},
|
||||
"celery": {"status": "ok"},
|
||||
"backup": {"status": "ok"},
|
||||
}
|
||||
payloads.update(overrides)
|
||||
|
||||
monkeypatch.setattr("apps.health.views.check_db", lambda: payloads["db"])
|
||||
monkeypatch.setattr("apps.health.views.check_cache", lambda: payloads["cache"])
|
||||
monkeypatch.setattr("apps.health.views.check_celery", lambda: payloads["celery"])
|
||||
monkeypatch.setattr("apps.health.views.check_backup", lambda: payloads["backup"])
|
||||
|
||||
|
||||
@pytest.mark.django_db
def test_healthy(client, monkeypatch):
    # All checks "ok" -> 200 with overall status "ok".
    _mock_checks(monkeypatch)

    response = client.get("/health/")

    assert response.status_code == 200
    assert response.json()["status"] == "ok"


@pytest.mark.django_db
def test_degraded_celery(client, monkeypatch):
    # A non-critical (celery) failure degrades status but keeps HTTP 200.
    _mock_checks(monkeypatch, celery={"status": "fail", "detail": "broker down"})

    response = client.get("/health/")

    assert response.status_code == 200
    assert response.json()["status"] == "degraded"


@pytest.mark.django_db
def test_degraded_backup(client, monkeypatch):
    # Backup failures are likewise non-critical: degraded, HTTP 200.
    _mock_checks(monkeypatch, backup={"status": "fail", "detail": "backup missing"})

    response = client.get("/health/")

    assert response.status_code == 200
    assert response.json()["status"] == "degraded"


@pytest.mark.django_db
def test_unhealthy_db(client, monkeypatch):
    # A critical (db) failure makes the endpoint return 503/"unhealthy".
    _mock_checks(monkeypatch, db={"status": "fail", "detail": "db down"})

    response = client.get("/health/")

    assert response.status_code == 503
    assert response.json()["status"] == "unhealthy"


@pytest.mark.django_db
def test_unhealthy_cache(client, monkeypatch):
    # Cache is also critical: failure yields 503/"unhealthy".
    _mock_checks(monkeypatch, cache={"status": "fail", "detail": "cache down"})

    response = client.get("/health/")

    assert response.status_code == 503
    assert response.json()["status"] == "unhealthy"


@pytest.mark.django_db
def test_response_shape(client, monkeypatch):
    # The payload exposes exactly status/version/checks/timestamp, with an
    # ISO-8601 UTC ("Z") timestamp.
    _mock_checks(monkeypatch)

    payload = client.get("/health/").json()

    assert set(payload) == {"status", "version", "checks", "timestamp"}
    assert set(payload["version"]) == {"git_sha", "build"}
    assert set(payload["checks"]) == {"db", "cache", "celery", "backup"}
    assert re.fullmatch(r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}Z", payload["timestamp"])


@pytest.mark.django_db
def test_version_fields(client, monkeypatch):
    # GIT_SHA and BUILD_ID environment variables flow into the payload.
    _mock_checks(monkeypatch)
    monkeypatch.setenv("GIT_SHA", "59cc1c4")
    monkeypatch.setenv("BUILD_ID", "build-20260306-59cc1c4")

    payload = client.get("/health/").json()

    assert payload["version"]["git_sha"] == "59cc1c4"
    assert payload["version"]["build"] == "build-20260306-59cc1c4"


@pytest.mark.django_db
def test_no_cache_headers(client, monkeypatch):
    # The endpoint must not be cached by intermediaries.
    _mock_checks(monkeypatch)

    response = client.get("/health/")

    assert "no-cache" in response["Cache-Control"]
|
||||
7
apps/health/urls.py
Normal file
7
apps/health/urls.py
Normal file
@@ -0,0 +1,7 @@
|
||||
from django.urls import path

from apps.health.views import health_view

# URLconf for the health app; mounted by the project urlconf (the tests
# request it at /health/).
urlpatterns = [
    path("", health_view, name="health"),
]
|
||||
42
apps/health/views.py
Normal file
42
apps/health/views.py
Normal file
@@ -0,0 +1,42 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
from collections.abc import Mapping
|
||||
from datetime import UTC, datetime
|
||||
from typing import cast
|
||||
|
||||
from django.http import JsonResponse
|
||||
from django.views.decorators.cache import never_cache
|
||||
|
||||
from apps.health.checks import check_backup, check_cache, check_celery, check_db
|
||||
|
||||
CRITICAL_CHECKS = {"db", "cache"}
|
||||
|
||||
|
||||
@never_cache
def health_view(request):
    """Aggregate the four health probes into a JSON status payload.

    Failures in critical checks (db, cache) produce HTTP 503 with status
    "unhealthy"; any other failure reports "degraded" with HTTP 200.
    """
    results: dict[str, Mapping[str, object]] = {
        "db": check_db(),
        "cache": check_cache(),
        "celery": check_celery(),
        "backup": check_backup(),
    }

    critical_failed = any(cast(str, results[name]["status"]) == "fail" for name in CRITICAL_CHECKS)
    any_failed = any(cast(str, result["status"]) == "fail" for result in results.values())
    if critical_failed:
        overall_status = "unhealthy"
    elif any_failed:
        overall_status = "degraded"
    else:
        overall_status = "ok"

    payload = {
        "status": overall_status,
        "version": {
            "git_sha": os.environ.get("GIT_SHA", "unknown"),
            "build": os.environ.get("BUILD_ID", "unknown"),
        },
        "checks": results,
        "timestamp": datetime.now(UTC).strftime("%Y-%m-%dT%H:%M:%SZ"),
    }
    return JsonResponse(payload, status=503 if overall_status == "unhealthy" else 200)
|
||||
@@ -1,6 +1,9 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import os
|
||||
|
||||
import requests
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -15,9 +18,26 @@ class ProviderSyncService:
|
||||
|
||||
|
||||
class ButtondownSyncService(ProviderSyncService):
    """Pushes newsletter subscribers to the Buttondown REST API."""

    endpoint = "https://api.buttondown.email/v1/subscribers"

    def sync(self, subscription):
        """Create the subscriber remotely; raise ProviderSyncError on failure."""
        token = os.getenv("BUTTONDOWN_API_KEY", "")
        if not token:
            raise ProviderSyncError("BUTTONDOWN_API_KEY is not configured")

        reply = requests.post(
            self.endpoint,
            headers={"Authorization": f"Token {token}", "Content-Type": "application/json"},
            json={"email": subscription.email},
            timeout=10,
        )
        # Treat any 4xx/5xx as a sync failure worth surfacing to the caller.
        if reply.status_code >= 400:
            raise ProviderSyncError(f"Buttondown sync failed: {reply.status_code}")
        logger.info("Synced subscription %s to Buttondown", subscription.email)
|
||||
|
||||
|
||||
def get_provider_service() -> ProviderSyncService:
|
||||
provider = os.getenv("NEWSLETTER_PROVIDER", "buttondown").lower().strip()
|
||||
if provider != "buttondown":
|
||||
raise ProviderSyncError(f"Unsupported newsletter provider: {provider}")
|
||||
return ButtondownSyncService()
|
||||
|
||||
@@ -12,6 +12,24 @@ def test_subscribe_ok(client):
|
||||
assert NewsletterSubscription.objects.filter(email="a@example.com").exists()
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_subscribe_sends_confirmation_email(client, mailoutbox):
|
||||
resp = client.post("/newsletter/subscribe/", {"email": "new@example.com", "source": "nav"})
|
||||
assert resp.status_code == 200
|
||||
assert len(mailoutbox) == 1
|
||||
assert "Confirm your No Hype AI newsletter subscription" in mailoutbox[0].subject
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_duplicate_subscribe_returns_ok_without_extra_email(client, mailoutbox):
|
||||
client.post("/newsletter/subscribe/", {"email": "dupe@example.com", "source": "nav"})
|
||||
assert len(mailoutbox) == 1
|
||||
resp = client.post("/newsletter/subscribe/", {"email": "dupe@example.com", "source": "footer"})
|
||||
assert resp.status_code == 200
|
||||
assert resp.json()["status"] == "ok"
|
||||
assert len(mailoutbox) == 1
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_subscribe_invalid(client):
|
||||
resp = client.post("/newsletter/subscribe/", {"email": "bad"})
|
||||
|
||||
@@ -1,8 +1,13 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
|
||||
from django.core import signing
|
||||
from django.core.mail import EmailMultiAlternatives
|
||||
from django.http import Http404, JsonResponse
|
||||
from django.shortcuts import get_object_or_404, redirect
|
||||
from django.template.loader import render_to_string
|
||||
from django.urls import reverse
|
||||
from django.views import View
|
||||
|
||||
from apps.newsletter.forms import SubscriptionForm
|
||||
@@ -10,6 +15,27 @@ from apps.newsletter.models import NewsletterSubscription
|
||||
from apps.newsletter.services import ProviderSyncError, get_provider_service
|
||||
|
||||
CONFIRMATION_TOKEN_MAX_AGE_SECONDS = 60 * 60 * 24 * 2
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def confirmation_token(email: str) -> str:
|
||||
return signing.dumps(email, salt="newsletter-confirm")
|
||||
|
||||
|
||||
def send_confirmation_email(request, subscription: NewsletterSubscription) -> None:
|
||||
token = confirmation_token(subscription.email)
|
||||
confirm_url = request.build_absolute_uri(reverse("newsletter_confirm", args=[token]))
|
||||
context = {"confirmation_url": confirm_url, "subscription": subscription}
|
||||
subject = render_to_string("newsletter/email/confirmation_subject.txt", context).strip()
|
||||
text_body = render_to_string("newsletter/email/confirmation_body.txt", context)
|
||||
html_body = render_to_string("newsletter/email/confirmation_body.html", context)
|
||||
message = EmailMultiAlternatives(
|
||||
subject=subject,
|
||||
body=text_body,
|
||||
to=[subscription.email],
|
||||
)
|
||||
message.attach_alternative(html_body, "text/html")
|
||||
message.send()
|
||||
|
||||
|
||||
class SubscribeView(View):
|
||||
@@ -20,9 +46,14 @@ class SubscribeView(View):
|
||||
if form.cleaned_data.get("honeypot"):
|
||||
return JsonResponse({"status": "ok"})
|
||||
|
||||
email = form.cleaned_data["email"]
|
||||
email = form.cleaned_data["email"].lower().strip()
|
||||
source = form.cleaned_data.get("source") or "unknown"
|
||||
NewsletterSubscription.objects.get_or_create(email=email, defaults={"source": source})
|
||||
subscription, created = NewsletterSubscription.objects.get_or_create(
|
||||
email=email,
|
||||
defaults={"source": source},
|
||||
)
|
||||
if created and not subscription.confirmed:
|
||||
send_confirmation_email(request, subscription)
|
||||
return JsonResponse({"status": "ok"})
|
||||
|
||||
|
||||
@@ -42,10 +73,6 @@ class ConfirmView(View):
|
||||
service = get_provider_service()
|
||||
try:
|
||||
service.sync(subscription)
|
||||
except ProviderSyncError:
|
||||
pass
|
||||
except ProviderSyncError as exc:
|
||||
logger.exception("Newsletter provider sync failed: %s", exc)
|
||||
return redirect("/")
|
||||
|
||||
|
||||
def confirmation_token(email: str) -> str:
|
||||
return signing.dumps(email, salt="newsletter-confirm")
|
||||
|
||||
@@ -4,13 +4,20 @@ import os
|
||||
from pathlib import Path
|
||||
|
||||
import dj_database_url
|
||||
from django.core.exceptions import ImproperlyConfigured
|
||||
from dotenv import load_dotenv
|
||||
|
||||
load_dotenv()
|
||||
|
||||
BASE_DIR = Path(__file__).resolve().parents[2]
|
||||
|
||||
SECRET_KEY = os.getenv("SECRET_KEY", "unsafe-dev-secret")
|
||||
SECRET_KEY = os.getenv("SECRET_KEY")
|
||||
if not SECRET_KEY:
|
||||
raise ImproperlyConfigured("SECRET_KEY environment variable is required.")
|
||||
|
||||
DATABASE_URL = os.getenv("DATABASE_URL")
|
||||
if not DATABASE_URL:
|
||||
raise ImproperlyConfigured("DATABASE_URL environment variable is required.")
|
||||
DEBUG = os.getenv("DEBUG", "0") == "1"
|
||||
ALLOWED_HOSTS = [h.strip() for h in os.getenv("ALLOWED_HOSTS", "localhost,127.0.0.1").split(",") if h.strip()]
|
||||
|
||||
@@ -22,6 +29,7 @@ INSTALLED_APPS = [
|
||||
"django.contrib.messages",
|
||||
"django.contrib.staticfiles",
|
||||
"django.contrib.sitemaps",
|
||||
"django.contrib.postgres",
|
||||
"taggit",
|
||||
"modelcluster",
|
||||
"wagtail.contrib.forms",
|
||||
@@ -39,6 +47,9 @@ INSTALLED_APPS = [
|
||||
"wagtail",
|
||||
"wagtailseo",
|
||||
"tailwind",
|
||||
"theme",
|
||||
"django_htmx",
|
||||
"apps.health",
|
||||
"apps.core",
|
||||
"apps.blog",
|
||||
"apps.authors",
|
||||
@@ -49,13 +60,16 @@ INSTALLED_APPS = [
|
||||
|
||||
MIDDLEWARE = [
|
||||
"django.middleware.security.SecurityMiddleware",
|
||||
"apps.core.middleware.SecurityHeadersMiddleware",
|
||||
"whitenoise.middleware.WhiteNoiseMiddleware",
|
||||
"django.contrib.sessions.middleware.SessionMiddleware",
|
||||
"django.middleware.common.CommonMiddleware",
|
||||
"django.middleware.csrf.CsrfViewMiddleware",
|
||||
"django.contrib.auth.middleware.AuthenticationMiddleware",
|
||||
"django.contrib.messages.middleware.MessageMiddleware",
|
||||
"apps.core.middleware.AdminMessageGuardMiddleware",
|
||||
"django.middleware.clickjacking.XFrameOptionsMiddleware",
|
||||
"django_htmx.middleware.HtmxMiddleware",
|
||||
"wagtail.contrib.redirects.middleware.RedirectMiddleware",
|
||||
"apps.core.middleware.ConsentMiddleware",
|
||||
]
|
||||
@@ -80,9 +94,7 @@ TEMPLATES = [
|
||||
|
||||
WSGI_APPLICATION = "config.wsgi.application"
|
||||
|
||||
DATABASES = {
|
||||
"default": dj_database_url.parse(os.getenv("DATABASE_URL", f"sqlite:///{BASE_DIR / 'db.sqlite3'}"))
|
||||
}
|
||||
DATABASES = {"default": dj_database_url.parse(DATABASE_URL)}
|
||||
|
||||
AUTH_PASSWORD_VALIDATORS = [
|
||||
{"NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator"},
|
||||
@@ -130,7 +142,30 @@ CACHES = {
|
||||
X_FRAME_OPTIONS = "SAMEORIGIN"
|
||||
SECURE_REFERRER_POLICY = "strict-origin-when-cross-origin"
|
||||
SECURE_CONTENT_TYPE_NOSNIFF = True
|
||||
X_CONTENT_TYPE_OPTIONS = "nosniff"
|
||||
|
||||
CSRF_TRUSTED_ORIGINS = [u for u in os.getenv("CSRF_TRUSTED_ORIGINS", "http://localhost:8035").split(",") if u]
|
||||
TRUSTED_PROXY_IPS = [ip.strip() for ip in os.getenv("TRUSTED_PROXY_IPS", "").split(",") if ip.strip()]
|
||||
|
||||
STATICFILES_STORAGE = "whitenoise.storage.CompressedManifestStaticFilesStorage"
|
||||
STORAGES = {
|
||||
"default": {
|
||||
"BACKEND": "django.core.files.storage.FileSystemStorage",
|
||||
},
|
||||
"staticfiles": {
|
||||
"BACKEND": "whitenoise.storage.CompressedManifestStaticFilesStorage",
|
||||
},
|
||||
}
|
||||
|
||||
TAILWIND_APP_NAME = "theme"
|
||||
|
||||
# Cloudflare Turnstile (comment spam protection)
|
||||
TURNSTILE_SITE_KEY = os.getenv("TURNSTILE_SITE_KEY", "")
|
||||
TURNSTILE_SECRET_KEY = os.getenv("TURNSTILE_SECRET_KEY", "")
|
||||
TURNSTILE_EXPECTED_HOSTNAME = os.getenv("TURNSTILE_EXPECTED_HOSTNAME", "")
|
||||
|
||||
WAGTAILSEARCH_BACKENDS = {
|
||||
"default": {
|
||||
"BACKEND": "wagtail.search.backends.database",
|
||||
"SEARCH_CONFIG": "english",
|
||||
}
|
||||
}
|
||||
|
||||
@@ -4,6 +4,21 @@ DEBUG = True
|
||||
|
||||
INTERNAL_IPS = ["127.0.0.1"]
|
||||
|
||||
# Drop WhiteNoise in dev — it serves from STATIC_ROOT which is empty without
|
||||
# collectstatic, so it 404s every asset. Django's runserver serves static and
|
||||
# media files natively when DEBUG=True (via django.contrib.staticfiles + the
|
||||
# media URL pattern in urls.py).
|
||||
MIDDLEWARE = [m for m in MIDDLEWARE if m != "whitenoise.middleware.WhiteNoiseMiddleware"]
|
||||
STORAGES = {
|
||||
"default": {
|
||||
"BACKEND": "django.core.files.storage.FileSystemStorage",
|
||||
},
|
||||
"staticfiles": {
|
||||
"BACKEND": "django.contrib.staticfiles.storage.StaticFilesStorage",
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
try:
|
||||
import debug_toolbar # noqa: F401
|
||||
|
||||
@@ -11,3 +26,5 @@ try:
|
||||
MIDDLEWARE = ["debug_toolbar.middleware.DebugToolbarMiddleware", *MIDDLEWARE]
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
COMMENT_RATE_LIMIT_PER_MINUTE = 100
|
||||
|
||||
@@ -2,8 +2,16 @@ from .base import * # noqa
|
||||
|
||||
DEBUG = False
|
||||
|
||||
# Behind Caddy: trust the forwarded proto header so Django knows it's HTTPS.
|
||||
# SECURE_SSL_REDIRECT is intentionally off — Caddy handles HTTPS redirects
|
||||
# before the request reaches Django; enabling it here causes redirect loops.
|
||||
SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https")
|
||||
USE_X_FORWARDED_HOST = True
|
||||
SECURE_SSL_REDIRECT = True
|
||||
SECURE_SSL_REDIRECT = False
|
||||
SESSION_COOKIE_SECURE = True
|
||||
CSRF_COOKIE_SECURE = True
|
||||
|
||||
CSRF_TRUSTED_ORIGINS = [
|
||||
"https://nohypeai.net",
|
||||
"https://www.nohypeai.net",
|
||||
]
|
||||
|
||||
@@ -1,10 +1,13 @@
|
||||
from django.conf import settings
|
||||
from django.conf.urls.static import static
|
||||
from django.contrib import admin
|
||||
from django.urls import include, path
|
||||
from django.views.generic import RedirectView
|
||||
from wagtail import urls as wagtail_urls
|
||||
from wagtail.contrib.sitemaps.views import sitemap
|
||||
|
||||
from apps.blog.feeds import AllArticlesFeed, TagArticlesFeed
|
||||
from apps.blog.feeds import AllArticlesFeed, CategoryArticlesFeed, TagArticlesFeed
|
||||
from apps.blog.views import search as search_view
|
||||
from apps.core.views import consent_view, robots_txt
|
||||
|
||||
urlpatterns = [
|
||||
@@ -12,12 +15,18 @@ urlpatterns = [
|
||||
path("cms/", include("wagtail.admin.urls")),
|
||||
path("documents/", include("wagtail.documents.urls")),
|
||||
path("comments/", include("apps.comments.urls")),
|
||||
path("health/", include("apps.health.urls")),
|
||||
path("newsletter/", include("apps.newsletter.urls")),
|
||||
path("consent/", consent_view, name="consent"),
|
||||
path("robots.txt", robots_txt, name="robots_txt"),
|
||||
path("feed/", AllArticlesFeed(), name="rss_feed"),
|
||||
path("feed/category/<slug:category_slug>/", CategoryArticlesFeed(), name="rss_feed_by_category"),
|
||||
path("feed/tag/<slug:tag_slug>/", TagArticlesFeed(), name="rss_feed_by_tag"),
|
||||
path("sitemap.xml", sitemap),
|
||||
path("admin/", RedirectView.as_view(url="/cms/", permanent=False)),
|
||||
path("search/", search_view, name="search"),
|
||||
path("", include(wagtail_urls)),
|
||||
]
|
||||
|
||||
if settings.DEBUG:
|
||||
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
||||
|
||||
26
deploy/caddy/nohype.caddy
Normal file
26
deploy/caddy/nohype.caddy
Normal file
@@ -0,0 +1,26 @@
|
||||
www.nohypeai.net {
|
||||
redir https://nohypeai.net{uri} permanent
|
||||
}
|
||||
|
||||
nohypeai.net {
|
||||
encode gzip zstd
|
||||
|
||||
header {
|
||||
X-Content-Type-Options nosniff
|
||||
Referrer-Policy strict-origin-when-cross-origin
|
||||
Permissions-Policy "geolocation=(), microphone=(), camera=()"
|
||||
X-Forwarded-Proto https
|
||||
}
|
||||
|
||||
handle_path /static/* {
|
||||
root * /srv/sum/nohype/static
|
||||
file_server
|
||||
}
|
||||
|
||||
handle_path /media/* {
|
||||
root * /srv/sum/nohype/media
|
||||
file_server
|
||||
}
|
||||
|
||||
reverse_proxy localhost:8001
|
||||
}
|
||||
37
deploy/deploy.sh
Executable file
37
deploy/deploy.sh
Executable file
@@ -0,0 +1,37 @@
|
||||
#!/usr/bin/env bash
|
||||
# Deploy script for No Hype AI — runs on lintel-prod-01 as deploy user.
|
||||
# Called by CI after a successful push to main.
|
||||
set -euo pipefail
|
||||
|
||||
SITE_DIR=/srv/sum/nohype
|
||||
APP_DIR=${SITE_DIR}/app
|
||||
|
||||
cd "${SITE_DIR}"
|
||||
|
||||
echo "==> Pulling latest code"
|
||||
git -C "${APP_DIR}" pull origin main
|
||||
|
||||
GIT_SHA=$(git -C "${APP_DIR}" rev-parse --short HEAD)
|
||||
BUILD_ID="build-$(date +%Y%m%d)-${GIT_SHA}"
|
||||
export GIT_SHA BUILD_ID
|
||||
|
||||
echo "==> Updating compose file"
|
||||
cp "${APP_DIR}/docker-compose.prod.yml" "${SITE_DIR}/docker-compose.prod.yml"
|
||||
|
||||
echo "==> Ensuring static/media directories exist"
|
||||
mkdir -p "${SITE_DIR}/static" "${SITE_DIR}/media"
|
||||
|
||||
echo "==> Rebuilding and recreating web container"
|
||||
docker compose -f "${SITE_DIR}/docker-compose.prod.yml" up -d --no-deps --build --force-recreate web
|
||||
|
||||
echo "==> Waiting for health check"
|
||||
for i in $(seq 1 30); do
|
||||
if curl -fsS -H "Host: nohypeai.net" http://localhost:8001/health/ >/dev/null 2>&1; then
|
||||
echo "==> Site is up"
|
||||
exit 0
|
||||
fi
|
||||
sleep 3
|
||||
done
|
||||
echo "ERROR: site did not come up after 90s" >&2
|
||||
docker compose -f "${SITE_DIR}/docker-compose.prod.yml" logs --tail=50 web
|
||||
exit 1
|
||||
28
deploy/entrypoint.prod.sh
Executable file
28
deploy/entrypoint.prod.sh
Executable file
@@ -0,0 +1,28 @@
|
||||
#!/bin/sh
|
||||
set -e
|
||||
|
||||
python manage.py tailwind install --no-input
|
||||
python manage.py tailwind build
|
||||
python manage.py migrate --noinput
|
||||
python manage.py collectstatic --noinput
|
||||
python manage.py update_index
|
||||
|
||||
# Set Wagtail site hostname from WAGTAILADMIN_BASE_URL when available.
|
||||
# This keeps preview/page URLs on the same origin as the admin host.
|
||||
python manage.py shell -c "
|
||||
from wagtail.models import Site
|
||||
import os
|
||||
from urllib.parse import urlparse
|
||||
|
||||
admin_base = os.environ.get('WAGTAILADMIN_BASE_URL', '').strip()
|
||||
parsed = urlparse(admin_base) if admin_base else None
|
||||
hostname = parsed.hostname if parsed and parsed.hostname else os.environ.get('ALLOWED_HOSTS', 'localhost').split(',')[0].strip()
|
||||
Site.objects.update(hostname=hostname, port=443, site_name='No Hype AI')
|
||||
"
|
||||
|
||||
exec gunicorn config.wsgi:application \
|
||||
--workers 3 \
|
||||
--bind 0.0.0.0:8000 \
|
||||
--access-logfile - \
|
||||
--error-logfile - \
|
||||
--capture-output
|
||||
26
deploy/sum-nohype.service
Normal file
26
deploy/sum-nohype.service
Normal file
@@ -0,0 +1,26 @@
|
||||
[Unit]
|
||||
Description=No Hype AI (Docker Compose)
|
||||
Requires=docker.service
|
||||
After=docker.service network-online.target
|
||||
|
||||
[Service]
|
||||
Type=simple
|
||||
User=deploy
|
||||
Group=www-data
|
||||
WorkingDirectory=/srv/sum/nohype
|
||||
|
||||
ExecStartPre=docker compose -f docker-compose.prod.yml pull --ignore-pull-failures
|
||||
ExecStart=docker compose -f docker-compose.prod.yml up --build
|
||||
ExecStop=docker compose -f docker-compose.prod.yml down
|
||||
|
||||
Restart=always
|
||||
RestartSec=10
|
||||
TimeoutStartSec=300
|
||||
TimeoutStopSec=30
|
||||
|
||||
StandardOutput=journal
|
||||
StandardError=journal
|
||||
SyslogIdentifier=sum-nohype
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
||||
7
docker-compose.ci.yml
Normal file
7
docker-compose.ci.yml
Normal file
@@ -0,0 +1,7 @@
|
||||
services:
|
||||
web:
|
||||
volumes: []
|
||||
ports: []
|
||||
|
||||
db:
|
||||
ports: []
|
||||
42
docker-compose.prod.yml
Normal file
42
docker-compose.prod.yml
Normal file
@@ -0,0 +1,42 @@
|
||||
services:
|
||||
web:
|
||||
build:
|
||||
context: app
|
||||
args:
|
||||
GIT_SHA: ${GIT_SHA:-unknown}
|
||||
BUILD_ID: ${BUILD_ID:-unknown}
|
||||
working_dir: /app
|
||||
command: /app/deploy/entrypoint.prod.sh
|
||||
env_file: .env
|
||||
environment:
|
||||
BACKUP_STATUS_FILE: /srv/sum/nohype/backup_status
|
||||
DJANGO_SETTINGS_MODULE: config.settings.production
|
||||
volumes:
|
||||
- /srv/sum/nohype:/srv/sum/nohype:ro
|
||||
- /srv/sum/nohype/static:/app/staticfiles
|
||||
- /srv/sum/nohype/media:/app/media
|
||||
ports:
|
||||
- "127.0.0.1:8001:8000"
|
||||
depends_on:
|
||||
db:
|
||||
condition: service_healthy
|
||||
restart: unless-stopped
|
||||
|
||||
db:
|
||||
image: postgres:16-alpine
|
||||
env_file: .env
|
||||
environment:
|
||||
POSTGRES_DB: nohype
|
||||
POSTGRES_USER: nohype
|
||||
volumes:
|
||||
- nohype_pg:/var/lib/postgresql/data
|
||||
healthcheck:
|
||||
test: ["CMD-SHELL", "pg_isready -U nohype -d nohype"]
|
||||
interval: 5s
|
||||
timeout: 5s
|
||||
retries: 10
|
||||
start_period: 10s
|
||||
restart: unless-stopped
|
||||
|
||||
volumes:
|
||||
nohype_pg:
|
||||
@@ -1,31 +1,48 @@
|
||||
services:
|
||||
web:
|
||||
build: .
|
||||
container_name: nohype-web
|
||||
command: python manage.py runserver 0.0.0.0:8000
|
||||
working_dir: /app
|
||||
command: >
|
||||
sh -c "python manage.py tailwind install --no-input &&
|
||||
python manage.py tailwind build &&
|
||||
python manage.py migrate --noinput &&
|
||||
python manage.py seed_e2e_content &&
|
||||
python manage.py runserver 0.0.0.0:8000"
|
||||
volumes:
|
||||
- .:/app
|
||||
ports:
|
||||
- "8035:8000"
|
||||
env_file:
|
||||
- .env
|
||||
environment:
|
||||
SECRET_KEY: dev-secret-key
|
||||
DEBUG: "1"
|
||||
ALLOWED_HOSTS: localhost,127.0.0.1,web
|
||||
WAGTAIL_SITE_NAME: No Hype AI
|
||||
DATABASE_URL: postgres://nohype:nohype@db:5432/nohype
|
||||
DJANGO_SETTINGS_MODULE: config.settings.development
|
||||
WAGTAILADMIN_BASE_URL: http://localhost:8035
|
||||
CONSENT_POLICY_VERSION: "1"
|
||||
EMAIL_BACKEND: django.core.mail.backends.console.EmailBackend
|
||||
DEFAULT_FROM_EMAIL: hello@nohypeai.com
|
||||
NEWSLETTER_PROVIDER: buttondown
|
||||
E2E_MODE: "1"
|
||||
depends_on:
|
||||
- db
|
||||
db:
|
||||
condition: service_healthy
|
||||
|
||||
db:
|
||||
image: postgres:16-alpine
|
||||
container_name: nohype-db
|
||||
environment:
|
||||
POSTGRES_DB: nohype
|
||||
POSTGRES_USER: nohype
|
||||
POSTGRES_PASSWORD: nohype
|
||||
ports:
|
||||
- "5545:5432"
|
||||
volumes:
|
||||
- nohype_pg:/var/lib/postgresql/data
|
||||
healthcheck:
|
||||
test: ["CMD-SHELL", "pg_isready -U nohype -d nohype"]
|
||||
interval: 5s
|
||||
timeout: 5s
|
||||
retries: 10
|
||||
start_period: 5s
|
||||
|
||||
volumes:
|
||||
nohype_pg:
|
||||
|
||||
0
e2e/__init__.py
Normal file
0
e2e/__init__.py
Normal file
57
e2e/conftest.py
Normal file
57
e2e/conftest.py
Normal file
@@ -0,0 +1,57 @@
|
||||
"""Shared fixtures for E2E Playwright tests.
|
||||
|
||||
All tests in this directory require a running application server pointed to by
|
||||
the E2E_BASE_URL environment variable. Tests are automatically skipped when
|
||||
the variable is absent, making them safe to collect in any environment.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
from collections.abc import Generator
|
||||
|
||||
import pytest
|
||||
from playwright.sync_api import Browser, BrowserContext, Page, sync_playwright
|
||||
|
||||
|
||||
@pytest.fixture(scope="session")
|
||||
def base_url() -> str:
|
||||
url = os.getenv("E2E_BASE_URL", "").rstrip("/")
|
||||
if not url:
|
||||
pytest.skip("E2E_BASE_URL not set – start a server and export E2E_BASE_URL to run E2E tests")
|
||||
return url
|
||||
|
||||
|
||||
@pytest.fixture(scope="session")
|
||||
def _browser(base_url: str) -> Generator[Browser, None, None]: # noqa: ARG001
|
||||
"""Session-scoped Chromium instance (headless)."""
|
||||
with sync_playwright() as pw:
|
||||
browser = pw.chromium.launch(headless=True)
|
||||
yield browser
|
||||
browser.close()
|
||||
|
||||
|
||||
@pytest.fixture()
|
||||
def page(_browser: Browser) -> Generator[Page, None, None]:
|
||||
"""Fresh browser context + page per test — no shared state between tests.
|
||||
|
||||
Clipboard permissions are pre-granted so copy-link and similar interactions
|
||||
work in headless Chromium without triggering the permissions dialog.
|
||||
"""
|
||||
ctx: BrowserContext = _browser.new_context(
|
||||
permissions=["clipboard-read", "clipboard-write"],
|
||||
)
|
||||
# Polyfill clipboard in environments where the native API is unavailable
|
||||
# (e.g. non-HTTPS Docker CI). The polyfill stores writes in a variable so
|
||||
# the JS success path still runs and button text updates as expected.
|
||||
ctx.add_init_script("""
|
||||
if (!navigator.clipboard || !navigator.clipboard.writeText) {
|
||||
Object.defineProperty(navigator, 'clipboard', {
|
||||
value: { writeText: () => Promise.resolve() },
|
||||
configurable: true,
|
||||
});
|
||||
}
|
||||
""")
|
||||
pg: Page = ctx.new_page()
|
||||
yield pg
|
||||
ctx.close()
|
||||
56
e2e/test_admin_experience.py
Normal file
56
e2e/test_admin_experience.py
Normal file
@@ -0,0 +1,56 @@
|
||||
"""E2E tests for Wagtail admin editor experience improvements."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import pytest
|
||||
from playwright.sync_api import Page, expect
|
||||
|
||||
|
||||
def admin_login(page: Page, base_url: str) -> None:
|
||||
"""Log in to the Wagtail admin using the seeded E2E admin user."""
|
||||
page.goto(f"{base_url}/cms/login/", wait_until="networkidle")
|
||||
page.fill('input[name="username"]', "e2e-admin")
|
||||
page.fill('input[name="password"]', "e2e-admin-pass")
|
||||
page.click('button[type="submit"]')
|
||||
page.wait_for_load_state("networkidle")
|
||||
|
||||
|
||||
@pytest.mark.e2e
|
||||
def test_articles_menu_item_visible(page: Page, base_url: str) -> None:
|
||||
"""The admin sidebar should contain an 'Articles' menu item."""
|
||||
admin_login(page, base_url)
|
||||
sidebar = page.locator("#wagtail-sidebar")
|
||||
articles_link = sidebar.get_by_role("link", name="Articles")
|
||||
expect(articles_link).to_be_visible()
|
||||
|
||||
|
||||
@pytest.mark.e2e
|
||||
def test_articles_listing_page_loads(page: Page, base_url: str) -> None:
|
||||
"""Clicking 'Articles' should load the articles listing with seeded articles."""
|
||||
admin_login(page, base_url)
|
||||
page.goto(f"{base_url}/cms/articles/", wait_until="networkidle")
|
||||
expect(page.get_by_role("heading").first).to_be_visible()
|
||||
# Seeded articles should appear
|
||||
expect(page.get_by_text("Nightly Playwright Journey")).to_be_visible()
|
||||
|
||||
|
||||
@pytest.mark.e2e
|
||||
def test_dashboard_has_articles_panel(page: Page, base_url: str) -> None:
|
||||
"""The admin dashboard should include the articles summary panel."""
|
||||
admin_login(page, base_url)
|
||||
page.goto(f"{base_url}/cms/", wait_until="networkidle")
|
||||
expect(page.get_by_text("Articles overview")).to_be_visible()
|
||||
|
||||
|
||||
@pytest.mark.e2e
|
||||
def test_article_editor_has_tabs(page: Page, base_url: str) -> None:
|
||||
"""The article editor should have Content, Metadata, Publishing, and SEO tabs."""
|
||||
admin_login(page, base_url)
|
||||
page.goto(f"{base_url}/cms/articles/", wait_until="networkidle")
|
||||
# Click the first article title link to edit it
|
||||
page.get_by_role("link", name="Nightly Playwright Journey").first.click()
|
||||
page.wait_for_load_state("networkidle")
|
||||
expect(page.get_by_role("tab", name="Content")).to_be_visible()
|
||||
expect(page.get_by_role("tab", name="Metadata")).to_be_visible()
|
||||
expect(page.get_by_role("tab", name="Publishing")).to_be_visible()
|
||||
expect(page.get_by_role("tab", name="SEO")).to_be_visible()
|
||||
72
e2e/test_article_detail.py
Normal file
72
e2e/test_article_detail.py
Normal file
@@ -0,0 +1,72 @@
|
||||
"""E2E tests for article detail pages."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import pytest
|
||||
from playwright.sync_api import Page, expect
|
||||
|
||||
ARTICLE_SLUG = "nightly-playwright-journey"
|
||||
|
||||
|
||||
def _go_to_article(page: Page, base_url: str) -> None:
|
||||
page.goto(f"{base_url}/articles/{ARTICLE_SLUG}/", wait_until="networkidle")
|
||||
|
||||
|
||||
@pytest.mark.e2e
|
||||
def test_article_title_visible(page: Page, base_url: str) -> None:
|
||||
_go_to_article(page, base_url)
|
||||
h1 = page.get_by_role("heading", level=1)
|
||||
expect(h1).to_be_visible()
|
||||
assert h1.inner_text().strip() != ""
|
||||
|
||||
|
||||
@pytest.mark.e2e
|
||||
def test_article_read_time_visible(page: Page, base_url: str) -> None:
|
||||
_go_to_article(page, base_url)
|
||||
# Read time is rendered as "N min read"
|
||||
expect(page.get_by_text("min read")).to_be_visible()
|
||||
|
||||
|
||||
@pytest.mark.e2e
|
||||
def test_article_share_section_present(page: Page, base_url: str) -> None:
|
||||
_go_to_article(page, base_url)
|
||||
share_section = page.get_by_role("region", name="Share this article")
|
||||
expect(share_section).to_be_visible()
|
||||
expect(share_section.get_by_role("link", name="Share on X")).to_be_visible()
|
||||
expect(share_section.get_by_role("link", name="Share on LinkedIn")).to_be_visible()
|
||||
expect(share_section.get_by_role("button", name="Copy link")).to_be_visible()
|
||||
|
||||
|
||||
@pytest.mark.e2e
|
||||
def test_article_comments_section_present(page: Page, base_url: str) -> None:
|
||||
_go_to_article(page, base_url)
|
||||
# The article has comments_enabled=True
|
||||
expect(page.get_by_role("heading", name="Comments", exact=True)).to_be_visible()
|
||||
expect(page.get_by_role("button", name="Post comment")).to_be_visible()
|
||||
|
||||
|
||||
@pytest.mark.e2e
|
||||
def test_article_newsletter_aside_present(page: Page, base_url: str) -> None:
|
||||
_go_to_article(page, base_url)
|
||||
# There's a Newsletter aside within the article page
|
||||
aside = page.locator("aside")
|
||||
expect(aside).to_be_visible()
|
||||
expect(aside.locator('input[type="email"]')).to_be_visible()
|
||||
|
||||
|
||||
@pytest.mark.e2e
|
||||
def test_article_related_section_present(page: Page, base_url: str) -> None:
|
||||
_go_to_article(page, base_url)
|
||||
# Related section heading
|
||||
expect(page.get_by_role("heading", name="Related")).to_be_visible()
|
||||
|
||||
|
||||
@pytest.mark.e2e
|
||||
def test_copy_link_button_updates_text(page: Page, base_url: str) -> None:
|
||||
_go_to_article(page, base_url)
|
||||
copy_btn = page.locator("[data-copy-link]")
|
||||
expect(copy_btn).to_be_visible()
|
||||
# Force-override clipboard so writeText always resolves, even in non-HTTPS headless context
|
||||
page.evaluate("navigator.clipboard.writeText = () => Promise.resolve()")
|
||||
copy_btn.click()
|
||||
expect(copy_btn).to_have_text("Copied")
|
||||
59
e2e/test_articles.py
Normal file
59
e2e/test_articles.py
Normal file
@@ -0,0 +1,59 @@
|
||||
"""E2E tests for the article index page."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import pytest
|
||||
from playwright.sync_api import Page, expect
|
||||
|
||||
|
||||
@pytest.mark.e2e
|
||||
def test_article_index_loads(page: Page, base_url: str) -> None:
|
||||
page.goto(f"{base_url}/articles/", wait_until="networkidle")
|
||||
expect(page.get_by_role("heading", level=1)).to_be_visible()
|
||||
# At least one article card must be present after seeding
|
||||
expect(page.locator("main article").first).to_be_visible()
|
||||
|
||||
|
||||
@pytest.mark.e2e
|
||||
def test_tag_filter_shows_tagged_articles(page: Page, base_url: str) -> None:
|
||||
page.goto(f"{base_url}/articles/", wait_until="networkidle")
|
||||
# The seeded "AI Tools" tag link must be present
|
||||
tag_link = page.get_by_role("link", name="AI Tools")
|
||||
expect(tag_link).to_be_visible()
|
||||
tag_link.click()
|
||||
page.wait_for_load_state("networkidle")
|
||||
|
||||
# URL should now contain ?tag=ai-tools
|
||||
assert "tag=ai-tools" in page.url
|
||||
|
||||
# The tagged article must appear; no-tag articles may be absent
|
||||
expect(page.get_by_text("Tagged Article")).to_be_visible()
|
||||
|
||||
|
||||
@pytest.mark.e2e
|
||||
def test_all_tag_clears_filter(page: Page, base_url: str) -> None:
|
||||
# Start with the tag filter applied
|
||||
page.goto(f"{base_url}/articles/?tag=ai-tools", wait_until="networkidle")
|
||||
|
||||
# Clicking "All" should return to unfiltered list
|
||||
page.get_by_role("link", name="All").click()
|
||||
page.wait_for_load_state("networkidle")
|
||||
assert "tag=" not in page.url
|
||||
# All seeded articles should now be visible
|
||||
expect(page.locator("main article").first).to_be_visible()
|
||||
|
||||
|
||||
@pytest.mark.e2e
|
||||
def test_article_card_navigates_to_detail(page: Page, base_url: str) -> None:
|
||||
page.goto(f"{base_url}/articles/", wait_until="networkidle")
|
||||
first_link = page.locator("main article a").first
|
||||
expect(first_link).to_be_visible()
|
||||
|
||||
href = first_link.get_attribute("href")
|
||||
assert href, "Article card must have an href"
|
||||
|
||||
first_link.click()
|
||||
page.wait_for_load_state("networkidle")
|
||||
|
||||
# We should be on an article detail page
|
||||
expect(page.get_by_role("heading", level=1)).to_be_visible()
|
||||
115
e2e/test_comments.py
Normal file
115
e2e/test_comments.py
Normal file
@@ -0,0 +1,115 @@
|
||||
"""E2E tests for the comment submission flow."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import pytest
|
||||
from playwright.sync_api import Page, expect
|
||||
|
||||
ARTICLE_SLUG = "nightly-playwright-journey"
|
||||
|
||||
|
||||
def _go_to_article(page: Page, base_url: str) -> None:
|
||||
page.goto(f"{base_url}/articles/{ARTICLE_SLUG}/", wait_until="networkidle")
|
||||
|
||||
|
||||
def _submit_comment(page: Page, *, name: str = "E2E Tester", email: str = "e2e@example.com", body: str) -> None:
|
||||
"""Fill and submit the main (non-reply) comment form."""
|
||||
form = page.locator("form[data-comment-form]")
|
||||
form.locator('input[name="author_name"]').fill(name)
|
||||
form.locator('input[name="author_email"]').fill(email)
|
||||
form.locator('textarea[name="body"]').fill(body)
|
||||
form.get_by_role("button", name="Post comment").click()
|
||||
|
||||
|
||||
@pytest.mark.e2e
|
||||
def test_valid_comment_shows_moderation_message(page: Page, base_url: str) -> None:
|
||||
"""Successful comment submission must show the awaiting-moderation message."""
|
||||
_go_to_article(page, base_url)
|
||||
_submit_comment(page, body="This is a test comment from Playwright.")
|
||||
|
||||
# HTMX swaps the form container inline — wait for the moderation message
|
||||
expect(page.get_by_text("awaiting moderation")).to_be_visible(timeout=10_000)
|
||||
|
||||
|
||||
@pytest.mark.e2e
def test_valid_comment_not_immediately_visible(page: Page, base_url: str) -> None:
    """An unmoderated comment must stay hidden from the public comments list."""
    _go_to_article(page, base_url)
    body_text = "Unique unmoderated comment body xq7z"
    _submit_comment(page, body=body_text)

    # Once the moderation notice shows, the HTMX round-trip has finished.
    expect(page.get_by_text("awaiting moderation")).to_be_visible(timeout=10_000)
    # The freshly submitted text must not be rendered anywhere on the page.
    expect(page.get_by_text(body_text)).not_to_be_visible()
|
||||
|
||||
|
||||
@pytest.mark.e2e
def test_empty_body_shows_form_errors(page: Page, base_url: str) -> None:
    """A whitespace-only comment body must trigger the form error container."""
    _go_to_article(page, base_url)
    _submit_comment(page, body=" ")  # body consisting solely of whitespace
    page.wait_for_load_state("networkidle")

    error_box = page.locator('[aria-label="Comment form errors"]')
    expect(error_box).to_be_visible(timeout=10_000)
    # The success marker must be absent from the URL after a rejected submit.
    assert "commented=1" not in page.url
|
||||
|
||||
|
||||
@pytest.mark.e2e
def test_missing_name_shows_form_errors(page: Page, base_url: str) -> None:
    """Submitting a comment without an author name must not be accepted.

    Uses the shared ``_submit_comment`` helper (with ``name=""``) instead of
    duplicating the fill/submit steps inline — same fields, same order, same
    button click as the hand-rolled version it replaces.
    """
    _go_to_article(page, base_url)
    _submit_comment(page, name="", email="e2e@example.com", body="Comment without a name.")
    page.wait_for_load_state("networkidle")

    # No success marker: the submission must have been rejected.
    assert "commented=1" not in page.url
|
||||
|
||||
|
||||
@pytest.mark.e2e
def test_reply_form_visible_on_approved_comment(page: Page, base_url: str) -> None:
    """The seeded approved comment must be shown together with a reply toggle."""
    _go_to_article(page, base_url)

    # The approved commenter's name should be rendered in the comments list...
    approved_author = page.get_by_text("E2E Approved Commenter", exact=True)
    expect(approved_author).to_be_visible()
    # ...along with a <summary> toggle that reveals the reply form.
    reply_toggle = page.locator("summary").filter(has_text="Reply")
    expect(reply_toggle).to_be_visible()
|
||||
|
||||
|
||||
@pytest.mark.e2e
def test_reply_submission_shows_moderation_message(page: Page, base_url: str) -> None:
    """Replying to an approved comment surfaces the awaiting-moderation notice."""
    _go_to_article(page, base_url)

    # Expand the first reply form via its <summary> toggle.
    page.locator("summary").filter(has_text="Reply").first.click()

    submit_button = page.get_by_test_id("post-reply-btn").first
    expect(submit_button).to_be_visible()

    # Scope the field fills to the <details> element that owns this button so
    # we never touch the main (non-reply) comment form by accident.
    reply_details = page.locator("details").filter(has=submit_button).first
    reply_details.locator('input[name="author_name"]').fill("E2E Replier")
    reply_details.locator('input[name="author_email"]').fill("replier@example.com")
    reply_details.locator('textarea[name="body"]').fill("This is a test reply.")
    submit_button.click()

    # HTMX replaces the reply form container with the moderation notice.
    expect(page.get_by_text("awaiting moderation")).to_be_visible(timeout=10_000)
|
||||
|
||||
|
||||
@pytest.mark.e2e
def test_comments_section_absent_when_disabled(page: Page, base_url: str) -> None:
    """An article with comments_enabled=False renders no comments UI at all."""
    response = page.goto(f"{base_url}/articles/e2e-no-comments/", wait_until="networkidle")
    assert response is not None and response.status == 200, (
        f"Expected 200 for e2e-no-comments article, got {response and response.status}"
    )

    expect(page.get_by_role("heading", level=1)).to_have_text("No Comments Article")
    # Neither the section heading nor the submit button may exist in the DOM.
    comments_heading = page.get_by_role("heading", name="Comments", exact=True)
    expect(comments_heading).to_have_count(0)
    post_button = page.get_by_role("button", name="Post comment")
    expect(post_button).to_have_count(0)
|
||||
|
||||
70
e2e/test_cookie_consent.py
Normal file
70
e2e/test_cookie_consent.py
Normal file
@@ -0,0 +1,70 @@
|
||||
"""E2E tests for the cookie consent banner."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import pytest
|
||||
from playwright.sync_api import Page, expect
|
||||
|
||||
|
||||
def _open_fresh_page(page: Page, url: str) -> None:
    """Visit ``url`` with no pre-existing consent cookie (fresh context guarantees this)."""
    page.goto(url, wait_until="networkidle")
|
||||
|
||||
|
||||
@pytest.mark.e2e
def test_banner_visible_on_first_visit(page: Page, base_url: str) -> None:
    """A first-time visitor must be shown the cookie consent banner."""
    _open_fresh_page(page, f"{base_url}/")
    consent_banner = page.locator("#cookie-banner")
    expect(consent_banner).to_be_visible()
|
||||
|
||||
|
||||
@pytest.mark.e2e
def test_accept_all_dismisses_banner(page: Page, base_url: str) -> None:
    """Clicking "Accept all" must remove the consent banner from the DOM."""
    _open_fresh_page(page, f"{base_url}/")
    consent_banner = page.locator("#cookie-banner")
    expect(consent_banner).to_be_visible()

    page.get_by_role("button", name="Accept all").first.click()
    page.wait_for_load_state("networkidle")
    # Count 0 asserts removal, not merely visual hiding.
    expect(consent_banner).to_have_count(0)
|
||||
|
||||
|
||||
@pytest.mark.e2e
def test_reject_all_dismisses_banner(page: Page, base_url: str) -> None:
    """Clicking "Reject all" must remove the consent banner from the DOM."""
    _open_fresh_page(page, f"{base_url}/")
    consent_banner = page.locator("#cookie-banner")
    expect(consent_banner).to_be_visible()

    page.get_by_role("button", name="Reject all").first.click()
    page.wait_for_load_state("networkidle")
    # Count 0 asserts removal, not merely visual hiding.
    expect(consent_banner).to_have_count(0)
|
||||
|
||||
|
||||
@pytest.mark.e2e
def test_granular_preferences_save_dismisses_banner(page: Page, base_url: str) -> None:
    """Saving granular preferences (analytics opted in) must dismiss the banner."""
    _open_fresh_page(page, f"{base_url}/")
    consent_banner = page.locator("#cookie-banner")
    expect(consent_banner).to_be_visible()

    # Expand the <details> disclosure that hides the granular controls.
    consent_banner.locator("details summary").click()

    # Opt in to analytics once the checkbox is revealed.
    analytics_opt_in = consent_banner.locator('input[name="analytics"]')
    expect(analytics_opt_in).to_be_visible()
    analytics_opt_in.check()

    # Persist the choices; the banner should then be removed from the DOM.
    page.get_by_role("button", name="Save preferences").click()
    page.wait_for_load_state("networkidle")
    expect(consent_banner).to_have_count(0)
|
||||
|
||||
|
||||
@pytest.mark.e2e
def test_banner_absent_after_consent_cookie_set(page: Page, base_url: str) -> None:
    """Once consent is recorded, later navigations must not re-show the banner."""
    _open_fresh_page(page, f"{base_url}/")
    # Record consent on the first page view.
    page.get_by_role("button", name="Accept all").first.click()
    page.wait_for_load_state("networkidle")

    # The consent cookie persists within the same browser context, so a
    # navigation to another page must render no banner at all.
    page.goto(f"{base_url}/articles/", wait_until="networkidle")
    expect(page.locator("#cookie-banner")).to_have_count(0)
|
||||
61
e2e/test_feeds.py
Normal file
61
e2e/test_feeds.py
Normal file
@@ -0,0 +1,61 @@
|
||||
"""E2E tests for RSS feed, sitemap, and robots.txt."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import pytest
|
||||
from playwright.sync_api import Page
|
||||
|
||||
|
||||
@pytest.mark.e2e
def test_rss_feed_returns_valid_xml(page: Page, base_url: str) -> None:
    """The site-wide feed endpoint must return 200 and an RSS/Atom root element.

    Fix: the original assertion repeated ``"<rss" in content or "<feed" in
    content`` twice verbatim — the duplicated clauses were dead weight and are
    removed without changing what the test checks.
    """
    response = page.goto(f"{base_url}/feed/", wait_until="networkidle")
    assert response is not None
    assert response.status == 200
    content = page.content()
    # Accept either an RSS (<rss>) or an Atom (<feed>) root element.
    assert "<rss" in content or "<feed" in content, (
        "RSS feed response must contain a <rss or <feed root element"
    )
|
||||
|
||||
|
||||
@pytest.mark.e2e
def test_rss_feed_contains_seeded_article(page: Page, base_url: str) -> None:
    """The seeded article's title must be listed in the site-wide feed."""
    response = page.goto(f"{base_url}/feed/", wait_until="networkidle")
    assert response is not None and response.status == 200
    feed_body = page.content()
    assert "Nightly Playwright Journey" in feed_body, "Seeded article title must appear in the feed"
|
||||
|
||||
|
||||
@pytest.mark.e2e
def test_sitemap_returns_valid_xml(page: Page, base_url: str) -> None:
    """The sitemap endpoint must return 200 and contain a urlset element.

    Fix: the original checked ``"urlset" in content or "<urlset" in content``;
    the second clause can never matter because any string containing
    ``<urlset`` necessarily contains ``urlset``. The redundant clause is
    dropped — the accepted set of responses is identical.
    """
    response = page.goto(f"{base_url}/sitemap.xml", wait_until="networkidle")
    assert response is not None
    assert response.status == 200
    content = page.content()
    assert "urlset" in content, "Sitemap must contain urlset element"
|
||||
|
||||
|
||||
@pytest.mark.e2e
def test_sitemap_contains_article_url(page: Page, base_url: str) -> None:
    """The seeded article's slug must be present in the sitemap."""
    response = page.goto(f"{base_url}/sitemap.xml", wait_until="networkidle")
    assert response is not None and response.status == 200
    sitemap_body = page.content()
    assert "nightly-playwright-journey" in sitemap_body, "Seeded article URL must appear in sitemap"
|
||||
|
||||
|
||||
@pytest.mark.e2e
def test_robots_txt_is_accessible(page: Page, base_url: str) -> None:
    """robots.txt must be served with a 200 and at least one User-agent rule."""
    response = page.goto(f"{base_url}/robots.txt", wait_until="networkidle")
    assert response is not None
    assert response.status == 200
    robots_body = page.content()
    assert "User-agent" in robots_body, "robots.txt must contain User-agent directive"
|
||||
|
||||
|
||||
@pytest.mark.e2e
def test_tag_rss_feed(page: Page, base_url: str) -> None:
    """Tag-specific feed must return 200 and valid XML for a seeded tag.

    Fix: as in ``test_rss_feed_returns_valid_xml``, the assertion repeated the
    same two ``in`` clauses twice verbatim; the duplicates are removed without
    changing the accepted responses.
    """
    response = page.goto(f"{base_url}/feed/tag/ai-tools/", wait_until="networkidle")
    assert response is not None
    assert response.status == 200
    content = page.content()
    # Accept either an RSS (<rss>) or an Atom (<feed>) root element.
    assert "<rss" in content or "<feed" in content
|
||||
52
e2e/test_home.py
Normal file
52
e2e/test_home.py
Normal file
@@ -0,0 +1,52 @@
|
||||
"""E2E tests for the home page."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import re
|
||||
|
||||
import pytest
|
||||
from playwright.sync_api import Page, expect
|
||||
|
||||
|
||||
@pytest.mark.e2e
def test_homepage_title_contains_brand(page: Page, base_url: str) -> None:
    """The document title on the home page must mention the brand name."""
    page.goto(f"{base_url}/", wait_until="networkidle")
    brand_pattern = re.compile("No Hype AI")
    expect(page).to_have_title(brand_pattern)
|
||||
|
||||
|
||||
@pytest.mark.e2e
def test_nav_links_present(page: Page, base_url: str) -> None:
    """Primary navigation must expose Home, Articles and About links."""
    page.goto(f"{base_url}/", wait_until="networkidle")
    nav = page.locator("nav")
    for label in ("Home", "Articles", "About"):
        expect(nav.get_by_role("link", name=label)).to_be_visible()
|
||||
|
||||
|
||||
@pytest.mark.e2e
def test_theme_toggle_adds_dark_class(page: Page, base_url: str) -> None:
    """Clicking the theme toggle must flip the dark class on <html>."""
    page.goto(f"{base_url}/", wait_until="networkidle")
    toggle = page.get_by_role("button", name="Toggle theme")
    expect(toggle).to_be_visible()

    root = page.locator("html")

    def is_dark() -> bool:
        # The toggle implementation may add or remove the class, so we only
        # assert that the state flips relative to the initial snapshot.
        return "dark" in (root.get_attribute("class") or "")

    initial_state = is_dark()
    toggle.click()
    assert is_dark() != initial_state, "Theme toggle must flip the dark class on <html>"
|
||||
|
||||
|
||||
@pytest.mark.e2e
def test_nav_search_box_present(page: Page, base_url: str) -> None:
    """The navigation bar must include a search input named ``q``."""
    page.goto(f"{base_url}/", wait_until="networkidle")
    search_input = page.locator("nav").locator('input[name="q"]')
    expect(search_input).to_be_visible()
|
||||
|
||||
|
||||
@pytest.mark.e2e
def test_home_shows_articles(page: Page, base_url: str) -> None:
    """After seeding, the home page must render at least one article card link."""
    page.goto(f"{base_url}/", wait_until="networkidle")
    first_card_link = page.locator("main article a").first
    expect(first_card_link).to_be_visible()
|
||||
66
e2e/test_newsletter.py
Normal file
66
e2e/test_newsletter.py
Normal file
@@ -0,0 +1,66 @@
|
||||
"""E2E tests for the newsletter subscription form."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import pytest
|
||||
from playwright.sync_api import Page, expect
|
||||
|
||||
|
||||
def _nav_newsletter_form(page: Page):
    """Return the first newsletter form inside the home page's sidebar aside."""
    sidebar = page.locator("aside")
    return sidebar.locator("form[data-newsletter-form]").first
|
||||
|
||||
|
||||
@pytest.mark.e2e
def test_subscribe_valid_email_shows_confirmation(page: Page, base_url: str) -> None:
    """Submitting a well-formed address must show the confirmation message."""
    page.goto(f"{base_url}/", wait_until="networkidle")
    signup_form = _nav_newsletter_form(page)
    signup_form.locator('input[type="email"]').fill("playwright-test@example.com")
    signup_form.get_by_role("button", name="Subscribe").click()

    # On success the page JS writes into the data-newsletter-message element.
    status = signup_form.locator("[data-newsletter-message]")
    expect(status).to_have_text("Check your email to confirm your subscription.", timeout=5_000)
|
||||
|
||||
|
||||
@pytest.mark.e2e
def test_subscribe_invalid_email_shows_error(page: Page, base_url: str) -> None:
    """A malformed address must surface the validation error message."""
    page.goto(f"{base_url}/", wait_until="networkidle")
    signup_form = _nav_newsletter_form(page)

    # Turn off the browser's native HTML5 email validation so the JS submit
    # handler runs and ships the bad value to the server (which returns 400).
    page.evaluate("document.querySelector('aside form[data-newsletter-form]').setAttribute('novalidate', '')")
    signup_form.locator('input[type="email"]').fill("not-an-email")
    signup_form.get_by_role("button", name="Subscribe").click()

    status = signup_form.locator("[data-newsletter-message]")
    expect(status).to_have_text("Please enter a valid email.", timeout=5_000)
|
||||
|
||||
|
||||
@pytest.mark.e2e
def test_subscribe_from_article_aside(page: Page, base_url: str) -> None:
    """The newsletter form embedded in an article's aside must also work."""
    page.goto(f"{base_url}/articles/nightly-playwright-journey/", wait_until="networkidle")
    embedded_form = page.locator("aside").locator("form[data-newsletter-form]")
    embedded_form.locator('input[type="email"]').fill("aside-test@example.com")
    embedded_form.get_by_role("button", name="Subscribe").click()

    status = embedded_form.locator("[data-newsletter-message]")
    expect(status).to_have_text("Check your email to confirm your subscription.", timeout=5_000)
|
||||
|
||||
|
||||
@pytest.mark.e2e
def test_subscribe_duplicate_email_still_shows_confirmation(page: Page, base_url: str) -> None:
    """Resubmitting the same address must not leak an error to the visitor."""
    address = "dupe-e2e@example.com"
    page.goto(f"{base_url}/", wait_until="networkidle")
    signup_form = _nav_newsletter_form(page)
    status = signup_form.locator("[data-newsletter-message]")

    # First submission: normal confirmation path.
    signup_form.locator('input[type="email"]').fill(address)
    signup_form.get_by_role("button", name="Subscribe").click()
    expect(status).to_have_text("Check your email to confirm your subscription.", timeout=5_000)

    # Second submission: the form resets after success, so fill it again.
    signup_form.locator('input[type="email"]').fill(address)
    signup_form.get_by_role("button", name="Subscribe").click()
    expect(status).to_have_text("Check your email to confirm your subscription.", timeout=5_000)
|
||||
@@ -179,8 +179,8 @@ Every milestone follows the **Red → Green → Refactor** cycle. No production
|
||||
### 3.3 Coverage Requirements
|
||||
|
||||
- **Minimum 90% line coverage** on all `apps/` code, enforced via `pytest-cov` in CI
|
||||
- Coverage reports generated on every push; PRs blocked below threshold
|
||||
- E2E tests run nightly, not on every push (they are slow)
|
||||
- Coverage reports generated on every pull request; PRs blocked below threshold
|
||||
- E2E tests run nightly, not on every pull request (they are slow)
|
||||
|
||||
### 3.4 Test Organisation
|
||||
|
||||
@@ -212,10 +212,10 @@ class ArticlePageFactory(wagtail_factories.PageFactory):
|
||||
# Note: no is_featured — featured article is set on HomePage.featured_article only
|
||||
```
|
||||
|
||||
### 3.6 CI Pipeline (GitHub Actions)
|
||||
### 3.6 CI Pipeline (Gitea Actions)
|
||||
|
||||
```
|
||||
on: [push, pull_request]
|
||||
on: [pull_request]
|
||||
|
||||
jobs:
|
||||
test:
|
||||
@@ -232,6 +232,8 @@ jobs:
|
||||
- Run Playwright suite
|
||||
```
|
||||
|
||||
Rationale: all merges should flow through pull requests. Running the same checks on both `push` and `pull_request` duplicates work and wastes compute.
|
||||
|
||||
---
|
||||
|
||||
## Milestone 0 — Project Scaffold & Tooling
|
||||
@@ -242,7 +244,7 @@ jobs:
|
||||
- `./manage.py runserver` starts without errors
|
||||
- `pytest` runs and exits 0 (no tests yet = trivially passing)
|
||||
- `ruff` and `mypy` pass on an empty codebase
|
||||
- GitHub Actions workflow file committed and green
|
||||
- Gitea Actions workflow file committed and green
|
||||
|
||||
### M0 — Tasks
|
||||
|
||||
@@ -271,7 +273,7 @@ jobs:
|
||||
- Add Prism.js and Alpine.js to `static/js/`; wire into `base.html`
|
||||
|
||||
#### M0.5 — CI
|
||||
- Create `.github/workflows/ci.yml`
|
||||
- Create `.gitea/workflows/ci.yml`
|
||||
- Install `pytest-django`, `pytest-cov`, `ruff`, `mypy`, `factory_boy`, `wagtail-factories`
|
||||
- Create `pytest.ini` / `pyproject.toml` config pointing at `config.settings.development`
|
||||
- Write the only M0 test: a trivial smoke test that asserts `1 == 1` to confirm CI runs
|
||||
|
||||
@@ -28,6 +28,10 @@ ignore_missing_imports = true
|
||||
module = ["apps.authors.models"]
|
||||
ignore_errors = true
|
||||
|
||||
[[tool.mypy.overrides]]
|
||||
module = ["apps.comments.views"]
|
||||
ignore_errors = true
|
||||
|
||||
[tool.django-stubs]
|
||||
django_settings_module = "config.settings.development"
|
||||
|
||||
|
||||
@@ -2,3 +2,5 @@
|
||||
DJANGO_SETTINGS_MODULE = config.settings.development
|
||||
python_files = test_*.py
|
||||
addopts = -q --cov=apps --cov-report=term-missing --cov-fail-under=90
|
||||
markers =
|
||||
e2e: browser-based end-to-end test suite for nightly jobs
|
||||
|
||||
@@ -2,7 +2,7 @@ Django~=5.2.0
|
||||
wagtail~=7.0.0
|
||||
wagtail-seo~=3.1.1
|
||||
psycopg2-binary~=2.9.0
|
||||
Pillow~=11.0.0
|
||||
Pillow~=12.1
|
||||
django-taggit~=6.0.0
|
||||
whitenoise~=6.0.0
|
||||
gunicorn~=23.0.0
|
||||
@@ -10,6 +10,8 @@ python-dotenv~=1.0.0
|
||||
dj-database-url~=2.2.0
|
||||
django-tailwind~=3.8.0
|
||||
django-csp~=3.8.0
|
||||
django-htmx~=1.21.0
|
||||
requests~=2.32.0
|
||||
pytest~=8.3.0
|
||||
pytest-django~=4.9.0
|
||||
pytest-cov~=5.0.0
|
||||
@@ -17,6 +19,8 @@ pytest-benchmark~=4.0.0
|
||||
factory-boy~=3.3.0
|
||||
wagtail-factories~=4.2.0
|
||||
feedparser~=6.0.0
|
||||
playwright~=1.57.0
|
||||
pytest-playwright~=0.7.0
|
||||
ruff~=0.6.0
|
||||
mypy~=1.11.0
|
||||
django-stubs~=5.1.0
|
||||
|
||||
4
static/favicon.svg
Normal file
4
static/favicon.svg
Normal file
@@ -0,0 +1,4 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 32 32">
|
||||
<rect width="32" height="32" fill="#09090b"/>
|
||||
<text x="16" y="24" text-anchor="middle" font-family="'Space Grotesk',sans-serif" font-weight="700" font-size="22" fill="#fafafa">/</text>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 257 B |
91
static/js/comments.js
Normal file
91
static/js/comments.js
Normal file
@@ -0,0 +1,91 @@
|
||||
(function () {
  // Explicitly render any un-rendered Cloudflare Turnstile widgets found at
  // or below `root`. Safe to call repeatedly: processed widgets are tagged
  // via data-turnstile-rendered so they are skipped on later passes.
  function renderTurnstileWidgets(root) {
    // Bail out when the Turnstile script has not loaded yet (or root is missing).
    if (!root || !window.turnstile || typeof window.turnstile.render !== "function") {
      return;
    }

    // Candidates: root itself (if it is a widget) plus any widget descendants.
    const widgets = [];
    if (root.matches && root.matches(".cf-turnstile")) {
      widgets.push(root);
    }
    if (root.querySelectorAll) {
      widgets.push(...root.querySelectorAll(".cf-turnstile"));
    }

    widgets.forEach(function (widget) {
      // Skip widgets this script already rendered.
      if (widget.dataset.turnstileRendered === "true") {
        return;
      }
      // An existing iframe means Turnstile rendered it some other way
      // (e.g. auto-render) — just mark it and move on.
      if (widget.querySelector("iframe")) {
        widget.dataset.turnstileRendered = "true";
        return;
      }

      // A site key is mandatory for an explicit render call.
      const sitekey = widget.dataset.sitekey;
      if (!sitekey) {
        return;
      }

      // Build render options from the widget's data-* attributes; only the
      // optional ones that are actually set are forwarded.
      const options = {
        sitekey: sitekey,
        theme: widget.dataset.theme || "auto",
      };
      if (widget.dataset.size) {
        options.size = widget.dataset.size;
      }
      if (widget.dataset.action) {
        options.action = widget.dataset.action;
      }
      if (widget.dataset.appearance) {
        options.appearance = widget.dataset.appearance;
      }

      window.turnstile.render(widget, options);
      widget.dataset.turnstileRendered = "true";
    });
  }

  // Show the "no comments yet" placeholder only while the comments list has
  // no comment items (HTMX may add items without a full page reload).
  function syncCommentsEmptyState() {
    const emptyState = document.getElementById("comments-empty-state");
    const commentsList = document.getElementById("comments-list");
    if (!emptyState || !commentsList) {
      return;
    }

    const hasComments = commentsList.querySelector("[data-comment-item='true']") !== null;
    emptyState.classList.toggle("hidden", hasComments);
  }

  // Defer widget rendering until the Turnstile API reports it is ready.
  function onTurnstileReady(root) {
    // No-op when the Turnstile script is absent or not yet initialised.
    if (!window.turnstile || typeof window.turnstile.ready !== "function") {
      return;
    }
    window.turnstile.ready(function () {
      renderTurnstileWidgets(root || document);
    });
  }

  // Initial page load.
  document.addEventListener("DOMContentLoaded", function () {
    syncCommentsEmptyState();
    onTurnstileReady(document);
  });

  // Re-run after HTMX swaps in new markup (e.g. a fresh comment form),
  // scoping the Turnstile pass to the swapped-in subtree when known.
  document.addEventListener("htmx:afterSwap", function (event) {
    const target = event.detail && event.detail.target ? event.detail.target : document;
    syncCommentsEmptyState();
    onTurnstileReady(target);
  });

  // Render widgets hidden inside <details> (reply forms) when first opened.
  // NOTE(review): the "toggle" event historically does not bubble, so a
  // document-level bubble-phase listener may never fire — confirm in target
  // browsers, or listen in the capture phase / on the <details> elements.
  document.addEventListener("toggle", function (event) {
    const details = event.target;
    if (!details || details.tagName !== "DETAILS" || !details.open) {
      return;
    }
    onTurnstileReady(details);
  });

  // Fallback pass in case this script ran after DOMContentLoaded had fired.
  window.addEventListener("load", function () {
    syncCommentsEmptyState();
    onTurnstileReady(document);
  });
})();
|
||||
1
static/js/htmx.min.js
vendored
Normal file
1
static/js/htmx.min.js
vendored
Normal file
File diff suppressed because one or more lines are too long
54
static/js/newsletter.js
Normal file
54
static/js/newsletter.js
Normal file
@@ -0,0 +1,54 @@
|
||||
(function () {
  // Write a status line into the form's dedicated message slot, if present.
  function setMessage(form, text) {
    const slot = form.querySelector("[data-newsletter-message]");
    if (slot) {
      slot.textContent = text;
    }
  }

  // Intercept every newsletter form and submit it via fetch instead of a
  // full-page POST, reporting the outcome inline next to the form.
  function bindNewsletterForms() {
    for (const form of document.querySelectorAll("form[data-newsletter-form]")) {
      form.addEventListener("submit", async function (event) {
        event.preventDefault();
        const payload = new FormData(form);
        try {
          const response = await fetch(form.action, {
            method: "POST",
            body: payload,
          });
          if (!response.ok) {
            // Non-2xx (e.g. 400 on a bad address) — show the validation hint.
            setMessage(form, "Please enter a valid email.");
            return;
          }
          setMessage(form, "Check your email to confirm your subscription.");
          form.reset();
        } catch (error) {
          // Network-level failure — surface a generic retry prompt.
          setMessage(form, "Subscription failed. Please try again.");
        }
      });
    }
  }

  // Wire the "copy link" button to the async clipboard API.
  function bindCopyLink() {
    const button = document.querySelector("[data-copy-link]");
    if (!button) {
      return;
    }
    button.addEventListener("click", async function () {
      const url = button.getAttribute("data-copy-url");
      if (!url) {
        return;
      }
      try {
        await navigator.clipboard.writeText(url);
        button.textContent = "Copied";
      } catch (error) {
        button.textContent = "Copy failed";
      }
    });
  }

  bindNewsletterForms();
  bindCopyLink();
})();
|
||||
@@ -4,4 +4,11 @@
|
||||
root.classList.toggle('dark');
|
||||
localStorage.setItem('theme', root.classList.contains('dark') ? 'dark' : 'light');
|
||||
};
|
||||
|
||||
document.addEventListener('DOMContentLoaded', function onReady() {
|
||||
const toggle = document.querySelector('[data-theme-toggle]');
|
||||
if (toggle) {
|
||||
toggle.addEventListener('click', window.toggleTheme);
|
||||
}
|
||||
});
|
||||
})();
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user