Compare commits

..

64 Commits

Author SHA1 Message Date
f6d37bb1f2 fix: update Docker image source for condado-newsletter service
All checks were successful
Build And Publish Production Image / Build And Publish Production Image (push) Successful in 31s
2026-03-28 13:19:59 -03:00
2e2e75fe87 fix: update JwtService to handle default expiration and add tests for token generation
All checks were successful
Build And Publish Production Image / Build And Publish Production Image (push) Successful in 39s
2026-03-28 03:40:03 -03:00
8f508034d5 fix: update Docker configuration for image source and enhance logging in supervisord
All checks were successful
Build And Publish Production Image / Build And Publish Production Image (push) Successful in 14s
2026-03-28 03:32:08 -03:00
7108aff54d fix: add access and error log configuration for Nginx
All checks were successful
Build And Publish Production Image / Build And Publish Production Image (push) Successful in 39s
2026-03-28 03:26:50 -03:00
b0a4278699 fix: update stack deployment to use production Docker Compose file 2026-03-28 03:25:35 -03:00
73c51e514c fix: update Docker Compose configuration for service names and database connection
Some checks failed
Build And Publish Production Image / Build And Publish Production Image (push) Failing after 7s
2026-03-28 03:24:00 -03:00
596a17b252 fix: update supervisord configuration to log output to stdout
All checks were successful
Build And Publish Production Image / Build And Publish Production Image (push) Successful in 9s
2026-03-28 03:14:15 -03:00
5ff28fa3d4 fix: update homepage logo and href in Docker Compose configuration
All checks were successful
Build And Publish Production Image / Build And Publish Production Image (push) Successful in 12s
2026-03-28 03:09:35 -03:00
a672c9efed fix: correct stack name in Portainer deployment configuration
All checks were successful
Build And Publish Production Image / Build And Publish Production Image (push) Successful in 35s
2026-03-28 03:07:44 -03:00
bfe8965c06 fix: enhance Portainer API interaction with DNS fallback and improved error handling
All checks were successful
Build And Publish Production Image / Build And Publish Production Image (push) Successful in 11s
2026-03-28 03:06:37 -03:00
c72595d396 fix: improve Portainer deployment script with enhanced logging and error handling
Some checks failed
Build And Publish Production Image / Build And Publish Production Image (push) Failing after 11s
2026-03-28 03:05:05 -03:00
51b596c7a5 fix: update Portainer API URL and correct image reference in Docker Compose
Some checks failed
Build And Publish Production Image / Build And Publish Production Image (push) Failing after 12s
2026-03-28 03:03:33 -03:00
e4e2ae3479 fix: sanitize Portainer API stack response output for improved logging
Some checks failed
Build And Publish Production Image / Build And Publish Production Image (push) Failing after 12s
2026-03-28 02:58:37 -03:00
808c0d0a22 fix: update Portainer API URL to use the correct lab address
Some checks failed
Build And Publish Production Image / Build And Publish Production Image (push) Failing after 11s
2026-03-28 02:55:59 -03:00
e3938d2351 fix: add network info logging before Portainer deployment
Some checks failed
Build And Publish Production Image / Build And Publish Production Image (push) Failing after 11s
2026-03-28 02:53:05 -03:00
8a04363b11 fix: enhance Portainer API deployment with detailed error handling and logging
Some checks failed
Build And Publish Production Image / Build And Publish Production Image (push) Failing after 11s
2026-03-28 02:49:30 -03:00
1038f40721 fix: update Portainer API URL to include port number for deployment
Some checks failed
Build And Publish Production Image / Build And Publish Production Image (push) Failing after 7s
2026-03-28 02:30:01 -03:00
4fd90b2497 fix: streamline deployment process by removing Gitea registry login steps and enhancing Portainer API integration
Some checks failed
Build And Publish Production Image / Build And Publish Production Image (push) Failing after 16s
2026-03-28 02:21:36 -03:00
cb74fdef7b fix: remove Gitea container registry login and push steps from build workflow
All checks were successful
Build And Publish Production Image / Build And Publish Production Image (push) Successful in 7s
2026-03-28 01:34:08 -03:00
0ed6f3824a fix: update build workflow to combine tagging and pushing of registry images
Some checks failed
Build And Publish Production Image / Build And Publish Production Image (push) Failing after 17s
2026-03-28 01:27:09 -03:00
572dc49bc9 fix: update Docker image tag format in build workflow
Some checks failed
Build And Publish Production Image / Build And Publish Production Image (push) Failing after 1m37s
2026-03-28 00:49:03 -03:00
29627a0062 fix: correct syntax for Docker image tags in build workflow
Some checks failed
Build And Publish Production Image / Build And Publish Production Image (push) Failing after 16s
2026-03-28 00:42:16 -03:00
776941b323 fix: update Docker Hub login step to be optional and clean up registry login process
Some checks failed
Build And Publish Production Image / Build And Publish Production Image (push) Failing after 1m28s
2026-03-28 00:40:15 -03:00
5f8834d0d4 fix: update Docker registry configuration and login endpoint in build workflow
Some checks failed
Build And Publish Production Image / Build And Publish Production Image (push) Failing after 1m18s
2026-03-28 00:31:33 -03:00
854fabd874 fix: update logging of Docker registry credentials to use base64 encoding
Some checks failed
Build And Publish Production Image / Build And Publish Production Image (push) Failing after 22s
2026-03-27 22:28:45 -03:00
000bc0cc36 fix: update Docker registry credentials logging to use environment variables
Some checks failed
Build And Publish Production Image / Build And Publish Production Image (push) Failing after 12s
2026-03-27 22:27:51 -03:00
4d27a256d2 fix: update logging of Docker registry credentials to use secrets
Some checks failed
Build And Publish Production Image / Build And Publish Production Image (push) Failing after 21s
2026-03-27 22:26:12 -03:00
08bfced7ce fix: add logging for Docker registry login credentials in build workflow
Some checks failed
Build And Publish Production Image / Build And Publish Production Image (push) Failing after 7s
2026-03-27 22:25:18 -03:00
c266be0eba Remove CI workflow and instructions documentation files
Some checks failed
Build And Publish Production Image / Build And Publish Production Image (push) Failing after 23s
2026-03-27 22:18:02 -03:00
837214f41a fix: update Gitea registry login endpoint in build workflow
Some checks failed
Build And Publish Production Image / Build And Publish Production Image (push) Failing after 7s
2026-03-27 17:07:30 -03:00
fa4bf360ff fix: update registry URL in build workflow
Some checks failed
Build And Publish Production Image / Build And Publish Production Image (push) Failing after 12s
2026-03-27 17:05:51 -03:00
2072dd299d fix: enhance Gitea registry login step to handle empty secrets
Some checks failed
Build And Publish Production Image / Build And Publish Production Image (push) Failing after 27s
2026-03-27 16:50:50 -03:00
af391efa89 fix: update Gitea registry login step to use correct secret names
Some checks failed
Build And Publish Production Image / Build And Publish Production Image (push) Failing after 22s
2026-03-27 16:46:54 -03:00
8893e85d53 fix: move Docker Hub login step into build job
Some checks failed
Build And Publish Production Image / Build And Publish Production Image (push) Failing after 1m48s
2026-03-27 16:44:14 -03:00
14ecd2fa18 fix: add Docker Hub login step to build workflow
Some checks failed
Build And Publish Production Image / Log In To Docker Hub (push) Successful in 2s
Build And Publish Production Image / Build And Publish Production Image (push) Failing after 12s
2026-03-27 16:42:57 -03:00
0fa3d28c1b Merge pull request 'develop' (#8) from develop into main
Some checks failed
Build And Publish Production Image / Build And Publish Production Image (push) Failing after 24s
Reviewed-on: #8
2026-03-27 16:35:58 -03:00
924d3eab35 Merge branch 'main' into develop 2026-03-27 16:35:54 -03:00
c6a3971c15 Merge pull request 'fix: ensure newline at end of file in build workflow' (#7) from feature/testing into develop
Reviewed-on: #7
2026-03-27 16:35:38 -03:00
18dba7f7a2 Merge branch 'feature/testing' of http://gitea.lab/sancho41/condado-newsletter into feature/testing
Some checks failed
CI / Backend Tests (pull_request) Has been cancelled
CI / Frontend Tests (pull_request) Has been cancelled
2026-03-27 16:34:54 -03:00
62306ea6a6 fix: update build trigger to use push on main branch instead of pull request review 2026-03-27 16:34:40 -03:00
90f63bc6ed fix: ensure newline at end of file in build workflow
Some checks failed
CI / Backend Tests (pull_request) Has been cancelled
CI / Frontend Tests (pull_request) Has been cancelled
2026-03-27 16:27:32 -03:00
ac6efceede fix: ensure newline at end of file in build workflow
Some checks failed
CI / Frontend Tests (pull_request) Has been cancelled
CI / Backend Tests (pull_request) Has been cancelled
2026-03-27 16:26:29 -03:00
440a7eade1 Merge pull request 'develop' (#6) from develop into main
Reviewed-on: #6
2026-03-27 16:24:58 -03:00
1581ddcaea Merge branch 'main' into develop 2026-03-27 16:24:50 -03:00
37a9ef22df Merge pull request 'feature/testing' (#5) from feature/testing into main
Reviewed-on: #5
2026-03-27 16:23:53 -03:00
81d04b63d1 develop (#4)
Reviewed-on: #4
Co-authored-by: Gabriel Sancho <gabriel.sancho13@gmail.com>
Co-committed-by: Gabriel Sancho <gabriel.sancho13@gmail.com>
2026-03-27 16:23:13 -03:00
6306073921 feature/testing (#3)
Reviewed-on: #3
Co-authored-by: Gabriel Sancho <gabriel.sancho13@gmail.com>
Co-committed-by: Gabriel Sancho <gabriel.sancho13@gmail.com>
2026-03-27 16:22:43 -03:00
5723c74e39 fix: add missing colon in Active Entities label on DashboardPage (#1) (#2)
Reviewed-on: #1
Co-authored-by: Gabriel Sancho <gabriel.sancho13@gmail.com>
Co-committed-by: Gabriel Sancho <gabriel.sancho13@gmail.com>
Reviewed-on: #2
2026-03-27 16:19:26 -03:00
46f78467bb fix: add missing colon in Active Entities label on DashboardPage (#1)
Reviewed-on: #1
Co-authored-by: Gabriel Sancho <gabriel.sancho13@gmail.com>
Co-committed-by: Gabriel Sancho <gabriel.sancho13@gmail.com>
2026-03-27 16:18:58 -03:00
d6de131a9b feat: update build workflow to create and publish all-in-one Docker image on approved PRs
Some checks failed
CI / Frontend Tests (pull_request) Has been cancelled
CI / Backend Tests (pull_request) Has been cancelled
2026-03-27 16:18:25 -03:00
6305a8e95e refactor: update build process to create a single all-in-one Docker image and adjust related configurations
Some checks failed
CI / Frontend Tests (pull_request) Has been cancelled
CI / Backend Tests (pull_request) Has been cancelled
2026-03-27 16:10:14 -03:00
3f0bb4be73 feat: update Docker configuration and CI/CD workflows for local image builds
Some checks failed
CI / Backend Tests (pull_request) Failing after 11m8s
CI / Frontend Tests (pull_request) Has been cancelled
2026-03-27 16:01:34 -03:00
06112330b6 fix(ci): add missing 'with' block for checkout step in backend and frontend jobs
Some checks failed
CI / Backend Tests (pull_request) Failing after 11m23s
CI / Frontend Tests (pull_request) Successful in 9m45s
2026-03-27 15:35:32 -03:00
46391948b3 fix: add missing colon in Active Entities label on DashboardPage
Some checks failed
CI / Backend Tests (pull_request) Failing after 2m11s
CI / Frontend Tests (pull_request) Failing after 1m23s
2026-03-27 15:28:12 -03:00
cf073be6b0 refactor: migrate CI/CD workflows from GitHub Actions to Gitea Actions and remove legacy workflows 2026-03-27 15:23:22 -03:00
433874d11e fix(frontend): keep entity and message deletes in sync 2026-03-27 03:38:41 -03:00
726c8f3afd fix(backend): allow blank prompt when creating tasks 2026-03-27 03:25:08 -03:00
0fc0416eb7 fix(docker): add extra_hosts for backend services 2026-03-27 03:22:37 -03:00
919aff07ff fix(docker): pass llama env vars to backend services 2026-03-27 03:15:17 -03:00
cd8e781b07 fix(backend): provide llama env config in test profile 2026-03-27 03:03:23 -03:00
490f0a6a5c chore(frontend): bump version to 0.2.2 2026-03-27 03:02:01 -03:00
1bd6c85fa8 fix(backend): implement step 1 — llama config from env vars 2026-03-27 03:01:53 -03:00
bb47bf25c5 test(backend): add failing tests for step 1 — llama env-only config 2026-03-27 03:01:35 -03:00
11f80b9dd7 docs(policy): enforce server-side data ownership and backend LLM mediation
- clarify frontend may only rely on backend-issued session token cookie for auth

- forbid frontend browser storage for domain/business data

- require backend-mediated LLM calls across agent workflows
2026-03-27 02:49:16 -03:00
37 changed files with 619 additions and 1262 deletions

View File

@@ -26,8 +26,12 @@ IMAP_INBOX_FOLDER=INBOX
OPENAI_API_KEY=sk-replace-me OPENAI_API_KEY=sk-replace-me
OPENAI_MODEL=gpt-4o OPENAI_MODEL=gpt-4o
# ── Llama / Ollama (backend preview generation) ───────────────────────────────
LLAMA_BASE_URL=http://celtinha.desktop:11434
LLAMA_MODEL=gemma3:4b
# ── Application ─────────────────────────────────────────────────────────────── # ── Application ───────────────────────────────────────────────────────────────
APP_RECIPIENTS=friend1@example.com,friend2@example.com APP_RECIPIENTS=friend1@example.com,friend2@example.com
# ── Frontend (Vite build-time) ──────────────────────────────────────────────── # ── Frontend (Vite dev proxy) ────────────────────────────────────────────────
VITE_API_BASE_URL=http://localhost VITE_API_BASE_URL=http://localhost

185
.gitea/workflows/build.yml Normal file
View File

@@ -0,0 +1,185 @@
name: Build And Publish Production Image
on:
push:
branches:
- main
jobs:
build:
name: Build And Publish Production Image
runs-on: ubuntu-latest
env:
REGISTRY: gitea.lab:80
IMAGE_NAME: sancho41/condado-newsletter
REGISTRY_USERNAME: ${{ secrets.REGISTRY_USERNAME }}
REGISTRY_PASSWORD: ${{ secrets.REGISTRY_PASSWORD }}
steps:
- uses: actions/checkout@v4
with:
github-server-url: http://gitea.lab
- name: Verify Docker CLI
run: docker version
- name: Log in to Docker Hub (optional)
if: ${{ secrets.DOCKERHUB_USERNAME != '' && secrets.DOCKERHUB_TOKEN != '' }}
run: echo "${{ secrets.DOCKERHUB_TOKEN }}" | docker login docker.io -u "${{ secrets.DOCKERHUB_USERNAME }}" --password-stdin
- name: Build all-in-one image
run: docker build -t condado-newsletter:latest -f Dockerfile.allinone .
- name: Tag
run: |
docker tag condado-newsletter:latest ${REGISTRY}/${IMAGE_NAME}:latest
docker tag condado-newsletter:latest ${REGISTRY}/${IMAGE_NAME}:${{ github.sha }}
- name: Deploy stack via Portainer API
env:
STACK_NAME: codado-newsletter-stack
PORTAINER_URL: http://portainer.lab/
PORTAINER_API_KEY: ${{ secrets.PORTAINER_API_KEY }}
PORTAINER_ENDPOINT_ID: ${{ secrets.PORTAINER_ENDPOINT_ID }}
run: |
set -u
set +e
PORTAINER_BASE_URL=$(printf '%s' "${PORTAINER_URL}" | sed -E 's/[[:space:]]+$//; s#/*$##')
echo "Portainer deploy debug"
echo "PORTAINER_URL=${PORTAINER_URL}"
echo "PORTAINER_BASE_URL=${PORTAINER_BASE_URL}"
echo "STACK_NAME=${STACK_NAME}"
echo "PORTAINER_ENDPOINT_ID=${PORTAINER_ENDPOINT_ID}"
echo "HTTP_PROXY=${HTTP_PROXY:-<empty>}"
echo "HTTPS_PROXY=${HTTPS_PROXY:-<empty>}"
echo "NO_PROXY=${NO_PROXY:-<empty>}"
echo "Current runner network info:"
if command -v ip >/dev/null 2>&1; then
ip -4 addr show || true
ip route || true
else
hostname -I || true
fi
PORTAINER_HOST=$(printf '%s' "${PORTAINER_BASE_URL}" | sed -E 's#^[a-zA-Z]+://##; s#/.*$##; s/:.*$//')
echo "Resolved host target: ${PORTAINER_HOST}"
PORTAINER_IP=""
ACTIVE_PORTAINER_BASE_URL="${PORTAINER_BASE_URL}"
if command -v getent >/dev/null 2>&1; then
echo "Host lookup (getent):"
getent hosts "${PORTAINER_HOST}" || true
PORTAINER_IP=$(getent hosts "${PORTAINER_HOST}" | awk 'NR==1{print $1}')
if [ -n "${PORTAINER_IP}" ]; then
PORTAINER_IP_BASE_URL="${PORTAINER_BASE_URL/${PORTAINER_HOST}/${PORTAINER_IP}}"
echo "Portainer IP fallback URL: ${PORTAINER_IP_BASE_URL}"
fi
fi
STACKS_BODY=$(mktemp)
STACKS_ERR=$(mktemp)
STACKS_HTTP_CODE=$(curl -sS \
--noproxy "*" \
-o "${STACKS_BODY}" \
-w "%{http_code}" \
"${ACTIVE_PORTAINER_BASE_URL}/api/stacks" \
-H "X-API-Key: ${PORTAINER_API_KEY}" \
2>"${STACKS_ERR}")
STACKS_CURL_EXIT=$?
if [ "${STACKS_CURL_EXIT}" -eq 6 ] && [ -n "${PORTAINER_IP:-}" ]; then
echo "Retrying GET /api/stacks with IP fallback due to DNS failure"
STACKS_HTTP_CODE=$(curl -sS \
--noproxy "*" \
-o "${STACKS_BODY}" \
-w "%{http_code}" \
"${PORTAINER_IP_BASE_URL}/api/stacks" \
-H "X-API-Key: ${PORTAINER_API_KEY}" \
2>"${STACKS_ERR}")
STACKS_CURL_EXIT=$?
if [ "${STACKS_CURL_EXIT}" -eq 0 ]; then
ACTIVE_PORTAINER_BASE_URL="${PORTAINER_IP_BASE_URL}"
fi
fi
echo "GET /api/stacks curl exit: ${STACKS_CURL_EXIT}"
echo "GET /api/stacks http code: ${STACKS_HTTP_CODE}"
echo "GET /api/stacks stderr:"
cat "${STACKS_ERR}" || true
echo "GET /api/stacks response (sanitized):"
jq -r '.[] | "Id=\(.Id) Name=\(.Name) EndpointId=\(.EndpointId)"' "${STACKS_BODY}" || true
if [ "${STACKS_CURL_EXIT}" -ne 0 ]; then
echo "Failed to reach Portainer API while listing stacks."
exit "${STACKS_CURL_EXIT}"
fi
if [ "${STACKS_HTTP_CODE}" -lt 200 ] || [ "${STACKS_HTTP_CODE}" -ge 300 ]; then
echo "Portainer returned a non-success status for stack listing."
exit 1
fi
STACK_ID=$(jq -r --arg stack_name "${STACK_NAME}" '.[] | select(.Name == $stack_name) | .Id' "${STACKS_BODY}" | head -n 1)
APPLY_BODY=$(mktemp)
APPLY_ERR=$(mktemp)
if [ -n "${STACK_ID}" ]; then
echo "Existing stack found with id=${STACK_ID}; sending update request"
PAYLOAD=$(jq -n \
--rawfile stack_file docker-compose.prod.yml \
'{StackFileContent: $stack_file, Env: [], Prune: false, PullImage: false}')
APPLY_HTTP_CODE=$(curl -sS -X PUT \
--noproxy "*" \
-o "${APPLY_BODY}" \
-w "%{http_code}" \
"${ACTIVE_PORTAINER_BASE_URL}/api/stacks/${STACK_ID}?endpointId=${PORTAINER_ENDPOINT_ID}" \
-H "X-API-Key: ${PORTAINER_API_KEY}" \
-H "Content-Type: application/json" \
-d "${PAYLOAD}" \
2>"${APPLY_ERR}")
APPLY_CURL_EXIT=$?
else
echo "Stack not found; sending create request"
PAYLOAD=$(jq -n \
--arg name "${STACK_NAME}" \
--rawfile stack_file docker-compose.prod.yml \
'{Name: $name, StackFileContent: $stack_file, Env: [], FromAppTemplate: false}')
APPLY_HTTP_CODE=$(curl -sS -X POST \
--noproxy "*" \
-o "${APPLY_BODY}" \
-w "%{http_code}" \
"${ACTIVE_PORTAINER_BASE_URL}/api/stacks/create/standalone/string?endpointId=${PORTAINER_ENDPOINT_ID}" \
-H "X-API-Key: ${PORTAINER_API_KEY}" \
-H "Content-Type: application/json" \
-d "${PAYLOAD}" \
2>"${APPLY_ERR}")
APPLY_CURL_EXIT=$?
fi
echo "Apply curl exit: ${APPLY_CURL_EXIT}"
echo "Apply http code: ${APPLY_HTTP_CODE}"
echo "Apply stderr:"
cat "${APPLY_ERR}" || true
echo "Apply response body:"
cat "${APPLY_BODY}" || true
if [ "${APPLY_CURL_EXIT}" -ne 0 ]; then
echo "Failed to reach Portainer API while applying stack changes."
exit "${APPLY_CURL_EXIT}"
fi
if [ "${APPLY_HTTP_CODE}" -lt 200 ] || [ "${APPLY_HTTP_CODE}" -ge 300 ]; then
echo "Portainer returned a non-success status while applying stack changes."
exit 1
fi
echo "Portainer deploy step completed successfully"

View File

@@ -1,10 +1,8 @@
name: CI name: CI
on: on:
push:
branches: ["**"]
pull_request: pull_request:
branches: ["**"] branches: ["develop"]
jobs: jobs:
backend-test: backend-test:
@@ -15,6 +13,8 @@ jobs:
working-directory: backend working-directory: backend
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
with:
github-server-url: http://gitea.lab
- name: Set up JDK 21 - name: Set up JDK 21
uses: actions/setup-java@v4 uses: actions/setup-java@v4
@@ -44,6 +44,8 @@ jobs:
working-directory: frontend working-directory: frontend
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
with:
github-server-url: http://gitea.lab
- name: Set up Node 20 - name: Set up Node 20
uses: actions/setup-node@v4 uses: actions/setup-node@v4

View File

@@ -98,3 +98,5 @@ cd backend
- DO NOT put business logic in controllers. - DO NOT put business logic in controllers.
- DO NOT put prompt construction logic outside `PromptBuilderService`. - DO NOT put prompt construction logic outside `PromptBuilderService`.
- DO NOT modify frontend code — your scope is `backend/` only. - DO NOT modify frontend code — your scope is `backend/` only.
- DO enforce server-side persistence for all business/domain data; frontend must not be required to persist domain data.
- DO model generated test-message history as backend-owned task-related data with referential integrity and cleanup on task deletion.

View File

@@ -39,6 +39,8 @@ frontend/src/
7. **Routes:** new pages go in `src/pages/`, registered in `src/router/index.tsx`, lazy-loaded. 7. **Routes:** new pages go in `src/pages/`, registered in `src/router/index.tsx`, lazy-loaded.
8. **Strings:** no hardcoded user-facing strings outside of constants. 8. **Strings:** no hardcoded user-facing strings outside of constants.
9. **No over-engineering:** only add what is explicitly needed — no extra abstractions, helpers, or features. 9. **No over-engineering:** only add what is explicitly needed — no extra abstractions, helpers, or features.
10. **Data ownership:** domain/business data must stay server-side; frontend never persists entities, tasks, generated messages, logs, or similar domain data in `localStorage`, `sessionStorage`, or `IndexedDB`.
11. **LLM calls:** frontend must never call OpenAI/Ollama/Llama directly; use backend APIs only.
## TDD Cycle ## TDD Cycle
@@ -59,4 +61,5 @@ frontend/src/
- DO NOT store server data in `useState`. - DO NOT store server data in `useState`.
- DO NOT build custom UI primitives when a shadcn/ui component exists. - DO NOT build custom UI primitives when a shadcn/ui component exists.
- DO NOT write implementation code before the failing test exists. - DO NOT write implementation code before the failing test exists.
- DO NOT modify backend code — your scope is `frontend/` only. - DO NOT modify backend code — your scope is `frontend/` only.
- DO NOT store business/domain data in browser storage; only the backend-issued `httpOnly` session cookie is allowed for auth state.

View File

@@ -1,6 +1,6 @@
--- ---
name: infra name: infra
description: "Use when working on Docker configuration, Docker Compose files, Dockerfiles, Nginx config, Supervisor config, GitHub Actions workflows, CI/CD pipelines, environment variables, or overall project architecture in the condado-news-letter project. Trigger phrases: docker, dockerfile, compose, nginx, ci/cd, github actions, publish image, build fails, infra, architecture, environment variables, container, supervisor, allinone image, docker hub." description: "Use when working on Docker configuration, Docker Compose files, Dockerfiles, Nginx config, Supervisor config, Gitea Actions workflows, CI/CD pipelines, deploy flows, environment variables, or overall project architecture in the condado-news-letter project. Trigger phrases: docker, dockerfile, compose, nginx, ci/cd, gitea actions, deploy, build fails, infra, architecture, environment variables, container, supervisor, allinone image."
tools: [read, edit, search, execute, todo] tools: [read, edit, search, execute, todo]
argument-hint: "Describe the infrastructure change or Docker/CI task to implement." argument-hint: "Describe the infrastructure change or Docker/CI task to implement."
--- ---
@@ -15,14 +15,14 @@ You are a senior DevOps / infrastructure engineer and software architect for the
| `backend/Dockerfile` | Backend-only multi-stage build image | | `backend/Dockerfile` | Backend-only multi-stage build image |
| `frontend/Dockerfile` | Frontend build + Nginx image | | `frontend/Dockerfile` | Frontend build + Nginx image |
| `docker-compose.yml` | Dev stack (postgres + backend + nginx + mailhog) | | `docker-compose.yml` | Dev stack (postgres + backend + nginx + mailhog) |
| `docker-compose.prod.yml` | Prod stack (postgres + backend + nginx, no mailhog) | | `docker-compose.prod.yml` | Prod stack (single all-in-one image) |
| `nginx/nginx.conf` | Nginx config for multi-container compose flavours | | `nginx/nginx.conf` | Nginx config for multi-container compose flavours |
| `nginx/nginx.allinone.conf` | Nginx config for the all-in-one image (localhost backend) | | `nginx/nginx.allinone.conf` | Nginx config for the all-in-one image (localhost backend) |
| `frontend/nginx.docker.conf` | Nginx config embedded in frontend image | | `frontend/nginx.docker.conf` | Nginx config embedded in frontend image |
| `docker/supervisord.conf` | Supervisor config (manages postgres + java + nginx inside allinone) | | `docker/supervisord.conf` | Supervisor config (manages postgres + java + nginx inside allinone) |
| `docker/entrypoint.sh` | Allinone container entrypoint (DB init, env wiring, supervisord start) | | `docker/entrypoint.sh` | Allinone container entrypoint (DB init, env wiring, supervisord start) |
| `.github/workflows/ci.yml` | CI: backend tests + frontend tests on every push/PR | | `.gitea/workflows/ci.yml` | CI: backend tests + frontend tests on pull requests to `develop` |
| `.github/workflows/publish.yml` | CD: build & push allinone image to Docker Hub on `main` merge | | `.gitea/workflows/build.yml` | Build: create and publish the all-in-one image on approved PRs to `main` |
| `.env.example` | Template for all environment variables | | `.env.example` | Template for all environment variables |
## System Topology ## System Topology
@@ -54,7 +54,7 @@ Docker volume → /var/lib/postgresql/data
| Flavour | Command | Notes | | Flavour | Command | Notes |
|---|---|---| |---|---|---|
| Dev | `docker compose up --build` | Includes Mailhog on :1025/:8025 | | Dev | `docker compose up --build` | Includes Mailhog on :1025/:8025 |
| Prod (compose) | `docker compose -f docker-compose.prod.yml up --build` | External DB/SMTP | | Prod (compose) | `docker compose -f docker-compose.prod.yml up -d` | Prebuilt all-in-one image with internal PostgreSQL |
| All-in-one | `docker run -p 80:80 -e APP_PASSWORD=... <image>` | Everything in one container | | All-in-one | `docker run -p 80:80 -e APP_PASSWORD=... <image>` | Everything in one container |
## Key Environment Variables ## Key Environment Variables
@@ -74,20 +74,16 @@ All injected at runtime — never hardcoded in images.
| `IMAP_HOST` / `IMAP_PORT` / `IMAP_INBOX_FOLDER` | Backend | IMAP server | | `IMAP_HOST` / `IMAP_PORT` / `IMAP_INBOX_FOLDER` | Backend | IMAP server |
| `OPENAI_API_KEY` / `OPENAI_MODEL` | Backend | OpenAI credentials | | `OPENAI_API_KEY` / `OPENAI_MODEL` | Backend | OpenAI credentials |
| `APP_RECIPIENTS` | Backend | Comma-separated recipient emails | | `APP_RECIPIENTS` | Backend | Comma-separated recipient emails |
| `VITE_API_BASE_URL` | Frontend (build-time ARG) | Backend API base URL | | `VITE_API_BASE_URL` | Frontend dev server | Backend API base URL for Vite proxy |
## CI/CD Pipeline ## CI/CD Pipeline
| Workflow | Trigger | What it does | | Workflow | Trigger | What it does |
|---|---|---| |---|---|---|
| `ci.yml` | Push / PR to any branch | Backend `./gradlew test` + Frontend `npm run test` | | `ci.yml` | Pull request to `develop` | Backend `./gradlew test` + Frontend `npm run test` |
| `publish.yml` | Push to `main` | Builds `Dockerfile.allinone`, pushes `latest` + `<sha>` tags to Docker Hub | | `build.yml` | Approved PR review to `main` | Builds `condado-newsletter` on the target Docker host, then pushes `latest` and `${github.sha}` tags to Gitea container registry |
**Required GitHub Secrets:** `DOCKERHUB_USERNAME`, `DOCKERHUB_TOKEN` The runner shares the target Docker host, so this workflow builds the image locally, tags it for `gitea.lab/sancho41/condado-newsletter`, and pushes it to Gitea container registry. `docker-compose.prod.yml` must reference that published image and not local build directives.
**Image tags on main merge:**
- `<user>/condado-newsletter:latest`
- `<user>/condado-newsletter:<git-sha>`
## Implementation Rules ## Implementation Rules

View File

@@ -8,6 +8,8 @@ argument-hint: "Describe the feature, bug, or change to deliver end-to-end."
You are the **delivery orchestrator** for the **Condado Abaixo da Média SA** project. You own the full lifecycle of a work item — from the moment the user describes what they want, to a merged-ready pull request with the version bumped. You never implement code yourself; you coordinate specialist agents and run git/shell commands. You are the **delivery orchestrator** for the **Condado Abaixo da Média SA** project. You own the full lifecycle of a work item — from the moment the user describes what they want, to a merged-ready pull request with the version bumped. You never implement code yourself; you coordinate specialist agents and run git/shell commands.
Git hosting is Gitea at `http://gitea.lab/sancho41/condado-newsletter.git`, and workflow follows strict Git Flow.
## Pipeline Overview ## Pipeline Overview
``` ```
@@ -41,10 +43,10 @@ Announce the label before proceeding: **"Classified as: `<label>`"**
## Step 2 — Create Branch ## Step 2 — Create Branch
1. Verify the working tree is clean: `git status --short`. If dirty, stop and warn the user. 1. Verify the working tree is clean: `git status --short`. If dirty, stop and warn the user.
2. Ensure you are on `main` and it is up to date: `git checkout main && git pull`. 2. Ensure `main` and `develop` are up to date: `git checkout main && git pull && git checkout develop && git pull`.
3. Create and checkout the branch: 3. Create and checkout the branch:
```bash ```bash
git checkout -b <prefix>/<kebab-case-short-description> git checkout -b <prefix>/<kebab-case-short-description> develop
``` ```
Branch name must be lowercase, kebab-case, max 50 chars. Branch name must be lowercase, kebab-case, max 50 chars.
4. Announce the branch name. 4. Announce the branch name.
@@ -118,11 +120,12 @@ Read the new version from `frontend/package.json` after bumping.
git push -u origin <branch-name> git push -u origin <branch-name>
``` ```
2. Open a pull request using the GitHub CLI: 2. Open a pull request targeting `develop`.
If `tea` (Gitea CLI) is available, use:
```bash ```bash
gh pr create \ tea pr create \
--title "<conventional-commit-type>(<scope>): <short description>" \ --title "<conventional-commit-type>(<scope>): <short description>" \
--body "$(cat <<'EOF' --description "$(cat <<'EOF'
## Summary ## Summary
<1-3 sentences describing what was done and why> <1-3 sentences describing what was done and why>
@@ -141,9 +144,10 @@ Read the new version from `frontend/package.json` after bumping.
- Build green: `./gradlew build` + `npm run build` - Build green: `./gradlew build` + `npm run build`
EOF EOF
)" \ )" \
--base main \ --base develop \
--head <branch-name> --head <branch-name>
``` ```
If `tea` is unavailable, provide the exact PR title/body and instruct opening a PR in the Gitea web UI with base `develop`.
3. Announce the PR URL. 3. Announce the PR URL.
@@ -152,13 +156,15 @@ Read the new version from `frontend/package.json` after bumping.
## Constraints ## Constraints
- DO NOT implement any code yourself — delegate everything to specialist agents. - DO NOT implement any code yourself — delegate everything to specialist agents.
- DO NOT commit directly to `main`. - DO NOT commit directly to `main` or `develop`.
- DO NOT use `--force`, `--no-verify`, or `git reset --hard`. - DO NOT use `--force`, `--no-verify`, or `git reset --hard`.
- DO NOT proceed to the next step if the current step's tests are not green. - DO NOT proceed to the next step if the current step's tests are not green.
- DO NOT bump the version before all implementation commits are done. - DO NOT bump the version before all implementation commits are done.
- ALWAYS verify `git status` is clean before creating the branch. - ALWAYS verify `git status` is clean before creating the branch.
- ALWAYS use `gh pr create` (GitHub CLI) for pull requests — never instruct the user to open one manually unless `gh` is unavailable. - ALWAYS target `develop` for regular feature/fix/chore PRs.
- If `gh` is not installed, clearly tell the user and provide the exact PR title and body to paste into the GitHub UI. - Use Gitea flow for PR creation (`tea` if available, otherwise web UI instructions with exact PR metadata).
- ALWAYS enforce backend ownership of business/domain data; do not accept frontend browser storage solutions for domain persistence.
- ALWAYS enforce backend-mediated LLM calls; frontend must never call LLM providers directly.
--- ---

View File

@@ -14,7 +14,7 @@ Full-stack monorepo:
- **Frontend:** React 18 + Vite + TypeScript + shadcn/ui + TanStack Query v5 + Axios + React Router v6 - **Frontend:** React 18 + Vite + TypeScript + shadcn/ui + TanStack Query v5 + Axios + React Router v6
- **Auth:** Single admin, password via `APP_PASSWORD` env var, JWT in `httpOnly` cookie - **Auth:** Single admin, password via `APP_PASSWORD` env var, JWT in `httpOnly` cookie
- **Infra:** Docker Compose (dev + prod) + all-in-one Dockerfile, Nginx reverse proxy - **Infra:** Docker Compose (dev + prod) + all-in-one Dockerfile, Nginx reverse proxy
- **CI/CD:** GitHub Actions — tests on every PR, Docker Hub publish on `main` merge - **CI/CD:** Gitea Actions — tests on pull requests to `develop`
## Your Workflow ## Your Workflow
@@ -52,6 +52,8 @@ For each step output:
- DO reference specific existing files by path when relevant (e.g., `backend/src/main/kotlin/.../EntityService.kt`). - DO reference specific existing files by path when relevant (e.g., `backend/src/main/kotlin/.../EntityService.kt`).
- ALWAYS check the existing codebase before planning — never assume something doesn't exist. - ALWAYS check the existing codebase before planning — never assume something doesn't exist.
- ALWAYS respect the architecture: business logic in services, thin controllers, API layer in `src/api/`, React Query for server state. - ALWAYS respect the architecture: business logic in services, thin controllers, API layer in `src/api/`, React Query for server state.
- ALWAYS enforce backend-first data ownership in plans: domain/business data persistence belongs to backend/database, not browser storage.
- NEVER plan frontend direct LLM calls; all LLM interactions must be backend-mediated endpoints.
## Delegation Hint ## Delegation Hint

View File

@@ -1,60 +0,0 @@
# Manually-triggered workflow: bump the version in frontend/package.json,
# commit the change, and push an annotated `frontend-vX.Y.Z` tag.
name: Bump Frontend Version

on:
  workflow_dispatch:
    inputs:
      bump:
        description: Version bump type
        required: true
        default: patch
        type: choice
        options:
          - patch
          - minor
          - major

permissions:
  contents: write  # needed to push the commit and the tag

jobs:
  bump-version:
    name: Bump frontend package version
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0  # full history so existing tags are visible to `git rev-parse`

      - name: Set up Node 20
        uses: actions/setup-node@v4
        with:
          node-version: "20"

      - name: Bump frontend version
        working-directory: frontend
        # --no-git-tag-version: npm only edits package.json; git commit/tag is done below
        run: npm version ${{ inputs.bump }} --no-git-tag-version

      - name: Commit and push version update
        run: |
          git config user.name "github-actions[bot]"
          git config user.email "41898282+github-actions[bot]@users.noreply.github.com"
          git add frontend/package.json
          if [ -f frontend/package-lock.json ]; then
            git add frontend/package-lock.json
          fi
          # Nothing staged means the bump was a no-op — succeed without committing.
          if git diff --cached --quiet; then
            echo "No version changes to commit"
            exit 0
          fi
          NEW_VERSION=$(node -p "require('./frontend/package.json').version")
          TAG_NAME="frontend-v${NEW_VERSION}"
          # Check the tag BEFORE committing so a duplicate tag doesn't leave a stray commit.
          if git rev-parse "${TAG_NAME}" >/dev/null 2>&1; then
            echo "Tag ${TAG_NAME} already exists"
            exit 1
          fi
          git commit -m "chore(frontend): bump version to ${NEW_VERSION}"
          git tag -a "${TAG_NAME}" -m "Frontend ${NEW_VERSION}"
          git push
          git push origin "${TAG_NAME}"

View File

@@ -1,36 +0,0 @@
# Builds the all-in-one image and pushes it to Docker Hub on every merge to main.
name: Publish to Docker Hub

on:
  push:
    branches:
      - main

jobs:
  build-and-push:
    name: Build & Push All-in-one Image
    runs-on: ubuntu-latest
    environment:
      name: production  # gates the Docker Hub secrets behind the production environment
    steps:
      - uses: actions/checkout@v4

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Log in to Docker Hub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Build and push
        uses: docker/build-push-action@v5
        with:
          context: .
          file: Dockerfile.allinone
          push: true
          # Publish a floating `latest` tag plus an immutable per-commit tag for pinning.
          tags: |
            ${{ secrets.DOCKERHUB_USERNAME }}/condado-newsletter:latest
            ${{ secrets.DOCKERHUB_USERNAME }}/condado-newsletter:${{ github.sha }}
          # Reuse layer cache across runs via the GitHub Actions cache backend.
          cache-from: type=gha
          cache-to: type=gha,mode=max

View File

@@ -83,8 +83,8 @@ The cycle for every step is:
| Reverse Proxy | Nginx (serves frontend + proxies `/api` to backend) | | Reverse Proxy | Nginx (serves frontend + proxies `/api` to backend) |
| Dev Mail | Mailhog (SMTP trap + web UI) | | Dev Mail | Mailhog (SMTP trap + web UI) |
| All-in-one image | Single Docker image: Nginx + Spring Boot + PostgreSQL + Supervisor | | All-in-one image | Single Docker image: Nginx + Spring Boot + PostgreSQL + Supervisor |
| Image registry | Docker Hub (`<dockerhub-user>/condado-newsletter`) | | Image registry | Gitea container registry (`gitea.lab/sancho41/condado-newsletter`) |
| CI/CD | GitHub Actions — build, test, push to Docker Hub on merge to `main` | | CI/CD | Gitea Actions — test PRs to `develop`, build and publish the production image on approved PRs targeting `main` |
## Deployment Flavours ## Deployment Flavours
@@ -93,7 +93,7 @@ There are **three ways to run the project**:
| Flavour | Command | When to use | | Flavour | Command | When to use |
|---------------------|---------------------------------|------------------------------------------------| |---------------------|---------------------------------|------------------------------------------------|
| **Dev** | `docker compose up` | Local development — includes Mailhog | | **Dev** | `docker compose up` | Local development — includes Mailhog |
| **Prod (compose)** | `docker compose -f docker-compose.prod.yml up` | Production with external DB/SMTP | | **Prod (compose)** | `docker compose -f docker-compose.prod.yml up -d` | Production with the prebuilt all-in-one image |
| **All-in-one** | `docker run ...` | Simplest deploy — everything in one container | | **All-in-one** | `docker run ...` | Simplest deploy — everything in one container |
### All-in-one Image ### All-in-one Image
@@ -104,7 +104,7 @@ The all-in-one image (`Dockerfile.allinone`) bundles **everything** into a singl
- **PostgreSQL** — embedded database - **PostgreSQL** — embedded database
- **Supervisor** — process manager that starts and supervises all three processes - **Supervisor** — process manager that starts and supervises all three processes
This image is published to Docker Hub at `<dockerhub-user>/condado-newsletter:latest`. The all-in-one image is built on the runner host and then published to the Gitea container registry.
**Minimal `docker run` command:** **Minimal `docker run` command:**
```bash ```bash
@@ -121,7 +121,7 @@ docker run -d \
-e IMAP_PORT=993 \ -e IMAP_PORT=993 \
-e APP_RECIPIENTS=friend1@example.com,friend2@example.com \ -e APP_RECIPIENTS=friend1@example.com,friend2@example.com \
-v condado-data:/var/lib/postgresql/data \ -v condado-data:/var/lib/postgresql/data \
<dockerhub-user>/condado-newsletter:latest gitea.lab/sancho41/condado-newsletter:latest
``` ```
The app is then available at `http://localhost`. The app is then available at `http://localhost`.
@@ -213,13 +213,15 @@ condado-news-letter/ ← repo root
├── .env.example ← template for all env vars ├── .env.example ← template for all env vars
├── .gitignore ├── .gitignore
├── docker-compose.yml ← dev stack (Nginx + Backend + PostgreSQL + Mailhog) ├── docker-compose.yml ← dev stack (Nginx + Backend + PostgreSQL + Mailhog)
├── docker-compose.prod.yml ← prod stack (Nginx + Backend + PostgreSQL) ├── docker-compose.prod.yml ← prod stack (single all-in-one image)
├── Dockerfile.allinone ← all-in-one image (Nginx + Backend + PostgreSQL + Supervisor) ├── Dockerfile.allinone ← all-in-one image (Nginx + Backend + PostgreSQL + Supervisor)
├── .github/ ├── .github/
│ └── workflows/ │ └── workflows/
── ci.yml ← run tests on every PR ── (legacy, unused after Gitea migration)
│ └── publish.yml ← build & push all-in-one image to Docker Hub on main merge ├── .gitea/
│ └── workflows/
│ └── ci.yml ← run tests on pull requests targeting `develop`
├── backend/ ← Spring Boot (Kotlin + Gradle) ├── backend/ ← Spring Boot (Kotlin + Gradle)
│ ├── build.gradle.kts │ ├── build.gradle.kts
@@ -310,7 +312,7 @@ npm run test
docker compose up --build docker compose up --build
# Prod # Prod
docker compose -f docker-compose.prod.yml up --build docker compose -f docker-compose.prod.yml up -d
# Stop # Stop
docker compose down docker compose down
@@ -363,6 +365,19 @@ docker compose down
--- ---
## Data Ownership Policy (Critical)
- **All business data must be persisted server-side** (PostgreSQL via backend APIs).
- The frontend must treat the backend as the single source of truth for entities, tasks,
generated preview messages/history, logs, and any other domain data.
- The frontend must **not** persist business/domain data in browser storage (`localStorage`,
`sessionStorage`, `IndexedDB`) or call LLM providers directly.
- The only browser-stored auth state is the backend-issued session token cookie (`httpOnly` JWT).
- If a required endpoint does not exist yet, implement it in the backend first; do not add
frontend-side persistence workarounds.
---
## Naming Conventions ## Naming Conventions
### Backend ### Backend
@@ -441,7 +456,7 @@ Never hardcode any of these values.
| `OPENAI_API_KEY` | Backend | OpenAI API key | | `OPENAI_API_KEY` | Backend | OpenAI API key |
| `OPENAI_MODEL` | Backend | OpenAI model (default: `gpt-4o`) | | `OPENAI_MODEL` | Backend | OpenAI model (default: `gpt-4o`) |
| `APP_RECIPIENTS` | Backend | Comma-separated list of recipient emails | | `APP_RECIPIENTS` | Backend | Comma-separated list of recipient emails |
| `VITE_API_BASE_URL` | Frontend | Backend API base URL (used by Vite at build time) | | `VITE_API_BASE_URL` | Frontend | Backend API base URL for the Vite dev server proxy |
> ⚠️ Never hardcode credentials. Always use environment variables or a `.env` file (gitignored). > ⚠️ Never hardcode credentials. Always use environment variables or a `.env` file (gitignored).
@@ -509,13 +524,17 @@ BODY:
## Git Workflow & CI/CD ## Git Workflow & CI/CD
- Branch naming: `feature/<short-description>`, `fix/<short-description>`, `chore/<short-description>` - Git hosting: Gitea instance at `http://gitea.lab`.
- Canonical remote: `origin = http://gitea.lab/sancho41/condado-newsletter.git`.
- Branch model: **Git Flow** (`main` + `develop` as permanent branches).
- Branch naming: `feature/<short-description>`, `fix/<short-description>`, `hotfix/<short-description>`, `release/<short-description>`, `chore/<short-description>`
- Commit messages follow [Conventional Commits](https://www.conventionalcommits.org/): `feat:`, `fix:`, `chore:`, `docs:`, `test:` - Commit messages follow [Conventional Commits](https://www.conventionalcommits.org/): `feat:`, `fix:`, `chore:`, `docs:`, `test:`
- Scope your commits: `feat(backend):`, `feat(frontend):`, `chore(docker):` - Scope your commits: `feat(backend):`, `feat(frontend):`, `chore(docker):`
- **TDD commit order per step:** first `test(<scope>): add failing tests for <step>`, then - **TDD commit order per step:** first `test(<scope>): add failing tests for <step>`, then
`feat(<scope>): implement <step> — all tests passing`. `feat(<scope>): implement <step> — all tests passing`.
- PRs require all CI checks to pass before merging. - Pull requests must target `develop` for regular work.
- Never commit directly to `main`. - CI runs on pull requests to `develop` and must pass before merge.
- Never commit directly to `main` or `develop`.
### Commit Rules (enforced by AI) ### Commit Rules (enforced by AI)
@@ -551,23 +570,14 @@ Good examples:
- `feat(frontend): implement step 2 - per-entity scheduled task creation` - `feat(frontend): implement step 2 - per-entity scheduled task creation`
- `docs(config): clarify english-first language policy and commit quality rules` - `docs(config): clarify english-first language policy and commit quality rules`
### GitHub Actions Workflows ### Gitea Actions Workflows
| Workflow file | Trigger | What it does | | Workflow file | Trigger | What it does |
|----------------------------|----------------------------|-----------------------------------------------------------| |----------------------------|----------------------------|-----------------------------------------------------------|
| `.github/workflows/ci.yml` | Push / PR to any branch | Backend tests (`./gradlew test`) + Frontend tests (`npm run test`) | | `.gitea/workflows/ci.yml` | PR to `develop` | Backend tests (`./gradlew test`) + Frontend tests (`npm run test`) |
| `.github/workflows/publish.yml` | Push to `main` | Builds `Dockerfile.allinone`, tags as `latest` + git SHA, pushes to Docker Hub | | `.gitea/workflows/build.yml` | Approved PR review on `main` | Build `condado-newsletter`, then publish `latest` and `${github.sha}` tags to Gitea container registry |
**Required GitHub Secrets:** Build policy: the runner shares the target Docker host, so the build workflow produces the image locally, tags it for `gitea.lab/sancho41/condado-newsletter`, and pushes it to Gitea container registry. `docker-compose.prod.yml` references that published image.
| Secret | Description |
|-----------------------|--------------------------------------------|
| `DOCKERHUB_USERNAME` | Docker Hub account username |
| `DOCKERHUB_TOKEN` | Docker Hub access token (not password) |
**Image tags pushed on every `main` merge:**
- `<dockerhub-user>/condado-newsletter:latest`
- `<dockerhub-user>/condado-newsletter:<git-sha>` (for pinning)
--- ---

View File

@@ -15,6 +15,7 @@ FROM gradle:8-jdk21-alpine AS backend-build
WORKDIR /app/backend WORKDIR /app/backend
COPY backend/build.gradle.kts backend/settings.gradle.kts ./ COPY backend/build.gradle.kts backend/settings.gradle.kts ./
COPY backend/gradle.properties ./
COPY backend/gradle ./gradle COPY backend/gradle ./gradle
RUN gradle dependencies --no-daemon --quiet || true RUN gradle dependencies --no-daemon --quiet || true

File diff suppressed because it is too large Load Diff

View File

@@ -9,7 +9,7 @@ import java.util.UUID
data class EntityTaskCreateDto( data class EntityTaskCreateDto(
@field:NotNull val entityId: UUID, @field:NotNull val entityId: UUID,
@field:NotBlank val name: String, @field:NotBlank val name: String,
@field:NotBlank val prompt: String, val prompt: String,
@field:NotBlank val scheduleCron: String, @field:NotBlank val scheduleCron: String,
@field:NotBlank val emailLookback: String @field:NotBlank val emailLookback: String
) )

View File

@@ -21,7 +21,7 @@ class EntityService(
/** Returns all virtual entities. */ /** Returns all virtual entities. */
fun findAll(): List<VirtualEntityResponseDto> = fun findAll(): List<VirtualEntityResponseDto> =
virtualEntityRepository.findAll().map { VirtualEntityResponseDto.from(it) } virtualEntityRepository.findAllByActiveTrue().map { VirtualEntityResponseDto.from(it) }
/** Returns one entity by ID, or null if not found. */ /** Returns one entity by ID, or null if not found. */
fun findById(id: UUID): VirtualEntityResponseDto? = fun findById(id: UUID): VirtualEntityResponseDto? =

View File

@@ -14,8 +14,10 @@ import java.util.Date
@Service @Service
class JwtService( class JwtService(
@Value("\${app.jwt.secret}") val secret: String, @Value("\${app.jwt.secret}") val secret: String,
@Value("\${app.jwt.expiration-ms}") val expirationMs: Long @Value("\${app.jwt.expiration-ms:86400000}") expirationMsRaw: String
) { ) {
private val expirationMs: Long = expirationMsRaw.toLongOrNull() ?: 86400000L
private val signingKey by lazy { private val signingKey by lazy {
Keys.hmacShaKeyFor(secret.toByteArray(Charsets.UTF_8)) Keys.hmacShaKeyFor(secret.toByteArray(Charsets.UTF_8))
} }

View File

@@ -10,8 +10,8 @@ import org.springframework.web.client.RestClient
@Service @Service
class LlamaPreviewService( class LlamaPreviewService(
private val restClient: RestClient, private val restClient: RestClient,
@Value("\${llama.base-url:http://localhost:11434}") private val baseUrl: String, @Value("\${llama.base-url}") private val baseUrl: String,
@Value("\${llama.model:gemma3:4b}") private val model: String @Value("\${llama.model}") private val model: String
) { ) {
/** /**

View File

@@ -49,8 +49,8 @@ openai:
model: ${OPENAI_MODEL:gpt-4o} model: ${OPENAI_MODEL:gpt-4o}
llama: llama:
base-url: ${LLAMA_BASE_URL:http://localhost:11434} base-url: ${LLAMA_BASE_URL}
model: ${LLAMA_MODEL:gemma3:4b} model: ${LLAMA_MODEL}
springdoc: springdoc:
swagger-ui: swagger-ui:

View File

@@ -0,0 +1,74 @@
package com.condado.newsletter.controller
import com.condado.newsletter.model.VirtualEntity
import com.condado.newsletter.repository.EntityTaskRepository
import com.condado.newsletter.repository.GeneratedMessageHistoryRepository
import com.condado.newsletter.repository.VirtualEntityRepository
import com.condado.newsletter.scheduler.EntityScheduler
import com.condado.newsletter.service.JwtService
import com.ninjasquad.springmockk.MockkBean
import jakarta.servlet.http.Cookie
import org.junit.jupiter.api.AfterEach
import org.junit.jupiter.api.Test
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureMockMvc
import org.springframework.boot.test.context.SpringBootTest
import org.springframework.http.MediaType
import org.springframework.test.web.servlet.MockMvc
import org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post
import org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath
import org.springframework.test.web.servlet.result.MockMvcResultMatchers.status
@SpringBootTest
@AutoConfigureMockMvc
class EntityTaskControllerTest {

    @Autowired lateinit var mockMvc: MockMvc
    @Autowired lateinit var jwtService: JwtService
    @Autowired lateinit var virtualEntityRepository: VirtualEntityRepository
    @Autowired lateinit var entityTaskRepository: EntityTaskRepository
    @Autowired lateinit var generatedMessageHistoryRepository: GeneratedMessageHistoryRepository

    // Mocked so no real scheduled jobs are registered while the context is up.
    @MockkBean lateinit var entityScheduler: EntityScheduler

    /** Builds the auth cookie every secured endpoint expects. */
    private fun authCookie() = Cookie("jwt", jwtService.generateToken())

    @AfterEach
    fun cleanUp() {
        // Children first, then parents, to satisfy FK constraints.
        generatedMessageHistoryRepository.deleteAll()
        entityTaskRepository.deleteAll()
        virtualEntityRepository.deleteAll()
    }

    @Test
    fun should_createTask_when_promptIsBlankInCreateRequest() {
        // An empty prompt must be accepted — only name/cron/lookback are mandatory.
        val owner = virtualEntityRepository.save(
            VirtualEntity(
                name = "Entity A",
                email = "entity-a@condado.com",
                jobTitle = "Ops"
            )
        )

        val requestJson = """
            {
            "entityId": "${owner.id}",
            "name": "Morning Blast",
            "prompt": "",
            "scheduleCron": "0 8 * * 1-5",
            "emailLookback": "last_week"
            }
        """.trimIndent()

        val request = post("/api/v1/tasks")
            .cookie(authCookie())
            .contentType(MediaType.APPLICATION_JSON)
            .content(requestJson)

        mockMvc.perform(request)
            .andExpect(status().isCreated)
            .andExpect(jsonPath("$.entityId").value(owner.id.toString()))
            .andExpect(jsonPath("$.name").value("Morning Blast"))
            .andExpect(jsonPath("$.prompt").value(""))
    }
}

View File

@@ -138,4 +138,32 @@ class TaskGeneratedMessageControllerTest {
.andExpect(jsonPath("$").isArray) .andExpect(jsonPath("$").isArray)
.andExpect(jsonPath("$.length()").value(0)) .andExpect(jsonPath("$.length()").value(0))
} }
@Test
fun should_deleteOnlySelectedHistoryItem_when_multipleMessagesExist() {
    val parentTask = createTask()

    // Persist two history entries; only the first one will be deleted.
    val toDelete = generatedMessageHistoryRepository.save(
        GeneratedMessageHistory(
            task = parentTask,
            label = "Message #1",
            content = "SUBJECT: First\nBODY:\nHello"
        )
    )
    val toKeep = generatedMessageHistoryRepository.save(
        GeneratedMessageHistory(
            task = parentTask,
            label = "Message #2",
            content = "SUBJECT: Second\nBODY:\nHi"
        )
    )

    // Deleting one entry returns 204 and must not touch its sibling.
    mockMvc.perform(delete("/api/v1/tasks/${parentTask.id}/generated-messages/${toDelete.id}").cookie(authCookie()))
        .andExpect(status().isNoContent)

    mockMvc.perform(get("/api/v1/tasks/${parentTask.id}/generated-messages").cookie(authCookie()))
        .andExpect(status().isOk)
        .andExpect(jsonPath("$.length()").value(1))
        .andExpect(jsonPath("$[0].id").value(toKeep.id.toString()))
        .andExpect(jsonPath("$[0].label").value("Message #2"))
}
} }

View File

@@ -62,6 +62,17 @@ class VirtualEntityControllerTest {
.andExpect(status().isOk).andExpect(jsonPath("$").isArray).andExpect(jsonPath("$[0].name").value("Test Entity")) .andExpect(status().isOk).andExpect(jsonPath("$").isArray).andExpect(jsonPath("$[0].name").value("Test Entity"))
} }
@Test
fun should_returnOnlyActiveEntities_when_getAllEntities() {
    // Seed one active and one inactive entity — the list endpoint must filter the inactive one.
    virtualEntityRepository.save(VirtualEntity(name = "Active Entity", email = "active@condado.com", jobTitle = "Tester", active = true))
    virtualEntityRepository.save(VirtualEntity(name = "Inactive Entity", email = "inactive@condado.com", jobTitle = "Tester", active = false))

    val request = get("/api/v1/virtual-entities").cookie(authCookie())

    mockMvc.perform(request)
        .andExpect(status().isOk)
        .andExpect(jsonPath("$.length()").value(1))
        .andExpect(jsonPath("$[0].name").value("Active Entity"))
}
@Test @Test
fun should_return200AndEntity_when_getById() { fun should_return200AndEntity_when_getById() {
val entity = virtualEntityRepository.save(VirtualEntity(name = "Test Entity", email = "entity@condado.com", jobTitle = "Test Job")) val entity = virtualEntityRepository.save(VirtualEntity(name = "Test Entity", email = "entity@condado.com", jobTitle = "Test Job"))

View File

@@ -40,7 +40,7 @@ class AuthServiceTest {
fun should_returnValidClaims_when_jwtTokenParsed() { fun should_returnValidClaims_when_jwtTokenParsed() {
val realJwtService = JwtService( val realJwtService = JwtService(
secret = "test-secret-key-for-testing-only-must-be-at-least-32-characters", secret = "test-secret-key-for-testing-only-must-be-at-least-32-characters",
expirationMs = 86400000L expirationMsRaw = "86400000"
) )
val token = realJwtService.generateToken() val token = realJwtService.generateToken()
@@ -51,7 +51,7 @@ class AuthServiceTest {
fun should_returnFalse_when_expiredTokenValidated() { fun should_returnFalse_when_expiredTokenValidated() {
val realJwtService = JwtService( val realJwtService = JwtService(
secret = "test-secret-key-for-testing-only-must-be-at-least-32-characters", secret = "test-secret-key-for-testing-only-must-be-at-least-32-characters",
expirationMs = 1L expirationMsRaw = "1"
) )
val token = realJwtService.generateToken() val token = realJwtService.generateToken()

View File

@@ -0,0 +1,26 @@
package com.condado.newsletter.service
import io.jsonwebtoken.Jwts
import io.jsonwebtoken.security.Keys
import org.junit.jupiter.api.Assertions.assertTrue
import org.junit.jupiter.api.Test
class JwtServiceTest {

    // Exactly 32 bytes — the minimum HMAC-SHA256 key length jjwt accepts.
    private val signingSecret = "12345678901234567890123456789012"

    @Test
    fun should_generate_token_when_expiration_is_empty() {
        // An empty expiration string must fall back to the service default instead
        // of failing, and the token must still expire after it was issued.
        val service = JwtService(signingSecret, "")

        val claims = Jwts.parser()
            .verifyWith(Keys.hmacShaKeyFor(signingSecret.toByteArray(Charsets.UTF_8)))
            .build()
            .parseSignedClaims(service.generateToken())
            .payload

        assertTrue(claims.expiration.after(claims.issuedAt))
    }
}

View File

@@ -0,0 +1,30 @@
package com.condado.newsletter.service
import org.assertj.core.api.Assertions.assertThat
import org.junit.jupiter.api.Test
import org.springframework.beans.factory.annotation.Value
import java.nio.file.Files
import java.nio.file.Path
class LlamaConfigurationBindingTest {

    @Test
    fun should_bindLlamaBaseUrlAndModelWithoutFallback_when_readingServiceValueAnnotations() {
        // Reflectively read the @Value placeholders on the service constructor:
        // neither property may carry a hard-coded fallback value.
        val ctor = LlamaPreviewService::class.java.declaredConstructors.single()
        val baseUrlPlaceholder = ctor.parameters[1].getAnnotation(Value::class.java).value
        val modelPlaceholder = ctor.parameters[2].getAnnotation(Value::class.java).value

        assertThat(baseUrlPlaceholder).isEqualTo("\${llama.base-url}")
        assertThat(modelPlaceholder).isEqualTo("\${llama.model}")
    }

    @Test
    fun should_referenceEnvironmentPlaceholdersWithoutFallback_when_readingApplicationYaml() {
        val applicationYaml = Files.readString(Path.of("src/main/resources/application.yml"))

        // The env placeholders must be present, and the old hard-coded defaults gone.
        assertThat(applicationYaml).contains("base-url: \${LLAMA_BASE_URL}")
        assertThat(applicationYaml).contains("model: \${LLAMA_MODEL}")
        assertThat(applicationYaml).doesNotContain("LLAMA_BASE_URL:http://localhost:11434")
        assertThat(applicationYaml).doesNotContain("LLAMA_MODEL:gemma3:4b")
    }
}

View File

@@ -38,3 +38,7 @@ imap:
openai: openai:
api-key: test-api-key api-key: test-api-key
model: gpt-4o model: gpt-4o
llama:
base-url: http://localhost:11434
model: gemma3:4b

View File

@@ -1,40 +1,15 @@
services: services:
condado-newsletter:
# ── PostgreSQL ─────────────────────────────────────────────────────────────── image: sancho41/condado-newsletter:latest
postgres: container_name: condado-newsletter
image: postgres:16-alpine restart: unless-stopped
restart: always
environment:
POSTGRES_DB: condado
POSTGRES_USER: ${SPRING_DATASOURCE_USERNAME}
POSTGRES_PASSWORD: ${SPRING_DATASOURCE_PASSWORD}
volumes:
- postgres-data:/var/lib/postgresql/data
networks:
- condado-net
healthcheck:
test: ["CMD-SHELL", "pg_isready -U ${SPRING_DATASOURCE_USERNAME} -d condado"]
interval: 10s
timeout: 5s
retries: 5
# ── Backend (Spring Boot) ────────────────────────────────────────────────────
backend:
build:
context: ./backend
dockerfile: Dockerfile
restart: always
depends_on:
postgres:
condition: service_healthy
environment: environment:
SPRING_PROFILES_ACTIVE: prod SPRING_PROFILES_ACTIVE: prod
SPRING_DATASOURCE_URL: ${SPRING_DATASOURCE_URL}
SPRING_DATASOURCE_USERNAME: ${SPRING_DATASOURCE_USERNAME} SPRING_DATASOURCE_USERNAME: ${SPRING_DATASOURCE_USERNAME}
SPRING_DATASOURCE_PASSWORD: ${SPRING_DATASOURCE_PASSWORD} SPRING_DATASOURCE_PASSWORD: ${SPRING_DATASOURCE_PASSWORD}
APP_PASSWORD: ${APP_PASSWORD} APP_PASSWORD: ${APP_PASSWORD}
JWT_SECRET: ${JWT_SECRET} JWT_SECRET: ${JWT_SECRET}
JWT_EXPIRATION_MS: ${JWT_EXPIRATION_MS} JWT_EXPIRATION_MS: ${JWT_EXPIRATION_MS:-86400000}
MAIL_HOST: ${MAIL_HOST} MAIL_HOST: ${MAIL_HOST}
MAIL_PORT: ${MAIL_PORT} MAIL_PORT: ${MAIL_PORT}
MAIL_USERNAME: ${MAIL_USERNAME} MAIL_USERNAME: ${MAIL_USERNAME}
@@ -44,28 +19,28 @@ services:
IMAP_INBOX_FOLDER: ${IMAP_INBOX_FOLDER} IMAP_INBOX_FOLDER: ${IMAP_INBOX_FOLDER}
OPENAI_API_KEY: ${OPENAI_API_KEY} OPENAI_API_KEY: ${OPENAI_API_KEY}
OPENAI_MODEL: ${OPENAI_MODEL} OPENAI_MODEL: ${OPENAI_MODEL}
LLAMA_BASE_URL: ${LLAMA_BASE_URL}
LLAMA_MODEL: ${LLAMA_MODEL}
APP_RECIPIENTS: ${APP_RECIPIENTS} APP_RECIPIENTS: ${APP_RECIPIENTS}
networks: extra_hosts:
- condado-net - "celtinha.desktop:host-gateway"
- "host.docker.internal:host-gateway"
# ── Frontend + Nginx ───────────────────────────────────────────────────────── volumes:
nginx: - postgres-data:/var/lib/postgresql/data
build: labels:
context: ./frontend - "traefik.enable=true"
dockerfile: Dockerfile - "traefik.http.routers.condado.rule=Host(`condado-newsletter.lab`)"
args: - "traefik.http.services.condado.loadbalancer.server.port=80"
VITE_API_BASE_URL: ${VITE_API_BASE_URL} - "homepage.group=Hyperlink"
restart: always - "homepage.name=Condado Newsletter"
ports: - "homepage.description=Automated newsletter generator using AI"
- "80:80" - "homepage.logo=claude-dark.png"
depends_on: - "homepage.href=http://condado-newsletter.lab"
- backend
networks:
- condado-net
volumes: volumes:
postgres-data: postgres-data:
networks: networks:
condado-net: default:
driver: bridge name: traefik
external: true

View File

@@ -4,14 +4,13 @@ services:
postgres: postgres:
image: postgres:16-alpine image: postgres:16-alpine
restart: unless-stopped restart: unless-stopped
container_name: condado-newsletter-postgres
environment: environment:
POSTGRES_DB: condado POSTGRES_DB: condado
POSTGRES_USER: ${SPRING_DATASOURCE_USERNAME} POSTGRES_USER: ${SPRING_DATASOURCE_USERNAME}
POSTGRES_PASSWORD: ${SPRING_DATASOURCE_PASSWORD} POSTGRES_PASSWORD: ${SPRING_DATASOURCE_PASSWORD}
volumes: volumes:
- postgres-data:/var/lib/postgresql/data - postgres-data:/var/lib/postgresql/data
networks:
- condado-net
healthcheck: healthcheck:
test: ["CMD-SHELL", "pg_isready -U ${SPRING_DATASOURCE_USERNAME} -d condado"] test: ["CMD-SHELL", "pg_isready -U ${SPRING_DATASOURCE_USERNAME} -d condado"]
interval: 10s interval: 10s
@@ -20,6 +19,7 @@ services:
# ── Backend (Spring Boot) ──────────────────────────────────────────────────── # ── Backend (Spring Boot) ────────────────────────────────────────────────────
backend: backend:
container_name: condado-newsletter-backend
build: build:
context: ./backend context: ./backend
dockerfile: Dockerfile dockerfile: Dockerfile
@@ -29,7 +29,7 @@ services:
condition: service_healthy condition: service_healthy
environment: environment:
SPRING_PROFILES_ACTIVE: dev SPRING_PROFILES_ACTIVE: dev
SPRING_DATASOURCE_URL: ${SPRING_DATASOURCE_URL} SPRING_DATASOURCE_URL: jdbc:postgresql://postgres:5432/condado
SPRING_DATASOURCE_USERNAME: ${SPRING_DATASOURCE_USERNAME} SPRING_DATASOURCE_USERNAME: ${SPRING_DATASOURCE_USERNAME}
SPRING_DATASOURCE_PASSWORD: ${SPRING_DATASOURCE_PASSWORD} SPRING_DATASOURCE_PASSWORD: ${SPRING_DATASOURCE_PASSWORD}
APP_PASSWORD: ${APP_PASSWORD} APP_PASSWORD: ${APP_PASSWORD}
@@ -44,37 +44,48 @@ services:
IMAP_INBOX_FOLDER: ${IMAP_INBOX_FOLDER} IMAP_INBOX_FOLDER: ${IMAP_INBOX_FOLDER}
OPENAI_API_KEY: ${OPENAI_API_KEY} OPENAI_API_KEY: ${OPENAI_API_KEY}
OPENAI_MODEL: ${OPENAI_MODEL} OPENAI_MODEL: ${OPENAI_MODEL}
LLAMA_BASE_URL: ${LLAMA_BASE_URL}
LLAMA_MODEL: ${LLAMA_MODEL}
APP_RECIPIENTS: ${APP_RECIPIENTS} APP_RECIPIENTS: ${APP_RECIPIENTS}
networks: extra_hosts:
- condado-net - "celtinha.desktop:host-gateway"
- "host.docker.internal:host-gateway"
# ── Frontend + Nginx ───────────────────────────────────────────────────────── # ── Frontend + Nginx ─────────────────────────────────────────────────────────
nginx: nginx:
container_name: condado-newsletter-frontend
build: build:
context: ./frontend context: ./frontend
dockerfile: Dockerfile dockerfile: Dockerfile
args: args:
VITE_API_BASE_URL: ${VITE_API_BASE_URL} VITE_API_BASE_URL: ${VITE_API_BASE_URL}
restart: unless-stopped restart: unless-stopped
ports:
- "80:80"
depends_on: depends_on:
- backend - backend
networks: networks:
- condado-net - traefik
labels:
- "traefik.enable=true"
- "traefik.http.routers.condado.rule=Host(`condado-newsletter.lab`)"
- "traefik.http.services.condado.loadbalancer.server.port=80"
- "homepage.group=Hyperlink"
- "homepage.name=Condado Newsletter"
- "homepage.description=Automated newsletter generator using AI"
- "homepage.logo=claude-dark.png"
- "homepage.href=http://condado-newsletter.lab"
# ── Mailhog (DEV ONLY — SMTP trap) ─────────────────────────────────────────── # ── Mailhog (DEV ONLY — SMTP trap) ───────────────────────────────────────────
mailhog: mailhog:
container_name: condado-newsletter-mailhog
image: mailhog/mailhog:latest image: mailhog/mailhog:latest
restart: unless-stopped restart: unless-stopped
ports: ports:
- "8025:8025" - "8025:8025"
networks:
- condado-net
volumes: volumes:
postgres-data: postgres-data:
networks: networks:
condado-net: traefik:
driver: bridge external: true
name: traefik

View File

@@ -1,6 +1,10 @@
#!/bin/bash #!/bin/bash
set -e set -e
APP_DB_NAME=${APP_DB_NAME:-condado}
APP_DB_USER=${SPRING_DATASOURCE_USERNAME:-condado}
APP_DB_PASSWORD=${SPRING_DATASOURCE_PASSWORD:-condado}
# ── Initialise PostgreSQL data directory on first run ───────────────────────── # ── Initialise PostgreSQL data directory on first run ─────────────────────────
if [ ! -f /var/lib/postgresql/data/PG_VERSION ]; then if [ ! -f /var/lib/postgresql/data/PG_VERSION ]; then
echo "Initialising PostgreSQL data directory..." echo "Initialising PostgreSQL data directory..."
@@ -9,8 +13,8 @@ if [ ! -f /var/lib/postgresql/data/PG_VERSION ]; then
# Start postgres temporarily to create the app database and user # Start postgres temporarily to create the app database and user
su -c "/usr/lib/postgresql/16/bin/pg_ctl -D /var/lib/postgresql/data -w start" postgres su -c "/usr/lib/postgresql/16/bin/pg_ctl -D /var/lib/postgresql/data -w start" postgres
su -c "psql -c \"CREATE USER condado WITH PASSWORD 'condado';\"" postgres su -c "psql -v ON_ERROR_STOP=1 -c \"CREATE USER ${APP_DB_USER} WITH PASSWORD '${APP_DB_PASSWORD}';\"" postgres
su -c "psql -c \"CREATE DATABASE condado OWNER condado;\"" postgres su -c "psql -v ON_ERROR_STOP=1 -c \"CREATE DATABASE ${APP_DB_NAME} OWNER ${APP_DB_USER};\"" postgres
su -c "/usr/lib/postgresql/16/bin/pg_ctl -D /var/lib/postgresql/data -w stop" postgres su -c "/usr/lib/postgresql/16/bin/pg_ctl -D /var/lib/postgresql/data -w stop" postgres
echo "PostgreSQL initialised." echo "PostgreSQL initialised."
@@ -20,9 +24,25 @@ fi
mkdir -p /var/log/supervisor mkdir -p /var/log/supervisor
# ── Defaults for all-in-one local PostgreSQL ───────────────────────────────── # ── Defaults for all-in-one local PostgreSQL ─────────────────────────────────
export SPRING_DATASOURCE_URL=${SPRING_DATASOURCE_URL:-jdbc:postgresql://localhost:5432/condado} export SPRING_DATASOURCE_URL=${SPRING_DATASOURCE_URL:-jdbc:postgresql://localhost:5432/${APP_DB_NAME}}
export SPRING_DATASOURCE_USERNAME=${SPRING_DATASOURCE_USERNAME:-condado} export SPRING_DATASOURCE_USERNAME=${SPRING_DATASOURCE_USERNAME:-${APP_DB_USER}}
export SPRING_DATASOURCE_PASSWORD=${SPRING_DATASOURCE_PASSWORD:-condado} export SPRING_DATASOURCE_PASSWORD=${SPRING_DATASOURCE_PASSWORD:-${APP_DB_PASSWORD}}
export JWT_EXPIRATION_MS=${JWT_EXPIRATION_MS:-86400000}
# ── Log all Spring Boot environment variables for debugging ──────────────────
echo "========================================"
echo "Spring Boot Configuration:"
echo "========================================"
echo "SPRING_DATASOURCE_URL=${SPRING_DATASOURCE_URL}"
echo "SPRING_DATASOURCE_USERNAME=${SPRING_DATASOURCE_USERNAME}"
echo "SPRING_DATASOURCE_PASSWORD=${SPRING_DATASOURCE_PASSWORD}"
echo "JWT_EXPIRATION_MS=${JWT_EXPIRATION_MS}"
echo "JAVA_OPTS=${JAVA_OPTS:-not set}"
echo "OPENAI_API_KEY=${OPENAI_API_KEY:-not set}"
echo "========================================"
# ── Start all services via supervisord ─────────────────────────────────────── # ── Start all services via supervisord ───────────────────────────────────────
# Export unbuffered output for both Python and Java
export PYTHONUNBUFFERED=1
export JAVA_OPTS="${JAVA_OPTS} -Dfile.encoding=UTF-8 -Djava.awt.headless=true"
exec /usr/bin/supervisord -c /etc/supervisor/conf.d/supervisord.conf exec /usr/bin/supervisord -c /etc/supervisor/conf.d/supervisord.conf

View File

@@ -1,27 +1,36 @@
[supervisord] [supervisord]
nodaemon=true nodaemon=true
logfile=/var/log/supervisor/supervisord.log silent=false
logfile=/dev/stdout
logfile_maxbytes=0
pidfile=/var/run/supervisord.pid pidfile=/var/run/supervisord.pid
loglevel=info
[program:postgres] [program:postgres]
command=/usr/lib/postgresql/16/bin/postgres -D /var/lib/postgresql/data command=/usr/lib/postgresql/16/bin/postgres -D /var/lib/postgresql/data
user=postgres user=postgres
autostart=true autostart=true
autorestart=true autorestart=true
stdout_logfile=/var/log/supervisor/postgres.log stdout_logfile=/dev/stdout
stderr_logfile=/var/log/supervisor/postgres.err.log stdout_logfile_maxbytes=0
stderr_logfile=/dev/stderr
stderr_logfile_maxbytes=0
[program:backend] [program:backend]
command=java -jar /app/app.jar command=java -Dspring.output.ansi.enabled=always -Dlogging.level.root=DEBUG -jar /app/app.jar
autostart=true autostart=true
autorestart=true autorestart=true
startsecs=15 startsecs=15
stdout_logfile=/var/log/supervisor/backend.log stdout_logfile=/dev/stdout
stderr_logfile=/var/log/supervisor/backend.err.log stdout_logfile_maxbytes=0
stderr_logfile=/dev/stderr
stderr_logfile_maxbytes=0
[program:nginx] [program:nginx]
command=/usr/sbin/nginx -g "daemon off;" command=/usr/sbin/nginx -g "daemon off;"
autostart=true autostart=true
autorestart=true autorestart=true
stdout_logfile=/var/log/supervisor/nginx.log stdout_logfile=/dev/stdout
stderr_logfile=/var/log/supervisor/nginx.err.log stdout_logfile_maxbytes=0
stderr_logfile=/dev/stderr
stderr_logfile_maxbytes=0

View File

@@ -1,12 +1,12 @@
{ {
"name": "condado-newsletter-frontend", "name": "condado-newsletter-frontend",
"version": "0.2.1", "version": "0.2.2",
"lockfileVersion": 3, "lockfileVersion": 3,
"requires": true, "requires": true,
"packages": { "packages": {
"": { "": {
"name": "condado-newsletter-frontend", "name": "condado-newsletter-frontend",
"version": "0.2.1", "version": "0.2.2",
"dependencies": { "dependencies": {
"@radix-ui/react-dialog": "^1.0.5", "@radix-ui/react-dialog": "^1.0.5",
"@radix-ui/react-dropdown-menu": "^2.0.6", "@radix-ui/react-dropdown-menu": "^2.0.6",

View File

@@ -1,7 +1,7 @@
{ {
"name": "condado-newsletter-frontend", "name": "condado-newsletter-frontend",
"private": true, "private": true,
"version": "0.2.1", "version": "0.2.2",
"type": "module", "type": "module",
"scripts": { "scripts": {
"dev": "vite", "dev": "vite",

View File

@@ -294,6 +294,45 @@ describe('EditTaskPage', () => {
}) })
}) })
it('should_removeOnlyDeletedGeneratedMessage_when_deleteSucceedsWithoutRefetch', async () => {
persistedHistory = [
{
id: 'message-2',
taskId: 'task-1',
label: 'Message #2',
content: 'SUBJECT: Second\nBODY:\nSecond output',
createdAt: '2026-03-27T12:10:00Z',
},
{
id: 'message-1',
taskId: 'task-1',
label: 'Message #1',
content: 'SUBJECT: First\nBODY:\nFirst output',
createdAt: '2026-03-27T12:00:00Z',
},
]
vi.mocked(tasksApi.getTaskGeneratedMessages).mockResolvedValue(persistedHistory)
vi.mocked(tasksApi.deleteTaskGeneratedMessage).mockResolvedValue(undefined)
renderPage()
const secondMessageHistoryItem = await screen.findByRole('button', { name: /^message #2$/i })
expect(await screen.findByRole('button', { name: /^message #1$/i })).toBeInTheDocument()
fireEvent.click(
screen.getByRole('button', {
name: /delete message #1/i,
})
)
await waitFor(() => {
expect(tasksApi.deleteTaskGeneratedMessage).toHaveBeenCalledWith('task-1', 'message-1')
expect(screen.queryByRole('button', { name: /^message #1$/i })).not.toBeInTheDocument()
expect(secondMessageHistoryItem).toBeInTheDocument()
expect(screen.getByText(/Second output/i)).toBeInTheDocument()
})
})
it('should_loadPersistedGeneratedMessageHistory_when_pageLoads', async () => { it('should_loadPersistedGeneratedMessageHistory_when_pageLoads', async () => {
persistedHistory = [ persistedHistory = [
{ {

View File

@@ -89,6 +89,23 @@ describe('EntitiesPage', () => {
}) })
}) })
it('should_removeDeletedEntityFromList_when_deleteSucceeds', async () => {
vi.mocked(entitiesApi.getEntities).mockResolvedValue([mockEntity])
vi.mocked(entitiesApi.deleteEntity).mockResolvedValue(undefined)
render(<EntitiesPage />, { wrapper })
await waitFor(() => {
expect(screen.getByText('Test Entity')).toBeInTheDocument()
})
fireEvent.click(screen.getByRole('button', { name: /delete|deactivate/i }))
await waitFor(() => {
expect(screen.queryByText('Test Entity')).not.toBeInTheDocument()
})
})
it('should_renderDetailLink_when_entitiesLoaded', async () => { it('should_renderDetailLink_when_entitiesLoaded', async () => {
vi.mocked(entitiesApi.getEntities).mockResolvedValue([mockEntity]) vi.mocked(entitiesApi.getEntities).mockResolvedValue([mockEntity])
render(<EntitiesPage />, { wrapper }) render(<EntitiesPage />, { wrapper })

View File

@@ -27,7 +27,7 @@ export default function DashboardPage() {
<div className="grid gap-4 md:grid-cols-2"> <div className="grid gap-4 md:grid-cols-2">
<div className="rounded-xl border border-slate-800 bg-slate-900/70 p-5 shadow-sm"> <div className="rounded-xl border border-slate-800 bg-slate-900/70 p-5 shadow-sm">
<p className="text-sm text-slate-400">Active Entities</p> <p className="text-sm text-slate-400">Active Entities:</p>
<p className="mt-1 text-2xl font-bold">{activeCount} active {activeCount === 1 ? 'entity' : 'entities'}</p> <p className="mt-1 text-2xl font-bold">{activeCount} active {activeCount === 1 ? 'entity' : 'entities'}</p>
</div> </div>
<div className="rounded-xl border border-slate-800 bg-slate-900/70 p-5 shadow-sm"> <div className="rounded-xl border border-slate-800 bg-slate-900/70 p-5 shadow-sm">

View File

@@ -255,8 +255,15 @@ export default function EditTaskPage() {
const deleteGeneratedMessageMutation = useMutation({ const deleteGeneratedMessageMutation = useMutation({
mutationFn: (messageId: string) => deleteTaskGeneratedMessage(taskId, messageId), mutationFn: (messageId: string) => deleteTaskGeneratedMessage(taskId, messageId),
onSuccess: async () => { onSuccess: ( _data, messageId) => {
await queryClient.invalidateQueries({ queryKey: ['task-generated-messages', taskId] }) queryClient.setQueryData(
['task-generated-messages', taskId],
(
currentMessages:
| Awaited<ReturnType<typeof getTaskGeneratedMessages>>
| undefined
) => currentMessages?.filter((message) => message.id !== messageId) ?? []
)
}, },
}) })

View File

@@ -37,7 +37,13 @@ export default function EntitiesPage() {
const deleteMutation = useMutation({ const deleteMutation = useMutation({
mutationFn: (id: string) => deleteEntity(id), mutationFn: (id: string) => deleteEntity(id),
onSuccess: () => queryClient.invalidateQueries({ queryKey: ['entities'] }), onSuccess: (_data, id) => {
queryClient.setQueryData(
['entities'],
(currentEntities: Awaited<ReturnType<typeof getEntities>> | undefined) =>
currentEntities?.filter((entity) => entity.id !== id) ?? []
)
},
}) })
return ( return (

View File

@@ -15,6 +15,9 @@ http {
gzip_types text/plain text/css application/json application/javascript gzip_types text/plain text/css application/json application/javascript
text/xml application/xml application/xml+rss text/javascript; text/xml application/xml application/xml+rss text/javascript;
access_log /dev/stdout;
error_log /dev/stderr;
server { server {
listen 80; listen 80;
server_name _; server_name _;