refactor: quality workflow, docs, tests

Esdras Renan 2025-10-16 19:14:46 -03:00
parent a9caf36b01
commit 68ace0a858
27 changed files with 758 additions and 330 deletions

43
.github/workflows/quality-checks.yml vendored Normal file
View file

@ -0,0 +1,43 @@
name: Quality Checks
on:
push:
branches:
- main
pull_request:
branches:
- main
jobs:
lint-test-build:
name: Lint, Test and Build
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Setup pnpm
uses: pnpm/action-setup@v4
with:
version: 9
- name: Setup Node.js
uses: actions/setup-node@v4
with:
node-version: 20
cache: pnpm
- name: Install dependencies
run: pnpm install --frozen-lockfile
- name: Generate Prisma client
run: pnpm prisma:generate
- name: Lint
run: pnpm lint
- name: Test
run: pnpm test
- name: Build
run: pnpm build

View file

@ -543,7 +543,7 @@ function App() {
const redirectTarget = persona === "manager" ? "/dashboard" : "/portal/tickets"
const url = `${resolvedAppUrl}/machines/handshake?token=${encodeURIComponent(token)}&redirect=${encodeURIComponent(redirectTarget)}`
window.location.href = url
}, [token, config?.accessRole, resolvedAppUrl])
}, [token, config?.accessRole, resolvedAppUrl, store])
async function reprovision() {
if (!store) return

View file

@ -1,7 +1,5 @@
import { action, mutation, query } from "./_generated/server"
import { api } from "./_generated/api"
import { mutation, query } from "./_generated/server"
import { v } from "convex/values"
import type { Id } from "./_generated/dataModel"
export const log = mutation({
args: {

View file

@ -107,7 +107,7 @@ async function getActiveToken(
const tokenHash = hashToken(tokenValue)
const token = await ctx.db
.query("machineTokens")
.withIndex("by_token_hash", (q: any) => q.eq("tokenHash", tokenHash))
.withIndex("by_token_hash", (q) => q.eq("tokenHash", tokenHash))
.unique()
if (!token) {
@ -163,6 +163,42 @@ function mergeMetadata(current: unknown, patch: Record<string, unknown>) {
return base
}
type JsonRecord = Record<string, unknown>
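// Narrowing helpers: safely coerce `unknown` JSON payloads (metrics, inventory, metadata) into typed shapes.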
function ensureRecord(value: unknown): JsonRecord | null {
return isObject(value) ? (value as JsonRecord) : null
}
function ensureRecordArray(value: unknown): JsonRecord[] {
if (!Array.isArray(value)) return []
return value.filter(isObject) as JsonRecord[]
}
function ensureFiniteNumber(value: unknown): number | null {
const num = typeof value === "number" ? value : Number(value)
return Number.isFinite(num) ? num : null
}
function ensureString(value: unknown): string | null {
return typeof value === "string" ? value : null
}
function getNestedRecord(root: JsonRecord | null, ...keys: string[]): JsonRecord | null {
let current: JsonRecord | null = root
for (const key of keys) {
if (!current) return null
current = ensureRecord(current[key])
}
return current
}
function getNestedRecordArray(root: JsonRecord | null, ...keys: string[]): JsonRecord[] {
if (keys.length === 0) return []
const parent = getNestedRecord(root, ...keys.slice(0, -1))
if (!parent) return []
return ensureRecordArray(parent[keys[keys.length - 1]])
}
type PostureFinding = {
kind: "CPU_HIGH" | "SERVICE_DOWN" | "SMART_FAIL"
message: string
@ -179,16 +215,16 @@ async function createTicketForAlert(
const actorEmail = process.env["MACHINE_ALERTS_TICKET_REQUESTER_EMAIL"] ?? "admin@sistema.dev"
const actor = await ctx.db
.query("users")
.withIndex("by_tenant_email", (q: any) => q.eq("tenantId", tenantId).eq("email", actorEmail))
.withIndex("by_tenant_email", (q) => q.eq("tenantId", tenantId).eq("email", actorEmail))
.unique()
if (!actor) return null
// pick first category/subcategory if not configured
const category = await ctx.db.query("ticketCategories").withIndex("by_tenant", (q: any) => q.eq("tenantId", tenantId)).first()
const category = await ctx.db.query("ticketCategories").withIndex("by_tenant", (q) => q.eq("tenantId", tenantId)).first()
if (!category) return null
const subcategory = await ctx.db
.query("ticketSubcategories")
.withIndex("by_category_order", (q: any) => q.eq("categoryId", category._id))
.withIndex("by_category_order", (q) => q.eq("categoryId", category._id))
.first()
if (!subcategory) return null
@ -217,20 +253,32 @@ async function createTicketForAlert(
async function evaluatePostureAndMaybeRaise(
ctx: MutationCtx,
machine: Doc<"machines">,
args: { metrics?: any; inventory?: any; metadata?: any }
args: {
metrics?: JsonRecord | null
inventory?: JsonRecord | null
metadata?: JsonRecord | null
}
) {
const findings: PostureFinding[] = []
// CPU time window (5 minutes)
const now = Date.now()
const metrics = args.metrics ?? (args.metadata?.metrics ?? null)
const metaObj = machine.metadata && typeof machine.metadata === "object" ? (machine.metadata as Record<string, unknown>) : {}
const prevWindow: Array<{ ts: number; usage: number }> = Array.isArray((metaObj as any).cpuWindow)
? (((metaObj as any).cpuWindow as Array<any>).map((p) => ({ ts: Number(p.ts ?? 0), usage: Number(p.usage ?? NaN) })).filter((p) => Number.isFinite(p.ts) && Number.isFinite(p.usage)))
: []
const metadataPatch = ensureRecord(args.metadata)
const metrics = ensureRecord(args.metrics) ?? ensureRecord(metadataPatch?.["metrics"])
const metaObj: JsonRecord = ensureRecord(machine.metadata) ?? {}
const prevWindowRecords = ensureRecordArray(metaObj["cpuWindow"])
const prevWindow: Array<{ ts: number; usage: number }> = prevWindowRecords
.map((entry) => {
const ts = ensureFiniteNumber(entry["ts"])
const usage = ensureFiniteNumber(entry["usage"])
if (ts === null || usage === null) return null
return { ts, usage }
})
.filter((entry): entry is { ts: number; usage: number } => entry !== null)
const window = prevWindow.filter((p) => now - p.ts <= 5 * 60 * 1000)
const usage = Number((metrics as any)?.cpuUsagePercent ?? (metrics as any)?.cpu_usage_percent ?? NaN)
if (Number.isFinite(usage)) {
const usage =
ensureFiniteNumber(metrics?.["cpuUsagePercent"]) ?? ensureFiniteNumber(metrics?.["cpu_usage_percent"])
if (usage !== null) {
window.push({ ts: now, usage })
}
if (window.length > 0) {
@ -240,30 +288,48 @@ async function evaluatePostureAndMaybeRaise(
}
}
const inventory = args.inventory ?? (args.metadata?.inventory ?? null)
if (inventory && typeof inventory === "object") {
const services = (inventory as any).services
if (Array.isArray(services)) {
const inventory = ensureRecord(args.inventory) ?? ensureRecord(metadataPatch?.["inventory"])
if (inventory) {
const services = ensureRecordArray(inventory["services"])
if (services.length > 0) {
const criticalList = (process.env["MACHINE_CRITICAL_SERVICES"] ?? "")
.split(/[\s,]+/)
.map((s) => s.trim().toLowerCase())
.filter(Boolean)
const criticalSet = new Set(criticalList)
const firstDown = services.find((s: any) => typeof s?.name === "string" && String(s.status ?? s?.Status ?? "").toLowerCase() !== "running")
const firstDown = services.find((service) => {
const status = ensureString(service["status"]) ?? ensureString(service["Status"]) ?? ""
const name = ensureString(service["name"]) ?? ensureString(service["Name"]) ?? ""
return Boolean(name) && status.toLowerCase() !== "running"
})
if (firstDown) {
const name = String(firstDown.name ?? firstDown.Name ?? "serviço")
const name = ensureString(firstDown["name"]) ?? ensureString(firstDown["Name"]) ?? "serviço"
const sev: "warning" | "critical" = criticalSet.has(name.toLowerCase()) ? "critical" : "warning"
findings.push({ kind: "SERVICE_DOWN", message: `Serviço em falha: ${name}`, severity: sev })
}
}
const smart = (inventory as any).extended?.linux?.smart
if (Array.isArray(smart)) {
const failing = smart.find((e: any) => e?.smart_status && e.smart_status.passed === false)
if (failing) {
const model = failing?.model_name ?? failing?.model_family ?? "Disco"
const serial = failing?.serial_number ?? failing?.device?.name ?? "—"
const temp = failing?.temperature?.current ?? failing?.temperature?.value ?? null
const details = temp ? `${model} (${serial}) · ${temp}ºC` : `${model} (${serial})`
const smartEntries = getNestedRecordArray(inventory, "extended", "linux", "smart")
if (smartEntries.length > 0) {
const firstFail = smartEntries.find((disk) => {
const status = ensureString(disk["smart_status"]) ?? ensureString(disk["status"]) ?? ""
return status.toLowerCase() !== "ok"
})
if (firstFail) {
const model =
ensureString(firstFail["model_name"]) ??
ensureString(firstFail["model_family"]) ??
ensureString(firstFail["model"]) ??
"Disco"
const deviceRecord = getNestedRecord(firstFail, "device")
const serial =
ensureString(firstFail["serial_number"]) ??
ensureString(deviceRecord?.["name"]) ??
"—"
const temperatureRecord = getNestedRecord(firstFail, "temperature")
const temp =
ensureFiniteNumber(temperatureRecord?.["current"]) ??
ensureFiniteNumber(temperatureRecord?.["value"])
const details = temp !== null ? `${model} (${serial}) · ${temp}ºC` : `${model} (${serial})`
findings.push({ kind: "SMART_FAIL", message: `SMART em falha: ${details}`, severity: "critical" })
}
}
@ -279,8 +345,8 @@ async function evaluatePostureAndMaybeRaise(
postureAlerts: findings,
lastPostureAt: now,
}
const prevMeta = (machine.metadata && typeof machine.metadata === "object") ? (machine.metadata as Record<string, unknown>) : null
const lastAtPrev = typeof prevMeta?.lastPostureAt === "number" ? (prevMeta!.lastPostureAt as number) : 0
const prevMeta = ensureRecord(machine.metadata)
const lastAtPrev = ensureFiniteNumber(prevMeta?.["lastPostureAt"]) ?? 0
await ctx.db.patch(machine._id, { metadata: mergeMetadata(machine.metadata, record), updatedAt: now })
if ((process.env["MACHINE_ALERTS_CREATE_TICKETS"] ?? "true").toLowerCase() !== "true") return
@ -934,10 +1000,9 @@ export const rename = mutation({
args: {
machineId: v.id("machines"),
actorId: v.id("users"),
tenantId: v.optional(v.string()),
hostname: v.string(),
},
handler: async (ctx, { machineId, actorId, tenantId, hostname }) => {
handler: async (ctx, { machineId, actorId, hostname }) => {
// Reuses requireStaff via the tickets.ts helpers
const machine = await ctx.db.get(machineId)
if (!machine) {

View file

@ -204,24 +204,32 @@ async function ensureCompany(
let id: Id<"companies">
if (existing) {
const existingIsAvulso = existing.isAvulso ?? undefined
const targetIsAvulso = payload.isAvulso ?? existingIsAvulso
const targetCnpj = payload.cnpj ?? undefined
const targetDomain = payload.domain ?? undefined
const targetPhone = payload.phone ?? undefined
const targetDescription = payload.description ?? undefined
const targetAddress = payload.address ?? undefined
const needsPatch =
existing.name !== payload.name ||
(existing as any).isAvulso !== (payload.isAvulso ?? (existing as any).isAvulso) ||
existing.cnpj !== (payload.cnpj ?? undefined) ||
existing.domain !== (payload.domain ?? undefined) ||
existing.phone !== (payload.phone ?? undefined) ||
existing.description !== (payload.description ?? undefined) ||
existing.address !== (payload.address ?? undefined) ||
existingIsAvulso !== targetIsAvulso ||
(existing.cnpj ?? undefined) !== targetCnpj ||
(existing.domain ?? undefined) !== targetDomain ||
(existing.phone ?? undefined) !== targetPhone ||
(existing.description ?? undefined) !== targetDescription ||
(existing.address ?? undefined) !== targetAddress ||
existing.provisioningCode !== payload.provisioningCode
if (needsPatch) {
await ctx.db.patch(existing._id, {
name: payload.name,
isAvulso: payload.isAvulso,
cnpj: payload.cnpj,
domain: payload.domain,
phone: payload.phone,
description: payload.description,
address: payload.address,
isAvulso: targetIsAvulso,
cnpj: targetCnpj,
domain: targetDomain,
phone: targetPhone,
description: targetDescription,
address: targetAddress,
provisioningCode: payload.provisioningCode,
updatedAt: Date.now(),
})
@ -359,7 +367,7 @@ export const exportTenantSnapshot = query({
companies: companies.map((company) => ({
slug: company.slug,
name: company.name,
isAvulso: (company as any).isAvulso ?? false,
isAvulso: company.isAvulso ?? false,
cnpj: company.cnpj ?? null,
domain: company.domain ?? null,
phone: company.phone ?? null,

View file

@ -347,7 +347,7 @@ export const list = query({
priority: t.priority,
channel: t.channel,
queue: queueName,
company: company ? { id: company._id, name: company.name, isAvulso: (company as any).isAvulso ?? false } : null,
company: company ? { id: company._id, name: company.name, isAvulso: company.isAvulso ?? false } : null,
requester: requester && {
id: requester._id,
name: requester.name,
@ -377,14 +377,14 @@ export const list = query({
subcategory: subcategorySummary,
workSummary: {
totalWorkedMs: t.totalWorkedMs ?? 0,
internalWorkedMs: (t as any).internalWorkedMs ?? 0,
externalWorkedMs: (t as any).externalWorkedMs ?? 0,
internalWorkedMs: t.internalWorkedMs ?? 0,
externalWorkedMs: t.externalWorkedMs ?? 0,
activeSession: activeSession
? {
id: activeSession._id,
agentId: activeSession.agentId,
startedAt: activeSession.startedAt,
workType: (activeSession as any).workType ?? "INTERNAL",
workType: activeSession.workType ?? "INTERNAL",
}
: null,
},
@ -525,7 +525,7 @@ export const getById = query({
priority: t.priority,
channel: t.channel,
queue: queueName,
company: company ? { id: company._id, name: company.name, isAvulso: (company as any).isAvulso ?? false } : null,
company: company ? { id: company._id, name: company.name, isAvulso: company.isAvulso ?? false } : null,
requester: requester && {
id: requester._id,
name: requester.name,
@ -566,14 +566,14 @@ export const getById = query({
: null,
workSummary: {
totalWorkedMs: t.totalWorkedMs ?? 0,
internalWorkedMs: (t as any).internalWorkedMs ?? 0,
externalWorkedMs: (t as any).externalWorkedMs ?? 0,
internalWorkedMs: t.internalWorkedMs ?? 0,
externalWorkedMs: t.externalWorkedMs ?? 0,
activeSession: activeSession
? {
id: activeSession._id,
agentId: activeSession.agentId,
startedAt: activeSession.startedAt,
workType: (activeSession as any).workType ?? "INTERNAL",
workType: activeSession.workType ?? "INTERNAL",
}
: null,
},
@ -1130,14 +1130,14 @@ export const workSummary = query({
return {
ticketId,
totalWorkedMs: ticket.totalWorkedMs ?? 0,
internalWorkedMs: (ticket as any).internalWorkedMs ?? 0,
externalWorkedMs: (ticket as any).externalWorkedMs ?? 0,
internalWorkedMs: ticket.internalWorkedMs ?? 0,
externalWorkedMs: ticket.externalWorkedMs ?? 0,
activeSession: activeSession
? {
id: activeSession._id,
agentId: activeSession.agentId,
startedAt: activeSession.startedAt,
workType: (activeSession as any).workType ?? "INTERNAL",
workType: activeSession.workType ?? "INTERNAL",
}
: null,
}
@ -1275,7 +1275,7 @@ export const pauseWork = mutation({
pauseNote: note ?? "",
})
const sessionType = ((session as any).workType ?? "INTERNAL").toUpperCase()
const sessionType = (session.workType ?? "INTERNAL").toUpperCase()
const deltaInternal = sessionType === "INTERNAL" ? durationMs : 0
const deltaExternal = sessionType === "EXTERNAL" ? durationMs : 0
@ -1283,8 +1283,8 @@ export const pauseWork = mutation({
working: false,
activeSessionId: undefined,
totalWorkedMs: (ticket.totalWorkedMs ?? 0) + durationMs,
internalWorkedMs: ((ticket as any).internalWorkedMs ?? 0) + deltaInternal,
externalWorkedMs: ((ticket as any).externalWorkedMs ?? 0) + deltaExternal,
internalWorkedMs: (ticket.internalWorkedMs ?? 0) + deltaInternal,
externalWorkedMs: (ticket.externalWorkedMs ?? 0) + deltaExternal,
updatedAt: now,
})

124
docs/DEPLOY-RUNBOOK.md Normal file
View file

@ -0,0 +1,124 @@
# Deploy runbook (Swarm)
This guide documents the current deploy flow and the main diagnostic/fix steps that solved the problem of the frontend not updating even with CI green.
## Overview (how you work)
- You push to `main` and wait for GitHub Actions to finish.
- The pipeline creates an immutable build on the server at `/home/renan/apps/sistema.build.<release>`.
- A stable symlink points to the active release: `/home/renan/apps/sistema.current`.
- The `sistema_web` service always mounts `/home/renan/apps/sistema.current:/app`. To update, just switch the symlink and force the task.
Result: frontend/backend come up with the new code without editing the stack on every release.
## Release flow (minimal)
1. Generate the build at `/home/renan/apps/sistema.build.<stamp-or-sha>` (CI does this).
2. Update the symlink: `ln -sfn /home/renan/apps/sistema.build.<new> /home/renan/apps/sistema.current`.
3. Roll out the web service: `docker service update --force sistema_web`.
4. Optional: if `stack.yml` changed, apply it: `docker stack deploy --with-registry-auth -c /home/renan/apps/sistema.build.<new>/stack.yml sistema`.
## Stable stack (essentials)
- Fixed mount: `/home/renan/apps/sistema.current:/app` (do not interpolate APP_DIR).
- Inline command (no script), running migrations at startup:
- `command: ["bash","-lc","corepack enable && corepack prepare pnpm@9 --activate && pnpm exec prisma migrate deploy && pnpm start -p 3000"]`
- Required env vars (valid URLs):
- `DATABASE_URL=file:/app/data/db.sqlite`
- `NEXT_PUBLIC_CONVEX_URL=http://sistema_convex_backend:3210`
- `NEXT_PUBLIC_APP_URL=https://tickets.esdrasrenan.com.br`
- `BETTER_AUTH_URL=https://tickets.esdrasrenan.com.br`
- Update with `stop-first` (avoids `database is locked` on SQLite) + healthcheck.
## Stack Prisma/SQLite
- The stack volume is namespaced: `sistema_sistema_db` (not `sistema_db`).
- When running Prisma outside Swarm, ALWAYS use this volume and the same `DATABASE_URL`:
```
APP_DIR=/home/renan/apps/sistema.current
docker run --rm -it \
-e DATABASE_URL=file:/app/data/db.sqlite \
-v "$APP_DIR:/app" -v sistema_sistema_db:/app/data -w /app \
node:20-bullseye bash -lc 'corepack enable; corepack prepare pnpm@9 --activate; pnpm i --no-frozen-lockfile; pnpm exec prisma migrate status'
```
## Quick diagnostics
- Current task + errors: `docker service ps --no-trunc sistema_web`
- Fresh service logs: `docker service logs --since=2m -f sistema_web`
- Applied spec (Args + Mounts):
```
docker service inspect sistema_web \
--format '{{json .Spec.TaskTemplate.ContainerSpec.Args}} {{json .Spec.TaskTemplate.ContainerSpec.Mounts}}'
```
- Service env vars: `docker service inspect sistema_web --format '{{json .Spec.TaskTemplate.ContainerSpec.Env}}'`
## The incident (frontend not updating): cause and fixes
Symptoms:
- Actions green, but old UI; logs with rollbacks from `docker service update`.
Causes found:
1) The service still mounted the old build and ran the old command (the spec never changed).
- Inspect showed `Source=/home/renan/apps/sistema.build.<old> -> /app` and the old inline command.
- Fix: redeploy the stack with the mount on `/home/renan/apps/sistema.current` + `docker service update --force sistema_web`.
2) Migration P3009 ("failed migrations") on the stack's SQLite.
- Reason: migrations were resolved/applied on the wrong volume (`sistema_db`) while the service uses `sistema_sistema_db`.
- Deterministic fix:
- `docker service scale sistema_web=0`
- `prisma migrate resolve --rolled-back 20251015223259_add_company_provisioning_code` on the `sistema_sistema_db` volume (command above in "Stack Prisma/SQLite").
- `pnpm exec prisma migrate deploy`
- `docker service scale sistema_web=1` (or `update --force`).
3) Rollback caused by a missing script (`/app/scripts/start-web.sh`).
- The task crashed with exit 127 because the build did not include the script.
- Fix: return to the inline command in the stack (no script dependency) OR guarantee the script exists in the build and is executable.
4) Env failure (Invalid URL in `NEXT_PUBLIC_APP_URL`/`BETTER_AUTH_URL`).
- Fix: set valid URLs in the stack or via `docker service update --env-add ...`.
## Fix cheatsheet
- Force a task rollout:
- `docker service update --force sistema_web`
- Apply a new build (without touching the stack):
- `ln -sfn /home/renan/apps/sistema.build.<new> /home/renan/apps/sistema.current`
- `docker service update --force sistema_web`
- Fix the service mount/args (hotfix):
```
docker service update \
--mount-rm target=/app \
--mount-add type=bind,src=/home/renan/apps/sistema.current,dst=/app \
--args 'bash -lc "corepack enable && corepack prepare pnpm@9 --activate && pnpm exec prisma migrate deploy && pnpm start -p 3000"' \
sistema_web
```
- Resolve P3009 (correct volume) and apply migrations:
```
APP_DIR=/home/renan/apps/sistema.current
docker service scale sistema_web=0
docker run --rm -it -e DATABASE_URL=file:/app/data/db.sqlite \
-v "$APP_DIR:/app" -v sistema_sistema_db:/app/data -w /app \
node:20-bullseye bash -lc 'corepack enable; corepack prepare pnpm@9 --activate; pnpm i --no-frozen-lockfile; pnpm exec prisma migrate resolve --rolled-back 20251015223259_add_company_provisioning_code; pnpm exec prisma migrate deploy'
docker service scale sistema_web=1
```
- Create the DB if missing (P1003):
- `docker run --rm -v sistema_sistema_db:/data busybox sh -lc ': >/data/db.sqlite'`
- Adjust env vars at runtime:
- `docker service update --env-add NEXT_PUBLIC_APP_URL=https://tickets.esdrasrenan.com.br --env-add BETTER_AUTH_URL=https://tickets.esdrasrenan.com.br sistema_web`
## Final notes
- Because the stack mounts `/home/renan/apps/sistema.current`, a new release only requires updating the symlink and forcing the task. `stack.yml` only needs redeploying when you change labels/env vars/services.
- If the UI seems unchanged, validate the mount/args via inspect, check the current task's logs, and force a hard reload in the browser.

View file

@ -1,79 +1,68 @@
# DEV Guide: Database (Prisma), Auth, and Desktop (Tauri)
# Development Guide (16/10/2025)
This guide describes what was fixed, why, and how to proceed in DEV, including how to build the Desktop (Tauri) executable locally.
This document consolidates the current state of the development environment, describes how to run lint/test/build locally (and in CI), and records recurring errors with their solutions.
## What was done and why
## Quick summary
- Prisma fix in DEV (P2021)
- Problem: the Prisma CLI was reading the root `.env` (production) and the local database lacked the Better Auth tables (e.g. `AuthUser`, `AuthSession`).
- Decision: avoid conflicts between the root `.env` and `prisma/.env`; we pass `DATABASE_URL=...` inline to the Prisma commands, pointing to `./prisma/db.dev.sqlite` in DEV only.
- Result: the `prisma/prisma/db.dev.sqlite` database was created/synced and the user seed ran.
- **Node/PNPM**: Node 20 + pnpm 9 (enable via `corepack enable && corepack prepare pnpm@9 --activate`).
- **Lint/Test/Build**: `pnpm lint`, `pnpm test`, `pnpm build`. The test script uses `vitest --run --passWithNoTests`, eliminating the interactive watch mode.
- **DEV database**: SQLite at `prisma/prisma/db.dev.sqlite`. Set `DATABASE_URL="file:./prisma/db.dev.sqlite"` when invoking the Prisma CLI.
- **Desktop (Tauri)**: source in `apps/desktop`. Uses Radix tabs + shadcn-like components, integrates with the `/api/machines/*` endpoints, and supports automatic updates via GitHub Releases.
- **CI**: the `Quality Checks` workflow runs lint/test/build for pushes and PRs on `main`, on top of the existing deploy pipeline.
- Desktop tabs migrated to shadcn/Radix
- Tauri does not run Next.js. To keep the look consistent with the web app, we migrated the tabs to a shadcn-like wrapper using Radix Tabs and Tailwind (`apps/desktop/src/components/ui/tabs.tsx`).
- We added a status badge equivalent to the web one and the “Enviar inventário agora” (send inventory now) button (POST `/api/machines/inventory`).
## Database (Prisma)
- CI (GitHub Actions) and lockfile
- Error resolved: `ERR_PNPM_OUTDATED_LOCKFILE` in `apps/desktop`. We updated `pnpm-lock.yaml` to reflect the new Desktop dependencies.
- The desktop workflow uses `--frozen-lockfile`; keeping the lockfile in sync avoids failures.
1. Generate/update the local schema:
## Database flow (DEV)
```bash
DATABASE_URL="file:./prisma/db.dev.sqlite" pnpm exec prisma db push
DATABASE_URL="file:./prisma/db.dev.sqlite" pnpm prisma:generate
DATABASE_URL="file:./prisma/db.dev.sqlite" pnpm auth:seed
```
- DEV database: `file:./prisma/db.dev.sqlite` (file: `prisma/prisma/db.dev.sqlite`).
- Commands (force the DEV target in the current terminal):
2. Run the Next.js app:
```
DATABASE_URL="file:./prisma/db.dev.sqlite" pnpm exec prisma db push
DATABASE_URL="file:./prisma/db.dev.sqlite" pnpm prisma:generate
DATABASE_URL="file:./prisma/db.dev.sqlite" pnpm auth:seed
```
```bash
pnpm dev
```
- Run the app locally (Next):
3. Default credentials (seed): `admin@sistema.dev / admin123`.
```
pnpm dev
```
> **Why inline?** We avoid declaring `DATABASE_URL` in `prisma/.env` because Prisma also reads the root `.env` (production). The inline override guarantees the DEV database stays isolated.
- Test login (DEV): `admin@sistema.dev / admin123`
## Quality commands
- Prisma Studio (DEV):
- `pnpm lint`: runs ESLint (flat config) over the project files.
- `pnpm test`: Vitest in non-interactive mode (`--run --passWithNoTests`). Use `pnpm test -- --watch` only when you want watch mode locally; see the example test after this list.
- `pnpm build`: `next build --turbopack`.
- `pnpm prisma:generate`: required before the build whenever the Prisma client changes.
```
DATABASE_URL="file:./prisma/db.dev.sqlite" pnpm exec prisma studio
```
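A minimal sketch of a Vitest suite that `pnpm test` picks up (the file name is hypothetical; any `*.test.ts` in the repo is collected):
```ts
// example.test.ts (hypothetical file): collected by `pnpm test`
import { describe, expect, it } from "vitest"

describe("quality gate smoke", () => {
  it("runs once and exits (no watch mode, thanks to --run)", () => {
    // --passWithNoTests keeps CI green even when a package has no suites yet
    expect(1 + 1).toBe(2)
  })
})
```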
### CI automation
Note: avoid `prisma/.env` in this setup, as it conflicts with the root `.env` (Prisma reports the conflict and fails the command). Keeping the inline override is the safest way to isolate DEV without touching production.
File: `.github/workflows/quality-checks.yml`
Steps:
1. Install dependencies (`pnpm install --frozen-lockfile`).
2. `pnpm prisma:generate`.
3. `pnpm lint`.
4. `pnpm test`.
5. `pnpm build`.
The workflow triggers on every `push`/`pull_request` to `main` and provides immediate feedback without depending on the deploy pipeline.
## Desktop (Tauri)
- Where the main changes were made:
- `apps/desktop/src/components/ui/tabs.tsx` (Radix Tabs + shadcn-like styles)
- `apps/desktop/src/main.tsx` (tabbed layout: Resumo/Inventário/Diagnóstico/Configurações; status badge; “Enviar inventário agora” button; collaborator/manager access-profile selection and linked-user sync).
- `apps/desktop/src-tauri/src/agent.rs` (collection and normalization of hardware, disks, GPUs, and per-OS extended inventory).
- Radix Tabs + shadcn styles: `apps/desktop/src/components/ui/tabs.tsx`.
- Main panel: `apps/desktop/src/main.tsx` (Resumo/Inventário/Diagnóstico/Configurações tabs, manual inventory submission, persona selection (collaborator/manager), and linked-user sync).
- Collection/hardware: `apps/desktop/src-tauri/src/agent.rs`.
- Build variables:
- `VITE_APP_URL` (web URL).
- `VITE_API_BASE_URL` (API).
- Desktop environment variables (at build time):
- `VITE_APP_URL` and `VITE_API_BASE_URL`: by default, use the web application's URL.
### Local build
### Automatic updates (GitHub)
1. Generate the updater key pair (`pnpm tauri signer generate -- -w ~/.tauri/raven.key`) and set the `TAURI_SIGNING_PRIVATE_KEY` and `TAURI_SIGNING_PRIVATE_KEY_PASSWORD` environment variables before running `pnpm -C apps/desktop tauri build`.
2. Ensure `bundle.createUpdaterArtifacts` is enabled (already configured) to generate the `.nsis`/`.AppImage` packages and the `.sig` files.
3. Publish each OS's artifacts in a GitHub release and update the public `latest.json` (e.g. in the repository itself or a gist) with `version`, `notes`, `pub_date`, and the per-platform entries (`url` and `signature`).
4. The agent already queries the updater on startup and also has the “Verificar atualizações” button on the Configurações tab. When a new version is detected, the download runs in the background and the app restarts automatically after `downloadAndInstall`.
### Building the executable locally
You can build the executable locally without needing the VPS. Only the target operating system changes (Linux/Windows/macOS). Tauri recommends compiling on each OS to get that OS's native bundle. In production, GitHub Actions already does this in a matrix.
1) General prerequisites
- Node 20 + pnpm 9 (via Corepack)
- Rust (stable): `rustup` installed
- System dependencies (Linux):
- `libwebkit2gtk-4.1-dev libayatana-appindicator3-dev librsvg2-dev libxdo-dev libssl-dev build-essential curl wget file`
2) Install the desktop deps and build
```
```bash
corepack enable && corepack prepare pnpm@9 --activate
pnpm -C apps/desktop install
VITE_APP_URL=http://localhost:3000 \
@ -81,27 +70,28 @@ VITE_API_BASE_URL=http://localhost:3000 \
pnpm -C apps/desktop tauri build
```
3) Build output
- Artifacts land in: `apps/desktop/src-tauri/target/release/bundle/`
- On Linux: `.AppImage`/`.deb`/`.rpm` (depending on the target)
- On Windows/macOS: OS-specific executable/installer (for signing, use keys/a CA if desired)
- To ship OTA updates, publish a GitHub release with the artifacts and `latest.json`; the updater plugin checks the URL configured in `tauri.conf.json`.
Artifacts: `apps/desktop/src-tauri/target/release/bundle/`.
### Building on the VPS vs. locally
### OTA updates
- There is no functional difference beyond the target OS and possible signing keys. Use the VPS only if you want to build Linux packages in an isolated environment. For Windows/macOS, prefer building on those OSes or using the GitHub Actions matrix (already configured).
1. Generate keys (`pnpm tauri signer generate`).
2. Set `TAURI_SIGNING_PRIVATE_KEY` (+ password) in the build environment.
3. Publish the packages and a `latest.json` in a GitHub release (see the sketch after this list).
4. The app checks on startup and via the “Verificar atualizações” button.
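For reference, a minimal `latest.json` sketch (field names follow the Tauri updater format cited above; owner/repo, URLs, and signatures are placeholders):
```json
{
  "version": "0.2.0",
  "notes": "Bug fixes",
  "pub_date": "2025-10-16T12:00:00Z",
  "platforms": {
    "windows-x86_64": {
      "url": "https://github.com/<owner>/<repo>/releases/download/v0.2.0/app_0.2.0_x64-setup.nsis.zip",
      "signature": "<contents of the generated .sig file>"
    },
    "linux-x86_64": {
      "url": "https://github.com/<owner>/<repo>/releases/download/v0.2.0/app_0.2.0_amd64.AppImage.tar.gz",
      "signature": "<contents of the generated .sig file>"
    }
  }
}
```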
## CI/CD notes
## Recurring errors and solutions
- `desktop-release.yml` (Tauri): installs dependencies, builds per OS, and publishes artifacts. Keeping `pnpm-lock.yaml` up to date lets the `--frozen-lockfile` step pass.
- `ci-cd-web-desktop.yml`: already uses `pnpm install --no-frozen-lockfile` for the web app, avoiding failures in integration pipelines.
- The provisioning smoke test can be turned off by setting `RUN_MACHINE_SMOKE=false` (default); when you want to exercise the complete register/heartbeat flow, set `RUN_MACHINE_SMOKE=true`.
| Symptom | Cause | Fix |
| --- | --- | --- |
| `ERR_PNPM_OUTDATED_LOCKFILE` in the pipeline | Desktop dependencies changed without updating `pnpm-lock.yaml` | Run `pnpm install` at the root and commit the lockfile. |
| Prisma fails with `P2021` / Better Auth tables missing | The CLI read the root `.env` (production) | Use `DATABASE_URL="file:./prisma/db.dev.sqlite"` in the commands. |
| Vitest hangs in watch mode | `pnpm test` script lacked `--run` and CI detects a TTY | Adjusted to `vitest --run --passWithNoTests`. Locally, use `pnpm test -- --watch` if you want watch mode. |
| Desktop cannot find the updater | Missing `latest.json` or invalid signature | Publish a release with the `*.sig` files and a `latest.json` pointing to the correct packages. |
## Troubleshooting
## Useful references
- Sign-in 500 after `db push`/seed:
- Check the app terminal and confirm the `AuthUser` table exists in Prisma Studio (DEV target).
- **Deploy (Swarm)**: see `docs/DEPLOY-RUNBOOK.md`.
- **Desktop agent / heartbeat plan**: `docs/plano-app-desktop-maquinas.md`.
- **Incident history**: `docs/historico-agente-desktop-2025-10-10.md`.
- `ERR_PNPM_OUTDATED_LOCKFILE` on the Desktop:
- Update `pnpm-lock.yaml` at the root after changing dependencies in `apps/desktop/package.json`.
- Alternative: use `--no-frozen-lockfile` (not recommended for reproducible releases).
> Last reviewed: 16/10/2025. Update this guide whenever the DEV flow or automations change.

51
docs/STATUS-2025-10-16.md Normal file
View file

@ -0,0 +1,51 @@
# Project Status (16/10/2025)
Reference document on the current state of the system (web + desktop), recent improvements, and points of attention.
## 1. Overview
- **Web (Next.js 15 + Convex)**: clean build (`pnpm build`), lint with zero warnings, and stable tests (Vitest in non-interactive mode).
- **Desktop (Tauri)**: provisioning and heartbeat flow operational; consolidated inventory with multi-platform collection; OTA updates supported.
- **CI**: the `Quality Checks` workflow runs lint/test/build on every push/PR to `main`; the deploy pipeline (`ci-cd-web-desktop.yml`) remains responsible for syncing with the VPS.
- **Infra**: deploy documented in the runbook (Swarm with the `sistema.current` symlink). Prisma migrations and critical variables mapped.
## 2. Improvements completed on 16/10/2025
| Item | Description | Impact |
| --- | --- | --- |
| **Convex centralization** | `createConvexClient` helper and machine-cookie normalization (`src/server/convex-client.ts`, `src/server/machines/context.ts`); see the sketch below the table. | The `/api/machines/*` route code got leaner and resilient to configuration errors. |
| **Auth/Login redirect** | Role/persona-based redirect without `any`, with explicit dependencies (`src/app/login/login-page-client.tsx`). | Avoids hook warnings and guarantees the correct route for machines/collaborators. |
| **Ticket header** | Assignee sync with complete dependencies (`ticket-summary-header.tsx`). | Removed a lint warning and prevents inconsistent state. |
| **Posture / inventory** | Type guards and normalization of SMART/service metrics (`convex/machines.ts`). | Reduced `any`, improved alert detection and metadata consistency. |
| **Docs** | Full revision of `docs/DEV.md`, new `STATUS-2025-10-16.md`, uniform structure, and recorded error cases. | Lean, up-to-date documentation with clear trails for DEV/CI/Deploy. |
| **Tests in CI** | New `.github/workflows/quality-checks.yml` workflow and the `pnpm test` script in non-interactive mode. | Prevents hangs and guarantees automatic quality checks. |
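A condensed sketch of the route pattern behind the Convex centralization row above, assembled from the `/api/machines/*` diffs in this commit (`CORS_METHODS` stands in for each route's own constant):
```ts
import { api } from "@/convex/_generated/api"
import { jsonWithCors } from "@/server/cors"
import { createConvexClient, ConvexConfigurationError } from "@/server/convex-client"

const CORS_METHODS = "POST, OPTIONS" // stand-in; each route defines its own constant

export async function POST(request: Request) {
  const origin = request.headers.get("origin")
  let client
  try {
    // Throws ConvexConfigurationError when NEXT_PUBLIC_CONVEX_URL is unset
    client = createConvexClient()
  } catch (error) {
    if (error instanceof ConvexConfigurationError) {
      return jsonWithCors({ error: error.message }, 500, origin, CORS_METHODS)
    }
    throw error
  }
  try {
    const response = await client.mutation(api.machines.heartbeat, await request.json())
    return jsonWithCors(response, 200, origin, CORS_METHODS)
  } catch (error) {
    const details = error instanceof Error ? error.message : String(error)
    return jsonWithCors({ error: "Falha ao registrar heartbeat", details }, 500, origin, CORS_METHODS)
  }
}
```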
## 3. Points of attention (short term)
- **Prisma migrations in production**: any change must run against the `sistema_sistema_db` volume (see `docs/DEPLOY-RUNBOOK.md`). Take care to avoid P3009 regressions.
- **Tauri artifact updates**: releases require an up-to-date `latest.json` and signatures (`*.sig`). GitHub Actions automation is ready, but it depends on keeping the keys safe.
- **Better Auth seeds**: if new profiles/roles are added, update `scripts/seed-auth.mjs` and the Convex seed.
- **Critical variables**: `NEXT_PUBLIC_APP_URL`, `BETTER_AUTH_URL`, `MACHINE_PROVISIONING_SECRET`, and `NEXT_PUBLIC_CONVEX_URL` must be valid in the stack; any domain change implies reviewing `.env` and `stack.yml`.
## 4. Recommended backlog
1. **End-to-end tests**: cover the provisioning flow (desktop ↔ API) with an automated smoke test (can run conditionally in CI).
2. **Auth-agnostic support**: evaluate Clerk/Auth0 support per the Convex docs (custom JWTs).
3. **Observability**: add metrics/alerts for late heartbeats (Convex + dashboards).
4. **Desktop installer documentation**: per-OS guides on installation/signing and updater troubleshooting.
## 5. Known error cases
| Scenario | Symptom | How to resolve |
| --- | --- | --- |
| Machine token revoked | POST `/api/machines/sessions` returns 401 and the desktop falls back to onboarding | Re-provision via the agent UI; ensure `machineToken` was updated. |
| Heartbeat failure | Logs show `Falha ao registrar heartbeat` + status 500 | Check `NEXT_PUBLIC_CONVEX_URL` and connectivity. Run `pnpm convex:dev` in DEV to confirm the schema. |
| Updater finds no update | Desktop sits at “Procurando atualização” indefinitely | Confirm a published release with `latest.json` pointing to public bundle URLs and valid signatures. |
## 6. Immediate next steps
- Monitor the new quality workflow running on PRs.
- Make sure the team knows the updated deploy procedure (symlink + service update).
- Review the backlog above and prioritize smoke tests for the machine flow.
_Last updated: 16/10/2025 (UTC-3)._

View file

@ -9,7 +9,7 @@
"lint": "eslint",
"prisma:generate": "prisma generate",
"convex:dev": "convex dev",
"test": "vitest",
"test": "vitest --run --passWithNoTests",
"auth:seed": "node scripts/seed-auth.mjs",
"queues:ensure": "node scripts/ensure-default-queues.mjs",
"desktop:dev": "pnpm --filter appsdesktop tauri dev",

View file

@ -1,6 +1,4 @@
import { randomBytes } from "crypto"
import { ConvexHttpClient } from "convex/browser"
import { Prisma } from "@prisma/client"
import { api } from "@/convex/_generated/api"
import { DEFAULT_TENANT_ID } from "@/lib/constants"
@ -8,6 +6,7 @@ import { env } from "@/lib/env"
import { normalizeSlug, slugify } from "@/lib/slug"
import { prisma } from "@/lib/prisma"
import { createCorsPreflight, jsonWithCors } from "@/server/cors"
import { createConvexClient, ConvexConfigurationError } from "@/server/convex-client"
export const runtime = "nodejs"
@ -33,10 +32,7 @@ function extractSecret(request: Request, url: URL): string | null {
}
async function ensureConvexCompany(params: { tenantId: string; slug: string; name: string; provisioningCode: string }) {
if (!env.NEXT_PUBLIC_CONVEX_URL) {
throw new Error("Convex não configurado")
}
const client = new ConvexHttpClient(env.NEXT_PUBLIC_CONVEX_URL)
const client = createConvexClient()
await client.mutation(api.companies.ensureProvisioned, params)
}
@ -161,12 +157,19 @@ export async function POST(request: Request) {
},
}))
await ensureConvexCompany({
tenantId,
slug: company.slug,
name: company.name,
provisioningCode: company.provisioningCode,
})
try {
await ensureConvexCompany({
tenantId,
slug: company.slug,
name: company.name,
provisioningCode: company.provisioningCode,
})
} catch (error) {
if (error instanceof ConvexConfigurationError) {
return jsonWithCors({ error: error.message }, 500, origin, CORS_METHODS)
}
throw error
}
return jsonWithCors(
{
@ -188,12 +191,19 @@ export async function POST(request: Request) {
try {
const fallback = await prisma.company.findFirst({ where: { tenantId, slug: normalizedSlug } })
if (fallback) {
await ensureConvexCompany({
tenantId,
slug: fallback.slug,
name: fallback.name,
provisioningCode: fallback.provisioningCode,
})
try {
await ensureConvexCompany({
tenantId,
slug: fallback.slug,
name: fallback.name,
provisioningCode: fallback.provisioningCode,
})
} catch (error) {
if (error instanceof ConvexConfigurationError) {
return jsonWithCors({ error: error.message }, 500, origin, CORS_METHODS)
}
throw error
}
return jsonWithCors(
{
company: {

View file

@ -1,9 +1,8 @@
import { z } from "zod"
import { ConvexHttpClient } from "convex/browser"
import { api } from "@/convex/_generated/api"
import { env } from "@/lib/env"
import { createCorsPreflight, jsonWithCors } from "@/server/cors"
import { createConvexClient, ConvexConfigurationError } from "@/server/convex-client"
const heartbeatSchema = z.object({
machineToken: z.string().min(1),
@ -28,13 +27,19 @@ export async function OPTIONS(request: Request) {
}
export async function POST(request: Request) {
const origin = request.headers.get("origin")
if (request.method !== "POST") {
return jsonWithCors({ error: "Método não permitido" }, 405, request.headers.get("origin"), CORS_METHODS)
return jsonWithCors({ error: "Método não permitido" }, 405, origin, CORS_METHODS)
}
const convexUrl = env.NEXT_PUBLIC_CONVEX_URL
if (!convexUrl) {
return jsonWithCors({ error: "Convex não configurado" }, 500, request.headers.get("origin"), CORS_METHODS)
let client
try {
client = createConvexClient()
} catch (error) {
if (error instanceof ConvexConfigurationError) {
return jsonWithCors({ error: error.message }, 500, origin, CORS_METHODS)
}
throw error
}
let payload
@ -45,19 +50,17 @@ export async function POST(request: Request) {
return jsonWithCors(
{ error: "Payload inválido", details: error instanceof Error ? error.message : String(error) },
400,
request.headers.get("origin"),
origin,
CORS_METHODS
)
}
const client = new ConvexHttpClient(convexUrl)
try {
const response = await client.mutation(api.machines.heartbeat, payload)
return jsonWithCors(response, 200, request.headers.get("origin"), CORS_METHODS)
return jsonWithCors(response, 200, origin, CORS_METHODS)
} catch (error) {
console.error("[machines.heartbeat] Falha ao registrar heartbeat", error)
const details = error instanceof Error ? error.message : String(error)
return jsonWithCors({ error: "Falha ao registrar heartbeat", details }, 500, request.headers.get("origin"), CORS_METHODS)
return jsonWithCors({ error: "Falha ao registrar heartbeat", details }, 500, origin, CORS_METHODS)
}
}

View file

@ -1,9 +1,8 @@
import { z } from "zod"
import { ConvexHttpClient } from "convex/browser"
import { api } from "@/convex/_generated/api"
import { env } from "@/lib/env"
import { createCorsPreflight, jsonWithCors } from "@/server/cors"
import { createConvexClient, ConvexConfigurationError } from "@/server/convex-client"
const tokenModeSchema = z.object({
machineToken: z.string().min(1),
@ -41,9 +40,16 @@ export async function OPTIONS(request: Request) {
}
export async function POST(request: Request) {
const convexUrl = env.NEXT_PUBLIC_CONVEX_URL
if (!convexUrl) {
return jsonWithCors({ error: "Convex não configurado" }, 500, request.headers.get("origin"), CORS_METHODS)
const origin = request.headers.get("origin")
let client
try {
client = createConvexClient()
} catch (error) {
if (error instanceof ConvexConfigurationError) {
return jsonWithCors({ error: error.message }, 500, origin, CORS_METHODS)
}
throw error
}
let raw: unknown
@ -53,13 +59,11 @@ export async function POST(request: Request) {
return jsonWithCors(
{ error: "Payload inválido", details: error instanceof Error ? error.message : String(error) },
400,
request.headers.get("origin"),
origin,
CORS_METHODS
)
}
const client = new ConvexHttpClient(convexUrl)
// Mode A: with a machine token (uses heartbeat to merge the inventory)
const tokenParsed = tokenModeSchema.safeParse(raw)
if (tokenParsed.success) {
@ -71,11 +75,11 @@ export async function POST(request: Request) {
metrics: tokenParsed.data.metrics,
inventory: tokenParsed.data.inventory,
})
return jsonWithCors({ ok: true, machineId: result.machineId, expiresAt: result.expiresAt }, 200, request.headers.get("origin"), CORS_METHODS)
return jsonWithCors({ ok: true, machineId: result.machineId, expiresAt: result.expiresAt }, 200, origin, CORS_METHODS)
} catch (error) {
console.error("[machines.inventory:token] Falha ao atualizar inventário", error)
const details = error instanceof Error ? error.message : String(error)
return jsonWithCors({ error: "Falha ao atualizar inventário", details }, 500, request.headers.get("origin"), CORS_METHODS)
return jsonWithCors({ error: "Falha ao atualizar inventário", details }, 500, origin, CORS_METHODS)
}
}
@ -90,16 +94,16 @@ export async function POST(request: Request) {
macAddresses: provParsed.data.macAddresses,
serialNumbers: provParsed.data.serialNumbers,
inventory: provParsed.data.inventory,
metrics: provParsed.data.metrics,
registeredBy: provParsed.data.registeredBy ?? "agent:inventory",
metrics: provParsed.data.metrics,
registeredBy: provParsed.data.registeredBy ?? "agent:inventory",
})
return jsonWithCors({ ok: true, machineId: result.machineId, status: result.status }, 200, request.headers.get("origin"), CORS_METHODS)
return jsonWithCors({ ok: true, machineId: result.machineId, status: result.status }, 200, origin, CORS_METHODS)
} catch (error) {
console.error("[machines.inventory:prov] Falha ao fazer upsert de inventário", error)
const details = error instanceof Error ? error.message : String(error)
return jsonWithCors({ error: "Falha ao fazer upsert de inventário", details }, 500, request.headers.get("origin"), CORS_METHODS)
return jsonWithCors({ error: "Falha ao fazer upsert de inventário", details }, 500, origin, CORS_METHODS)
}
}
return jsonWithCors({ error: "Formato de payload não suportado" }, 400, request.headers.get("origin"), CORS_METHODS)
return jsonWithCors({ error: "Formato de payload não suportado" }, 400, origin, CORS_METHODS)
}

View file

@ -1,9 +1,7 @@
import { ConvexHttpClient } from "convex/browser"
import { api } from "@/convex/_generated/api"
import { prisma } from "@/lib/prisma"
import { env } from "@/lib/env"
import { createCorsPreflight, jsonWithCors } from "@/server/cors"
import { createConvexClient, ConvexConfigurationError } from "@/server/convex-client"
export const runtime = "nodejs"
@ -54,16 +52,18 @@ export async function POST(request: Request) {
return jsonWithCors({ error: "Código de provisionamento inválido" }, 404, origin, CORS_METHODS)
}
if (env.NEXT_PUBLIC_CONVEX_URL) {
try {
const client = new ConvexHttpClient(env.NEXT_PUBLIC_CONVEX_URL)
await client.mutation(api.companies.ensureProvisioned, {
tenantId: company.tenantId,
slug: company.slug,
name: company.name,
provisioningCode: company.provisioningCode,
})
} catch (error) {
try {
const client = createConvexClient()
await client.mutation(api.companies.ensureProvisioned, {
tenantId: company.tenantId,
slug: company.slug,
name: company.name,
provisioningCode: company.provisioningCode,
})
} catch (error) {
if (error instanceof ConvexConfigurationError) {
console.warn("[machines.provisioning] Convex não configurado; ignorando sincronização de empresa.")
} else {
console.error("[machines.provisioning] Falha ao sincronizar empresa no Convex", error)
}
}

View file

@ -1,13 +1,12 @@
import { z } from "zod"
import { ConvexHttpClient } from "convex/browser"
import { api } from "@/convex/_generated/api"
import type { Id } from "@/convex/_generated/dataModel"
import { env } from "@/lib/env"
import { DEFAULT_TENANT_ID } from "@/lib/constants"
import { ensureCollaboratorAccount, ensureMachineAccount } from "@/server/machines-auth"
import { createCorsPreflight, jsonWithCors } from "@/server/cors"
import { prisma } from "@/lib/prisma"
import { createConvexClient, ConvexConfigurationError } from "@/server/convex-client"
const registerSchema = z
.object({
@ -42,13 +41,19 @@ export async function OPTIONS(request: Request) {
}
export async function POST(request: Request) {
const origin = request.headers.get("origin")
if (request.method !== "POST") {
return jsonWithCors({ error: "Método não permitido" }, 405, request.headers.get("origin"), CORS_METHODS)
return jsonWithCors({ error: "Método não permitido" }, 405, origin, CORS_METHODS)
}
const convexUrl = env.NEXT_PUBLIC_CONVEX_URL
if (!convexUrl) {
return jsonWithCors({ error: "Convex não configurado" }, 500, request.headers.get("origin"), CORS_METHODS)
let client
try {
client = createConvexClient()
} catch (error) {
if (error instanceof ConvexConfigurationError) {
return jsonWithCors({ error: error.message }, 500, origin, CORS_METHODS)
}
throw error
}
let payload
@ -59,12 +64,11 @@ export async function POST(request: Request) {
return jsonWithCors(
{ error: "Payload inválido", details: error instanceof Error ? error.message : String(error) },
400,
request.headers.get("origin"),
origin,
CORS_METHODS
)
}
const client = new ConvexHttpClient(convexUrl)
try {
const provisioningCode = payload.provisioningCode.trim().toLowerCase()
const companyRecord = await prisma.company.findFirst({
@ -76,7 +80,7 @@ export async function POST(request: Request) {
return jsonWithCors(
{ error: "Código de provisionamento inválido" },
404,
request.headers.get("origin"),
origin,
CORS_METHODS
)
}
@ -89,7 +93,7 @@ export async function POST(request: Request) {
return jsonWithCors(
{ error: "Informe os dados do colaborador/gestor ao definir o perfil de acesso." },
400,
request.headers.get("origin"),
origin,
CORS_METHODS
)
}
@ -195,7 +199,7 @@ export async function POST(request: Request) {
collaborator: collaborator ?? null,
},
{ status: 201 },
request.headers.get("origin"),
origin,
CORS_METHODS
)
} catch (error) {
@ -207,6 +211,6 @@ export async function POST(request: Request) {
const isConvexError = msg.includes("convexerror")
const status = isInvalidCode ? 401 : isCompanyNotFound ? 404 : isConvexError ? 400 : 500
const payload = { error: "Falha ao provisionar máquina", details }
return jsonWithCors(payload, status, request.headers.get("origin"), CORS_METHODS)
return jsonWithCors(payload, status, origin, CORS_METHODS)
}
}

View file

@ -1,105 +1,43 @@
import { NextRequest, NextResponse } from "next/server"
import { cookies } from "next/headers"
import { ConvexHttpClient } from "convex/browser"
import { api } from "@/convex/_generated/api"
import type { Id } from "@/convex/_generated/dataModel"
import { env } from "@/lib/env"
import { assertAuthenticatedSession } from "@/lib/auth-server"
import { DEFAULT_TENANT_ID } from "@/lib/constants"
const MACHINE_CTX_COOKIE = "machine_ctx"
import { createConvexClient, ConvexConfigurationError } from "@/server/convex-client"
import {
MACHINE_CTX_COOKIE,
extractCollaboratorFromMetadata,
parseMachineCookie,
serializeMachineCookie,
type CollaboratorMetadata,
type MachineContextCookiePayload,
} from "@/server/machines/context"
// Force the Node.js runtime for consistent reads of session cookies
export const runtime = "nodejs"
type CollaboratorMetadata = {
email: string
name: string | null
role: "collaborator" | "manager" | null
}
function decodeMachineCookie(value: string) {
try {
const json = Buffer.from(value, "base64url").toString("utf8")
return JSON.parse(json) as {
machineId: string
persona: string | null
assignedUserId: string | null
assignedUserEmail: string | null
assignedUserName: string | null
assignedUserRole: string | null
}
} catch {
return null
}
}
function encodeMachineCookie(payload: {
machineId: string
persona: string | null
assignedUserId: string | null
assignedUserEmail: string | null
assignedUserName: string | null
assignedUserRole: string | null
}) {
return Buffer.from(JSON.stringify(payload)).toString("base64url")
}
function extractCollaboratorFromMetadata(metadata: unknown): CollaboratorMetadata | null {
if (!metadata || typeof metadata !== "object" || Array.isArray(metadata)) {
return null
}
const record = metadata as Record<string, unknown>
const raw = record["collaborator"]
if (!raw || typeof raw !== "object" || Array.isArray(raw)) {
return null
}
const base = raw as Record<string, unknown>
const emailValue = base["email"]
if (typeof emailValue !== "string") {
return null
}
const email = emailValue.trim().toLowerCase()
if (!email) {
return null
}
const nameValue = base["name"]
const roleValue = base["role"]
const name = typeof nameValue === "string" ? (nameValue.trim() || null) : null
const normalizedRole =
typeof roleValue === "string" ? roleValue.trim().toLowerCase() : null
const role =
normalizedRole === "manager"
? "manager"
: normalizedRole === "collaborator"
? "collaborator"
: null
return {
email,
name,
role,
}
}
export async function GET(request: NextRequest) {
const session = await assertAuthenticatedSession()
if (!session || session.user?.role !== "machine") {
return NextResponse.json({ error: "Sessão de máquina não encontrada." }, { status: 403 })
}
const convexUrl = env.NEXT_PUBLIC_CONVEX_URL
if (!convexUrl) {
return NextResponse.json({ error: "Convex não configurado." }, { status: 500 })
let client
try {
client = createConvexClient()
} catch (error) {
if (error instanceof ConvexConfigurationError) {
return NextResponse.json({ error: error.message }, { status: 500 })
}
throw error
}
const client = new ConvexHttpClient(convexUrl)
const cookieStore = await cookies()
const cookieValue = cookieStore.get(MACHINE_CTX_COOKIE)?.value ?? null
const decoded = cookieValue ? decodeMachineCookie(cookieValue) : null
const decoded = parseMachineCookie(cookieValue)
let machineId: Id<"machines"> | null = decoded?.machineId ? (decoded.machineId as Id<"machines">) : null
if (!machineId) {
@ -136,7 +74,7 @@ export async function GET(request: NextRequest) {
authEmail: string | null
}
const metadataCollaborator = extractCollaboratorFromMetadata(context.metadata)
const metadataCollaborator: CollaboratorMetadata | null = extractCollaboratorFromMetadata(context.metadata)
let ensuredAssignedUserId = context.assignedUserId
let ensuredAssignedUserEmail = context.assignedUserEmail ?? metadataCollaborator?.email ?? null
@ -200,13 +138,13 @@ export async function GET(request: NextRequest) {
ensuredPersona ??
(ensuredAssignedUserRole ? ensuredAssignedUserRole.toLowerCase() : null)
const responsePayload = {
const responsePayload: MachineContextCookiePayload = {
machineId: context.id,
persona: resolvedPersona,
assignedUserId: ensuredAssignedUserId,
assignedUserEmail: ensuredAssignedUserEmail,
assignedUserName: ensuredAssignedUserName,
assignedUserRole: ensuredAssignedUserRole,
persona: resolvedPersona ?? null,
assignedUserId: ensuredAssignedUserId ?? null,
assignedUserEmail: ensuredAssignedUserEmail ?? null,
assignedUserName: ensuredAssignedUserName ?? null,
assignedUserRole: ensuredAssignedUserRole ?? null,
}
const response = NextResponse.json({
@ -224,7 +162,7 @@ export async function GET(request: NextRequest) {
const isSecure = request.nextUrl.protocol === "https:"
response.cookies.set({
name: MACHINE_CTX_COOKIE,
value: encodeMachineCookie(responsePayload),
value: serializeMachineCookie(responsePayload),
httpOnly: true,
sameSite: "lax",
secure: isSecure,

View file

@ -2,6 +2,11 @@ import { NextResponse } from "next/server"
import { z } from "zod"
import { createMachineSession } from "@/server/machines-session"
import { applyCorsHeaders, createCorsPreflight, jsonWithCors } from "@/server/cors"
import {
MACHINE_CTX_COOKIE,
serializeMachineCookie,
type MachineContextCookiePayload,
} from "@/server/machines/context"
const sessionSchema = z.object({
machineToken: z.string().min(1),
@ -96,18 +101,18 @@ export async function POST(request: Request) {
response.cookies.set(name, value, options)
}
const machineCookiePayload = {
const machineCookiePayload: MachineContextCookiePayload = {
machineId: session.machine.id,
persona: session.machine.persona,
assignedUserId: session.machine.assignedUserId,
assignedUserEmail: session.machine.assignedUserEmail,
assignedUserName: session.machine.assignedUserName,
assignedUserRole: session.machine.assignedUserRole,
persona: session.machine.persona ?? null,
assignedUserId: session.machine.assignedUserId ?? null,
assignedUserEmail: session.machine.assignedUserEmail ?? null,
assignedUserName: session.machine.assignedUserName ?? null,
assignedUserRole: session.machine.assignedUserRole ?? null,
}
const isSecure = new URL(request.url).protocol === "https:"
response.cookies.set({
name: "machine_ctx",
value: Buffer.from(JSON.stringify(machineCookiePayload)).toString("base64url"),
name: MACHINE_CTX_COOKIE,
value: serializeMachineCookie(machineCookiePayload),
httpOnly: true,
sameSite: "lax",
secure: isSecure,

View file

@ -21,23 +21,30 @@ export function LoginPageClient() {
const { data: session, isPending } = useSession()
const callbackUrl = searchParams?.get("callbackUrl") ?? undefined
const [isHydrated, setIsHydrated] = useState(false)
const sessionUser = session?.user
const userId = sessionUser?.id ?? null
const normalizedRole = sessionUser?.role ? sessionUser.role.toLowerCase() : null
const persona = typeof sessionUser?.machinePersona === "string" ? sessionUser.machinePersona.toLowerCase() : null
useEffect(() => {
if (isPending) return
if (!session?.user) return
const role = (session.user.role ?? "").toLowerCase()
const persona = (session.user as any).machinePersona
? String((session.user as any).machinePersona).toLowerCase()
: null
if (!userId) return
const defaultDest =
role === "machine"
normalizedRole === "machine"
? persona === "manager"
? "/dashboard"
: "/portal/tickets"
: "/dashboard"
const destination = callbackUrl ?? defaultDest
router.replace(destination)
}, [callbackUrl, isPending, router, session?.user])
}, [
callbackUrl,
isPending,
normalizedRole,
persona,
router,
userId,
])
useEffect(() => {
setIsHydrated(true)

View file

@ -307,7 +307,7 @@ export function TicketSummaryHeader({ ticket }: TicketHeaderProps) {
})
setQueueSelection(ticket.queue ?? "")
setAssigneeSelection(ticket.assignee?.id ?? "")
}, [editing, ticket.category?.id, ticket.subcategory?.id, ticket.queue])
}, [editing, ticket.category?.id, ticket.subcategory?.id, ticket.queue, ticket.assignee?.id])
useEffect(() => {
if (!editing) return

View file

@ -0,0 +1,49 @@
"use client"
import { useCallback, useState } from "react"
import { Copy, Sparkles } from "lucide-react"
import { Button } from "@/components/ui/button"
import { cn } from "@/lib/utils"
interface CopyButtonProps {
value: string
onCopied?: () => void
}
export function CopyButton({ value, onCopied }: CopyButtonProps) {
const [copied, setCopied] = useState(false)
const handleCopy = useCallback(async () => {
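// The Clipboard API only works in secure contexts (https/localhost); on failure we log and keep the idle state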
try {
await navigator.clipboard.writeText(value)
setCopied(true)
onCopied?.()
setTimeout(() => setCopied(false), 2000)
} catch (error) {
console.error("Falha ao copiar código", error)
}
}, [onCopied, value])
return (
<Button
type="button"
variant="outline"
size="sm"
onClick={handleCopy}
className="relative overflow-hidden border border-dashed border-slate-300 bg-white px-3 py-2 text-sm font-semibold text-neutral-700 transition-all hover:border-slate-400 hover:bg-white active:scale-[0.97]"
>
<span className="pointer-events-none absolute inset-0 rounded-md bg-neutral-900/5 opacity-0 transition-opacity duration-100 ease-out active:opacity-100" />
<span
className={cn(
"flex items-center gap-2 transition-all duration-200 ease-out",
copied ? "text-emerald-600" : "text-neutral-700"
)}
>
{copied ? <Sparkles className="size-3.5" /> : <Copy className="size-3.5" />}
{copied ? "Copiado!" : "Copiar código"}
</span>
<span className="sr-only">Copiar código de provisionamento</span>
</Button>
)
}
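
A minimal usage sketch for the new button; the import path, the `provisioningCode` prop, and the log call are illustrative assumptions, not part of this commit:

import { CopyButton } from "@/components/copy-button" // path is an assumption

function ProvisioningCodeRow({ provisioningCode }: { provisioningCode: string }) {
  return (
    <CopyButton
      value={provisioningCode}
      onCopied={() => console.log("código copiado")} // e.g. fire a toast here instead
    />
  )
}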


@ -0,0 +1,21 @@
"use client"
import { cn } from "@/lib/utils"
export function Crossblur({ active }: { active: boolean }) {
return (
<span
className={cn(
"pointer-events-none absolute inset-0 overflow-hidden rounded-xl transition-opacity duration-200 ease-out",
active ? "opacity-100" : "opacity-0"
)}
>
<span
className={cn(
"absolute inset-[-40%] rounded-full bg-[radial-gradient(circle_at_center,_rgba(59,130,246,0.25),_transparent_70%)] blur-lg transition-transform duration-500 ease-in-out",
active ? "scale-[1.05] rotate-6" : "scale-100 -rotate-12"
)}
/>
</span>
)
}
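
Since `Crossblur` is absolutely positioned against `inset-0`, it only renders sensibly inside a positioned ancestor. A hedged sketch of one way to wire it up; the hover state, wrapper, and import path are assumptions:

import { useState, type ReactNode } from "react"
import { Crossblur } from "@/components/crossblur" // path is an assumption

function GlowCard({ children }: { children: ReactNode }) {
  const [hovered, setHovered] = useState(false)
  return (
    <div
      className="relative rounded-xl" // `relative` anchors the inset-0 overlay
      onMouseEnter={() => setHovered(true)}
      onMouseLeave={() => setHovered(false)}
    >
      {children}
      <Crossblur active={hovered} />
    </div>
  )
}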


@ -0,0 +1,24 @@
import { ConvexHttpClient } from "convex/browser"
import { env } from "@/lib/env"
export class ConvexConfigurationError extends Error {
constructor(message = "Convex não configurado.") {
super(message)
this.name = "ConvexConfigurationError"
}
}
export function requireConvexUrl(): string {
const url = env.NEXT_PUBLIC_CONVEX_URL
if (!url) {
throw new ConvexConfigurationError()
}
return url
}
export function createConvexClient(): ConvexHttpClient {
const url = requireConvexUrl()
return new ConvexHttpClient(url)
}
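
A sketch of how a server-side caller might consume these helpers; the route shape, import path, and 503 mapping are assumptions rather than anything this commit prescribes:

import { NextResponse } from "next/server"
import { createConvexClient, ConvexConfigurationError } from "@/lib/convex" // path is an assumption

export async function GET() {
  try {
    const convex = createConvexClient()
    // ...convex.query(...) calls would go here
    return NextResponse.json({ ok: true })
  } catch (error) {
    if (error instanceof ConvexConfigurationError) {
      // A missing NEXT_PUBLIC_CONVEX_URL is a deployment problem, so surface it as 503
      return NextResponse.json({ error: error.message }, { status: 503 })
    }
    throw error
  }
}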


@ -0,0 +1,70 @@
import { z } from "zod"
export const MACHINE_CTX_COOKIE = "machine_ctx"
const machineCookieSchema = z.object({
machineId: z.string(),
persona: z.string().nullable().optional(),
assignedUserId: z.string().nullable().optional(),
assignedUserEmail: z.string().nullable().optional(),
assignedUserName: z.string().nullable().optional(),
assignedUserRole: z.string().nullable().optional(),
})
const collaboratorSchema = z
.object({
email: z.string().email(),
name: z.string().optional(),
role: z.string().optional(),
})
.transform(({ email, name, role }) => {
const trimmedRole = role?.trim().toLowerCase()
const normalizedRole =
trimmedRole === "manager"
? "manager"
: trimmedRole === "collaborator"
? "collaborator"
: null
const normalizedName = typeof name === "string" ? name.trim() || null : null
return {
email: email.trim().toLowerCase(),
name: normalizedName,
role: normalizedRole,
}
})
const metadataSchema = z
.object({
collaborator: collaboratorSchema,
})
.passthrough()
export type MachineContextCookiePayload = z.infer<typeof machineCookieSchema>
export type CollaboratorMetadata = z.output<typeof collaboratorSchema>
export function parseMachineCookie(value: string | null | undefined): MachineContextCookiePayload | null {
if (!value) return null
try {
const json = Buffer.from(value, "base64url").toString("utf8")
const parsed = JSON.parse(json)
return machineCookieSchema.parse(parsed)
} catch {
return null
}
}
export function serializeMachineCookie(payload: MachineContextCookiePayload): string {
return Buffer.from(JSON.stringify(machineCookieSchema.parse(payload))).toString("base64url")
}
export function extractCollaboratorFromMetadata(metadata: unknown): CollaboratorMetadata | null {
if (!metadata) return null
const parsed = metadataSchema.safeParse(metadata)
if (!parsed.success) {
return null
}
return parsed.data.collaborator
}
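
A round-trip sketch with made-up values; note that `parseMachineCookie` swallows malformed input and returns `null` instead of throwing:

// All values below are illustrative
const value = serializeMachineCookie({
  machineId: "machine_123",
  persona: "manager",
  assignedUserId: null,
  assignedUserEmail: null,
  assignedUserName: null,
  assignedUserRole: null,
})

const payload = parseMachineCookie(value) // -> the payload above
const invalid = parseMachineCookie("não-é-base64url") // -> null, never throws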


@ -1,19 +1,35 @@
import { describe, it, expect, vi, beforeEach } from "vitest"
import { describe, it, expect, vi } from "vitest"
// Mock tls to simulate an SMTP server over implicit TLS
let lastWrites: string[] = []
vi.mock("tls", () => {
type Listener = (...args: unknown[]) => void
class MockSocket {
listeners: Record<string, Function[]> = {}
listeners: Record<string, Listener[]> = {}
writes: string[] = []
// very small state machine of server responses
private step = 0
on(event: string, cb: Function) {
private enqueue(messages: string | string[], type: "data" | "end" = "data") {
const chunks = Array.isArray(messages) ? messages : [messages]
chunks.forEach((chunk, index) => {
const delay = index === 0 ? 0 : 1
setTimeout(() => {
if (type === "end") {
void chunk
this.emit("end")
return
}
this.emit("data", Buffer.from(chunk))
}, delay)
})
}
on(event: string, cb: Listener) {
this.listeners[event] = this.listeners[event] || []
this.listeners[event].push(cb)
return this
}
removeListener(event: string, cb: Function) {
removeListener(event: string, cb: Listener) {
if (!this.listeners[event]) return this
this.listeners[event] = this.listeners[event].filter((f) => f !== cb)
return this
@ -27,37 +43,36 @@ vi.mock("tls", () => {
// Respond depending on client command
if (this.step === 0 && line.startsWith("EHLO")) {
this.step = 1
this.emit("data", Buffer.from("250-local\r\n"))
this.emit("data", Buffer.from("250 OK\r\n"))
this.enqueue(["250-local\r\n", "250 OK\r\n"])
} else if (this.step === 1 && line === "AUTH LOGIN") {
this.step = 2
this.emit("data", Buffer.from("334 VXNlcm5hbWU6\r\n"))
this.enqueue("334 VXNlcm5hbWU6\r\n")
} else if (this.step === 2) {
this.step = 3
this.emit("data", Buffer.from("334 UGFzc3dvcmQ6\r\n"))
this.enqueue("334 UGFzc3dvcmQ6\r\n")
} else if (this.step === 3) {
this.step = 4
this.emit("data", Buffer.from("235 Auth OK\r\n"))
this.enqueue("235 Auth OK\r\n")
} else if (this.step === 4 && line.startsWith("MAIL FROM:")) {
this.step = 5
this.emit("data", Buffer.from("250 FROM OK\r\n"))
this.enqueue("250 FROM OK\r\n")
} else if (this.step === 5 && line.startsWith("RCPT TO:")) {
this.step = 6
this.emit("data", Buffer.from("250 RCPT OK\r\n"))
this.enqueue("250 RCPT OK\r\n")
} else if (this.step === 6 && line === "DATA") {
this.step = 7
this.emit("data", Buffer.from("354 End data with <CR><LF>.<CR><LF>\r\n"))
this.enqueue("354 End data with <CR><LF>.<CR><LF>\r\n")
} else if (this.step === 7 && line.endsWith(".")) {
this.step = 8
this.emit("data", Buffer.from("250 Queued\r\n"))
this.enqueue("250 Queued\r\n")
} else if (this.step === 8 && line === "QUIT") {
this.emit("end")
this.enqueue("", "end")
}
}
end() {}
}
function connect(_port: number, _host: string, _opts: unknown, cb?: Function) {
function connect(_port: number, _host: string, _opts: unknown, cb?: () => void) {
const socket = new MockSocket()
lastWrites = socket.writes
// initial server greeting
@ -92,7 +107,7 @@ describe("sendSmtpMail", () => {
it("extracts envelope address from parentheses or raw email", async () => {
const { sendSmtpMail } = await import("@/server/email-smtp")
const tlsMock = await import("tls" as any)
const tlsMock = (await import("tls")) as unknown as { __getLastWrites: () => string[] }
await sendSmtpMail(
{
host: "smtp.mock",
@ -105,7 +120,7 @@ describe("sendSmtpMail", () => {
"Subject",
"<p>Hi</p>"
)
const writes = (tlsMock as any).__getLastWrites() as string[]
const writes = tlsMock.__getLastWrites()
expect(writes.some((w) => /MAIL FROM:<chat@esdrasrenan.com.br>\r\n/.test(w))).toBe(true)
})
})


@ -1,5 +1,5 @@
import { describe, expect, it } from "vitest"
import { dateKeyTZ, getTZParts, isAtHourTZ } from "@/lib/time"
import { dateKeyTZ, isAtHourTZ } from "@/lib/time"
describe("time tz helpers", () => {
it("computes date key in timezone", () => {
@ -14,4 +14,3 @@ describe("time tz helpers", () => {
expect(isAtHourTZ(d, "America/Sao_Paulo", 8)).toBe(true)
})
})


@ -1,5 +1,4 @@
declare module "pdfkit/js/pdfkit.standalone.js" {
const PDFDocument: any
const PDFDocument: typeof import("pdfkit")
export default PDFDocument
}
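
Typing the standalone build as `typeof import("pdfkit")` lets call sites type-check instead of collapsing to `any`. A minimal sketch, assuming `@types/pdfkit` (or pdfkit's bundled typings) is available:

import PDFDocument from "pdfkit/js/pdfkit.standalone.js"

const doc = new PDFDocument({ size: "A4" })
doc.text("Olá") // method signatures now come from the pdfkit typings
doc.end()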


@ -13,5 +13,6 @@ export default defineConfig({
globals: true,
include: ["src/**/*.test.ts", "tests/**/*.test.ts"],
setupFiles: ["./vitest.setup.ts"],
testTimeout: 15000,
},
})
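
The new `testTimeout: 15000` raises Vitest's 5-second default for every test, presumably to give the timer-driven SMTP mock some CI headroom. A single case can still override it; the test name and the third argument below are a hypothetical example:

import { it } from "vitest"

it("handles a slow SMTP exchange", async () => {
  // ...
}, 30_000) // per-test timeout in ms, overrides the suite default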