chore: sync staging
parent c5ddd54a3e
commit 561b19cf66

610 changed files with 105285 additions and 1206 deletions
@@ -1,4 +1,4 @@
-import { query } from "./_generated/server";
+import { action, query } from "./_generated/server";
 import type { QueryCtx } from "./_generated/server";
 import { ConvexError, v } from "convex/values";
 import type { Doc, Id } from "./_generated/dataModel";
@@ -503,6 +503,41 @@ export const slaOverview = query({
   handler: slaOverviewHandler,
 });
 
+export const triggerScheduledExports = action({
+  args: {
+    tenantId: v.optional(v.string()),
+  },
+  handler: async (_ctx, args) => {
+    const secret = process.env.REPORTS_CRON_SECRET
+    const baseUrl =
+      process.env.REPORTS_CRON_BASE_URL ??
+      process.env.NEXT_PUBLIC_APP_URL ??
+      process.env.BETTER_AUTH_URL
+
+    if (!secret || !baseUrl) {
+      console.warn("[reports] cron skip: missing REPORTS_CRON_SECRET or base URL")
+      return { skipped: true }
+    }
+
+    const endpoint = `${baseUrl.replace(/\/$/, "")}/api/reports/schedules/run`
+    const response = await fetch(endpoint, {
+      method: "POST",
+      headers: {
+        Authorization: `Bearer ${secret}`,
+        "Content-Type": "application/json",
+      },
+      body: JSON.stringify({ tenantId: args.tenantId }),
+    })
+
+    if (!response.ok) {
+      const detail = await response.text().catch(() => response.statusText)
+      throw new ConvexError(`Falha ao disparar agendamentos: ${response.status} ${detail}`)
+    }
+
+    return response.json()
+  },
+})
+
 export async function csatOverviewHandler(
   ctx: QueryCtx,
   { tenantId, viewerId, range, companyId }: { tenantId: string; viewerId: Id<"users">; range?: string; companyId?: Id<"companies"> }
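
Note: this hunk does not show how triggerScheduledExports gets invoked. If the intent is to fire it on a schedule, a minimal sketch of the wiring in convex/crons.ts could look like the following; the module name "reports", the cron name, and the hourly cadence are assumptions, not part of this commit.

// Minimal sketch of scheduling the action above from convex/crons.ts.
// "reports" as the module name and the hourly cadence are assumptions.
import { cronJobs } from "convex/server";
import { api } from "./_generated/api";

const crons = cronJobs();

crons.hourly(
  "trigger scheduled report exports",
  { minuteUTC: 0 },
  api.reports.triggerScheduledExports,
  {} // tenantId is optional per the action's validator
);

export default crons;
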
@@ -716,6 +751,101 @@ export const backlogOverview = query({
   handler: backlogOverviewHandler,
 });
 
+type QueueTrendPoint = { date: string; opened: number; resolved: number }
+type QueueTrendEntry = {
+  id: string
+  name: string
+  openedTotal: number
+  resolvedTotal: number
+  series: Map<string, QueueTrendPoint>
+}
+
+export async function queueLoadTrendHandler(
+  ctx: QueryCtx,
+  {
+    tenantId,
+    viewerId,
+    range,
+    limit,
+  }: { tenantId: string; viewerId: Id<"users">; range?: string; limit?: number }
+) {
+  const viewer = await requireStaff(ctx, viewerId, tenantId)
+  const days = range === "90d" ? 90 : range === "30d" ? 30 : 14
+  const end = new Date()
+  end.setUTCHours(0, 0, 0, 0)
+  const endMs = end.getTime() + ONE_DAY_MS
+  const startMs = endMs - days * ONE_DAY_MS
+  const tickets = await fetchScopedTickets(ctx, tenantId, viewer)
+  const queues = await fetchQueues(ctx, tenantId)
+
+  const queueNames = new Map<string, string>()
+  queues.forEach((queue) => queueNames.set(String(queue._id), queue.name))
+  queueNames.set("unassigned", "Sem fila")
+
+  const dayKeys: string[] = []
+  for (let i = days - 1; i >= 0; i--) {
+    const key = formatDateKey(endMs - (i + 1) * ONE_DAY_MS)
+    dayKeys.push(key)
+  }
+
+  const stats = new Map<string, QueueTrendEntry>()
+  const ensureEntry = (queueId: string) => {
+    if (!stats.has(queueId)) {
+      const series = new Map<string, QueueTrendPoint>()
+      dayKeys.forEach((key) => {
+        series.set(key, { date: key, opened: 0, resolved: 0 })
+      })
+      stats.set(queueId, {
+        id: queueId,
+        name: queueNames.get(queueId) ?? "Sem fila",
+        openedTotal: 0,
+        resolvedTotal: 0,
+        series,
+      })
+    }
+    return stats.get(queueId)!
+  }
+
+  for (const ticket of tickets) {
+    const queueId = ticket.queueId ? String(ticket.queueId) : "unassigned"
+    if (ticket.createdAt >= startMs && ticket.createdAt < endMs) {
+      const entry = ensureEntry(queueId)
+      const bucket = entry.series.get(formatDateKey(ticket.createdAt))
+      if (bucket) {
+        bucket.opened += 1
+      }
+      entry.openedTotal += 1
+    }
+    if (typeof ticket.resolvedAt === "number" && ticket.resolvedAt >= startMs && ticket.resolvedAt < endMs) {
+      const entry = ensureEntry(queueId)
+      const bucket = entry.series.get(formatDateKey(ticket.resolvedAt))
+      if (bucket) {
+        bucket.resolved += 1
+      }
+      entry.resolvedTotal += 1
+    }
+  }
+
+  const maxEntries = Math.max(1, Math.min(limit ?? 3, 6))
+  const queuesTrend = Array.from(stats.values())
+    .sort((a, b) => b.openedTotal - a.openedTotal)
+    .slice(0, maxEntries)
+    .map((entry) => ({
+      id: entry.id,
+      name: entry.name,
+      openedTotal: entry.openedTotal,
+      resolvedTotal: entry.resolvedTotal,
+      series: dayKeys.map((key) => entry.series.get(key)!),
+    }))
+
+  return { rangeDays: days, queues: queuesTrend }
+}
+
+export const queueLoadTrend = query({
+  args: { tenantId: v.string(), viewerId: v.id("users"), range: v.optional(v.string()), limit: v.optional(v.number()) },
+  handler: queueLoadTrendHandler,
+})
+
+// Touch to ensure CI convex_deploy runs and that agentProductivity is deployed
 export async function agentProductivityHandler(
   ctx: QueryCtx,
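
For context, a hypothetical client-side consumer of the new queueLoadTrend query might look like the sketch below. It assumes the file in this diff is exposed as api.reports and that the generated Convex bindings are importable from @/convex/_generated; both paths are guesses, since neither appears in the commit.

// Hypothetical React consumer of queueLoadTrend; module path and import
// aliases are assumptions. range accepts "30d" or "90d" (anything else
// falls back to 14 days) and limit is clamped to 1..6 on the server.
import { useQuery } from "convex/react";
import { api } from "@/convex/_generated/api";
import type { Id } from "@/convex/_generated/dataModel";

export function QueueLoadTrendCard({
  tenantId,
  viewerId,
}: {
  tenantId: string;
  viewerId: Id<"users">;
}) {
  const trend = useQuery(api.reports.queueLoadTrend, {
    tenantId,
    viewerId,
    range: "30d",
    limit: 3,
  });

  if (!trend) return null; // undefined while the query is loading

  return (
    <ul>
      {trend.queues.map((queue) => (
        <li key={queue.id}>
          {queue.name}: {queue.openedTotal} opened / {queue.resolvedTotal} resolved
          over {trend.rangeDays} days
        </li>
      ))}
    </ul>
  );
}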