chore: upgrade next to 16.0.8 and tidy local archive
parent 0df1e87f61
commit 821cb7faa7
7 changed files with 270 additions and 45 deletions
@@ -1,7 +1,8 @@
 import { mkdir, writeFile } from "fs/promises"
-import { join, dirname } from "path"
+import { join, dirname, relative, isAbsolute } from "path"
+
 import { api } from "@/convex/_generated/api"
 import type { Id } from "@/convex/_generated/dataModel"
 import { DEFAULT_TENANT_ID } from "@/lib/constants"
 import { env } from "@/lib/env"
 import { createConvexClient } from "@/server/convex-client"
@@ -10,6 +11,7 @@ type ArchiveItem = {
   ticket: Record<string, unknown>
   comments: Array<Record<string, unknown>>
   events: Array<Record<string, unknown>>
+  attachments: ArchivedAttachment[]
 }
 
 type ExportResponse = {
@@ -17,6 +19,18 @@ type ExportResponse = {
   items: ArchiveItem[]
 }
 
+type ArchivedAttachment = {
+  storageId: string
+  name: string | null
+  size: number | null
+  type: string | null
+  archivedPath: string | null
+  ticketId: string
+  commentId: string | null
+  status: "downloaded" | "skipped" | "failed"
+  error?: string
+}
+
 function assertArchiveSecret(): string {
   const secret = env.INTERNAL_HEALTH_TOKEN ?? env.REPORTS_CRON_SECRET
   if (!secret) {
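The ArchivedAttachment record added here is the per-attachment bookkeeping row that later gets embedded in each JSONL line. For orientation, a minimal sketch of a value that satisfies the type (every id, size, and path below is invented for illustration):

// Hypothetical ArchivedAttachment for a successfully downloaded file.
const example: ArchivedAttachment = {
  storageId: "st_abc123",                          // Convex storage id (made up)
  name: "invoice.pdf",
  size: 48213,
  type: "application/pdf",
  archivedPath: "attachments/t_1/c_9/invoice.pdf", // relative to the archive dir
  ticketId: "t_1",
  commentId: "c_9",
  status: "downloaded",                            // or "skipped" / "failed", with `error` set
}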
@@ -29,11 +43,188 @@ function nowIso() {
   return new Date().toISOString().replace(/[:.]/g, "-")
 }
 
+const DEFAULT_ARCHIVE_DIR = join(process.cwd(), ".data", "archives")
+const ARCHIVE_DIR = resolveArchiveDir(process.env.ARCHIVE_DIR)
+const ARCHIVE_ENABLED = process.env.LOCAL_ARCHIVE_ENABLED === "true"
+const ARCHIVE_FILENAME = "tickets-archive-latest.jsonl"
+
+function resolveArchiveDir(dir: string | undefined | null) {
+  const normalized = (dir ?? "").trim()
+  if (!normalized || normalized === "." || normalized === "./") {
+    return DEFAULT_ARCHIVE_DIR
+  }
+  if (!isAbsolute(normalized)) {
+    throw new Error("ARCHIVE_DIR must be an absolute path, or use the default ./archives")
+  }
+  return normalized
+}
+
+function normalizeFilename(name: string | null | undefined, fallback: string) {
+  const base = (name ?? "").trim() || fallback
+  return base.replace(/[^a-zA-Z0-9._-]/g, "_")
+}
+
+async function downloadAttachment(params: {
+  storageId: string
+  name: string | null
+  ticketId: string
+  commentId: string | null
+  archiveDir: string
+  client: ReturnType<typeof createConvexClient>
+}) {
+  const { storageId, name, ticketId, commentId, archiveDir, client } = params
+  try {
+    const url = await client.action(api.files.getUrl, { storageId: storageId as Id<"_storage"> })
+    if (!url) {
+      return {
+        storageId,
+        name: name ?? null,
+        size: null,
+        type: null,
+        archivedPath: null,
+        ticketId,
+        commentId,
+        status: "skipped" as const,
+        error: "Empty URL for attachment",
+      }
+    }
+
+    const response = await fetch(url)
+    if (!response.ok) {
+      return {
+        storageId,
+        name: name ?? null,
+        size: null,
+        type: null,
+        archivedPath: null,
+        ticketId,
+        commentId,
+        status: "failed" as const,
+        error: `HTTP ${response.status}`,
+      }
+    }
+
+    const arrayBuffer = await response.arrayBuffer()
+    const buffer = Buffer.from(arrayBuffer)
+    const contentType = response.headers.get("content-type") ?? undefined
+    const targetDir = join(archiveDir, "attachments", ticketId, commentId ?? "ticket")
+    await mkdir(targetDir, { recursive: true })
+    const filename = normalizeFilename(name, `${storageId}.bin`)
+    const filePath = join(targetDir, filename)
+    await writeFile(filePath, buffer)
+
+    return {
+      storageId,
+      name: name ?? null,
+      size: buffer.byteLength,
+      type: contentType ?? null,
+      archivedPath: relative(archiveDir, filePath),
+      ticketId,
+      commentId,
+      status: "downloaded" as const,
+    }
+  } catch (error) {
+    return {
+      storageId,
+      name: name ?? null,
+      size: null,
+      type: null,
+      archivedPath: null,
+      ticketId,
+      commentId,
+      status: "failed" as const,
+      error: error instanceof Error ? error.message : String(error),
+    }
+  }
+}
+
+async function downloadAttachmentsFromArchive(
+  items: ArchiveItem[],
+  archiveDir: string,
+  client: ReturnType<typeof createConvexClient>
+) {
+  const unique = new Map<string, { ticketId: string; commentId: string | null; name: string | null }>()
+  for (const item of items) {
+    for (const comment of item.comments) {
+      const commentId = (comment as { _id?: string })._id ?? null
+      const attachments = (comment as { attachments?: Array<Record<string, unknown>> }).attachments ?? []
+      for (const attachment of attachments) {
+        const storageId = typeof attachment.storageId === "string" ? attachment.storageId : null
+        if (!storageId) continue
+        if (unique.has(storageId)) continue
+        unique.set(storageId, {
+          ticketId: String(item.ticket?._id ?? "unknown"),
+          commentId,
+          name: typeof attachment.name === "string" ? attachment.name : null,
+        })
+      }
+    }
+  }
+
+  const results: ArchivedAttachment[] = []
+  for (const [storageId, meta] of unique.entries()) {
+    const result = await downloadAttachment({
+      storageId,
+      name: meta.name,
+      ticketId: meta.ticketId,
+      commentId: meta.commentId,
+      archiveDir,
+      client,
+    })
+    results.push(result)
+  }
+
+  return results
+}
+
+export type ArchivedTicketLookup = {
+  record: ArchiveItem & { archivedAt: number; ticketId: string; tenantId: string }
+  file: string
+}
+
+export async function findArchivedTicket(ticketId: string) {
+  if (!ARCHIVE_ENABLED) {
+    return null
+  }
+  const archiveDir = ARCHIVE_DIR
+  const { readFile } = await import("node:fs/promises")
+
+  await mkdir(archiveDir, { recursive: true })
+  const fullPath = join(archiveDir, ARCHIVE_FILENAME)
+  const contents = await readFile(fullPath, "utf-8").catch(() => null)
+  if (!contents) return null
+
+  const lines = contents.split("\n").filter(Boolean)
+  for (const line of lines) {
+    try {
+      const parsed = JSON.parse(line) as ArchiveItem & { archivedAt: number; ticketId: string; tenantId: string }
+      if (String(parsed.ticketId) === String(ticketId)) {
+        return {
+          record: parsed,
+          file: fullPath,
+        } satisfies ArchivedTicketLookup
+      }
+    } catch {
+      continue
+    }
+  }
+
+  return null
+}
+
 export async function exportResolvedTicketsToDisk(options?: {
   days?: number
   limit?: number
   tenantId?: string
+  includeAttachments?: boolean
 }) {
+  if (!ARCHIVE_ENABLED) {
+    return {
+      written: 0,
+      attachments: { total: 0, failed: 0 },
+      file: null,
+    }
+  }
   const days = options?.days ?? 365
   const limit = options?.limit ?? 50
   const tenantId = options?.tenantId ?? DEFAULT_TENANT_ID
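Before the export hunk, a sketch of how the two small helpers above are expected to behave, inferred from the checks and regex in this diff (not from tests in the commit):

// resolveArchiveDir: empty, ".", or "./" falls back to <cwd>/.data/archives;
// absolute paths pass through; any other relative path throws.
resolveArchiveDir(undefined)       // => DEFAULT_ARCHIVE_DIR
resolveArchiveDir("/var/archives") // => "/var/archives"
resolveArchiveDir("relative/dir")  // throws Error

// normalizeFilename: every character outside [a-zA-Z0-9._-] becomes "_".
normalizeFilename("relatório final.pdf", "x.bin") // => "relat_rio_final.pdf"
normalizeFilename(null, "st_abc123.bin")          // => "st_abc123.bin"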
@@ -49,25 +240,59 @@ export async function exportResolvedTicketsToDisk(options?: {
     secret,
   })) as ExportResponse
 
-  const archiveDir = env.ARCHIVE_DIR ?? "./archives"
-  const filename = `tickets-archive-${nowIso()}-resolved-${days}d.jsonl`
-  const fullPath = join(archiveDir, filename)
+  const archiveDir = ARCHIVE_DIR
+  const fullPath = join(archiveDir, ARCHIVE_FILENAME)
   await mkdir(dirname(fullPath), { recursive: true })
 
-  const lines = res.items.map((item) =>
-    JSON.stringify({
-      ticketId: item.ticket?._id ?? null,
+  let attachments: ArchivedAttachment[] = []
+  if (options?.includeAttachments ?? true) {
+    attachments = await downloadAttachmentsFromArchive(res.items, archiveDir, client)
+  }
+
+  const byStorageId = new Map<string, ArchivedAttachment>()
+  for (const att of attachments) {
+    byStorageId.set(att.storageId, att)
+  }
+
+  const lines = res.items.map((item) => {
+    const ticketId = String(item.ticket?._id ?? "")
+    const ticketAttachments = item.comments.flatMap((comment) => {
+      const commentId = (comment as { _id?: string })._id ?? null
+      const raw = (comment as { attachments?: Array<Record<string, unknown>> }).attachments ?? []
+      return raw.map((att) => {
+        const storageId = typeof att.storageId === "string" ? att.storageId : null
+        const archived = storageId ? byStorageId.get(storageId) : null
+        return {
+          storageId,
+          name: typeof att.name === "string" ? att.name : null,
+          size: typeof att.size === "number" ? att.size : null,
+          type: typeof att.type === "string" ? att.type : null,
+          archivedPath: archived?.archivedPath ?? null,
+          status: archived?.status ?? "skipped",
+          ticketId,
+          commentId,
+        }
+      })
+    })
+
+    return JSON.stringify({
+      ticketId,
       tenantId,
       archivedAt: Date.now(),
       ticket: item.ticket,
       comments: item.comments,
       events: item.events,
+      attachments: ticketAttachments,
     })
-  )
+  })
   await writeFile(fullPath, lines.join("\n"), { encoding: "utf-8" })
 
   return {
     written: res.items.length,
+    attachments: {
+      total: attachments.length,
+      failed: attachments.filter((a) => a.status === "failed").length,
+    },
     file: fullPath,
   }
 }
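The whole feature is gated by environment flags, so a plausible way to exercise it locally (flag values and the ticket id are assumptions; the function names come from this diff):

// Assumed local env: LOCAL_ARCHIVE_ENABLED="true", ARCHIVE_DIR unset,
// so output lands in <cwd>/.data/archives/tickets-archive-latest.jsonl.
const summary = await exportResolvedTicketsToDisk({ days: 30, limit: 100 })
console.log(summary) // { written, attachments: { total, failed }, file }

// Later, a resolved ticket can be looked up straight from the local JSONL file.
const hit = await findArchivedTicket("t_1") // hypothetical ticket id
if (hit) console.log(hit.file, hit.record.archivedAt)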