feat: enhance S3 error handling and retry logic in previewLatestForUser function
This commit is contained in:
parent
eb99d54453
commit
5764ce5cdc
@ -253,6 +253,19 @@ function streamToString(s3BodyStream, templateId) {
|
||||
});
|
||||
}
|
||||
|
||||
// Normalize anything thrown during an S3 operation into a plain,
// JSON-serializable object suitable for structured logging.
//
// @param {unknown} err - value caught from an S3 call (Error, string, or other)
// @returns {object|null} null when err is absent/falsy; otherwise a flat
//   record of the loggable fields. Fields missing on err come back undefined
//   and are dropped by JSON serialization in the logger.
function formatS3Error(err) {
  if (!err) return null;
  // Thrown primitives (strings, numbers, booleans) carry no properties to
  // pick off — wrap their string form so the log entry always has a message.
  // (For strings this is identical to returning { message: err }.)
  if (typeof err !== 'object') return { message: String(err) };
  return {
    name: err.name,
    message: err.message,
    code: err.code,
    stack: err.stack,
    // AWS SDK v3 attaches HTTP status / attempt info / request ids here.
    metadata: err.$metadata,
    // Prefer the nested cause's message; fall back to the raw cause value.
    cause: err.cause?.message || err.cause
  };
}
|
||||
|
||||
// Ensure HTML is a valid document
|
||||
function ensureHtmlDocument(html) {
|
||||
// If it already looks like a full HTML doc, return as is
|
||||
@ -1410,6 +1423,8 @@ exports.previewLatestForUser = async (req, res) => {
|
||||
const allowedContractTypes = ['contract', 'gdpr'];
|
||||
const contractType = allowedContractTypes.includes(contractTypeParam) ? contractTypeParam : 'contract';
|
||||
|
||||
let folderStructureWarning = null;
|
||||
|
||||
logger.info('[previewLatestForUser] start', { targetUserId, contractType, requestId: req.id });
|
||||
|
||||
if (!req.user || !['admin', 'super_admin'].includes(req.user.role)) {
|
||||
@ -1514,7 +1529,7 @@ exports.previewLatestForUser = async (req, res) => {
|
||||
}
|
||||
|
||||
try {
|
||||
const s3File = sharedExoscaleClient || new S3Client({
|
||||
const createClient = () => sharedExoscaleClient || new S3Client({
|
||||
region: process.env.EXOSCALE_REGION,
|
||||
endpoint: process.env.EXOSCALE_ENDPOINT,
|
||||
forcePathStyle: true,
|
||||
@ -1523,6 +1538,7 @@ exports.previewLatestForUser = async (req, res) => {
|
||||
secretAccessKey: process.env.EXOSCALE_SECRET_KEY
|
||||
}
|
||||
});
|
||||
let s3File = createClient();
|
||||
logger.info('[previewLatestForUser] attempting S3 fetch', {
|
||||
bucket: process.env.EXOSCALE_BUCKET,
|
||||
key: doc.object_storage_id,
|
||||
@ -1530,7 +1546,27 @@ exports.previewLatestForUser = async (req, res) => {
|
||||
contractType
|
||||
});
|
||||
const cmd = new GetObjectCommand({ Bucket: process.env.EXOSCALE_BUCKET, Key: doc.object_storage_id });
|
||||
const fileObj = await s3File.send(cmd);
|
||||
let fileObj;
|
||||
try {
|
||||
fileObj = await s3File.send(cmd);
|
||||
} catch (firstErr) {
|
||||
logger.warn('[previewLatestForUser] S3 fetch failed (shared client), retrying with fresh client', {
|
||||
key: doc.object_storage_id,
|
||||
userId: targetUserId,
|
||||
contractType,
|
||||
error: formatS3Error(firstErr)
|
||||
});
|
||||
s3File = new S3Client({
|
||||
region: process.env.EXOSCALE_REGION,
|
||||
endpoint: process.env.EXOSCALE_ENDPOINT,
|
||||
forcePathStyle: true,
|
||||
credentials: {
|
||||
accessKeyId: process.env.EXOSCALE_ACCESS_KEY,
|
||||
secretAccessKey: process.env.EXOSCALE_SECRET_KEY
|
||||
}
|
||||
});
|
||||
fileObj = await s3File.send(cmd);
|
||||
}
|
||||
const pdfBuffer = await s3BodyToBuffer(fileObj.Body);
|
||||
if (!pdfBuffer || !pdfBuffer.length) {
|
||||
logger.warn('[previewLatestForUser] S3 returned empty Body', { key: doc.object_storage_id, userId: targetUserId, contractType });
|
||||
@ -1554,7 +1590,12 @@ exports.previewLatestForUser = async (req, res) => {
|
||||
if (folderStructureWarning) res.setHeader('X-Contract-Preview-Warning', folderStructureWarning);
|
||||
return res.status(404).json(jsonWithWarning({ message: `${contractType.toUpperCase()} file not available` }));
|
||||
}
|
||||
logger.error('[previewLatestForUser] S3 fetch failed', e && (e.stack || e.message));
|
||||
logger.error('[previewLatestForUser] S3 fetch failed', {
|
||||
key: doc.object_storage_id,
|
||||
userId: targetUserId,
|
||||
contractType,
|
||||
error: formatS3Error(e)
|
||||
});
|
||||
if (folderStructureWarning) res.setHeader('X-Contract-Preview-Warning', folderStructureWarning);
|
||||
return res.status(500).json(jsonWithWarning({ message: 'Failed to load user document' }));
|
||||
}
|
||||
|
||||
Loading…
Reference in New Issue
Block a user