espace-paie-odentas/lambda-odentas-pades-sign/helpers/pades_backup.js
odentas b790faf12c feat: Implémentation complète du système Odentas Sign
- Remplacement de DocuSeal par solution souveraine Odentas Sign
- Système d'authentification OTP pour signataires (bcryptjs + JWT)
- 8 routes API: send-otp, verify-otp, sign, pdf-url, positions, status, webhook, signers
- Interface moderne avec canvas de signature et animations (framer-motion, confetti)
- Système de templates pour auto-détection des positions de signature (CDDU, RG, avenants)
- PDF viewer avec @react-pdf-viewer (compatible Next.js)
- Stockage S3: source/, signatures/, evidence/, signed/, certs/
- Tables Supabase: sign_requests, signers, sign_positions, sign_events, sign_assets
- Evidence bundle automatique (JSON metadata + timestamps)
- Templates emails: OTP et completion
- Scripts Lambda prêts: pades-sign (KMS seal) et tsaStamp (RFC3161)
- Mode test détecté automatiquement (emails whitelist)
- Tests complets avec PDF CDDU réel (2 signataires)
2025-10-27 19:03:07 +01:00

662 lines
No EOL
24 KiB
JavaScript
Raw Blame History

This file contains ambiguous Unicode characters

This file contains Unicode characters that might be confused with other characters. If you think that this is intentional, you can safely ignore this warning. Use the Escape button to reveal them.

import { PDFDocument } from 'pdf-lib';
import * as asn1js from 'asn1js';
import {
Certificate,
SignedData,
ContentInfo,
IssuerAndSerialNumber,
Attribute,
AlgorithmIdentifier,
EncapsulatedContentInfo,
SignerInfo,
SignedAndUnsignedAttributes
} from 'pkijs';
import crypto from 'node:crypto';
import { Buffer } from 'node:buffer';
// pkijs setup (global webcrypto) — expose Node.js 18's native Web Crypto API
// as `globalThis.crypto`, which pkijs expects to find in non-browser runtimes.
// Guarded so an already-present global implementation is never clobbered.
if (typeof globalThis.crypto === 'undefined') {
  globalThis.crypto = crypto.webcrypto;
}
// =====================================================
// PDF helpers — PAdES incremental update avec /Sig + /ByteRange
// =====================================================
// Prepare the signing context for a PDF: parse its structure and build the
// incremental-update objects containing the /Sig dictionary with its
// /ByteRange and /Contents placeholders.
//
// @param pdfBytes Buffer|Uint8Array — original PDF bytes
// @returns { originalPdf, pdfStructure, incrementalUpdate, signingTime }
export async function preparePdfWithPlaceholder(pdfBytes) {
  const originalPdf = Buffer.from(pdfBytes);
  const pdfStructure = parsePdfStructure(originalPdf);
  // Generate the timestamp ONCE so the digest pass and the finalisation pass
  // embed the exact same /M date (format D:YYYYMMDDHHMMSS).
  const signingTime = new Date().toISOString().replace(/[-:T.Z]/g, '').slice(0, 14);
  // BUG FIX: this call previously passed FOUR arguments
  // (structure, byteRangePlaceholder, contentsPlaceholder, signingTime) to a
  // three-parameter function, so the 64KB contents placeholder was shifted
  // into the signingTime slot and written into the /M (D:...) entry.
  // buildIncrementalUpdate generates its own placeholders internally; its
  // second parameter is unused, so null is passed explicitly.
  const incrementalUpdate = buildIncrementalUpdate(pdfStructure, null, signingTime);
  return { originalPdf, pdfStructure, incrementalUpdate, signingTime };
}
// Scan a raw PDF (decoded as latin1 text) and collect the structural
// references needed to append an incremental revision: previous xref offset,
// next free object number, and the /Root, /Info, /AcroForm, /Pages and
// first-page references when they can be located.
//
// NOTE(review): this regex-based scan only handles classic xref tables with a
// `trailer << >>` dictionary; PDFs using cross-reference streams (PDF 1.5+)
// will mis-parse — confirm upstream that inputs are compatible.
function parsePdfStructure(pdfBytes) {
  const raw = pdfBytes.toString('latin1');
  // 1. Last startxref, anchored at end of file
  const tail = raw.match(/startxref\s+(\d+)\s+%%EOF\s*$/);
  if (!tail) throw new Error('startxref non trouvé dans le PDF');
  const prevStartxref = parseInt(tail[1], 10);
  // 2. Highest object number currently in use
  const objHeader = /(\d+)\s+\d+\s+obj/g;
  let maxObjNum = 0;
  for (let hit = objHeader.exec(raw); hit !== null; hit = objHeader.exec(raw)) {
    maxObjNum = Math.max(maxObjNum, parseInt(hit[1], 10));
  }
  // 3. Trailer dictionary → /Root, /Info, /Size
  let rootRef = null;
  let infoRef = null;
  let pagesRef = null;
  let sizeNum = maxObjNum + 1; // fallback when the trailer carries no /Size
  const trailerHit = raw.match(/trailer\s*<<([^>]*)>>/s);
  if (trailerHit) {
    const dict = trailerHit[1];
    const root = dict.match(/\/Root\s+(\d+)\s+\d+\s+R/);
    if (root) rootRef = parseInt(root[1], 10);
    const info = dict.match(/\/Info\s+(\d+)\s+\d+\s+R/);
    if (info) infoRef = parseInt(info[1], 10);
    const size = dict.match(/\/Size\s+(\d+)/);
    if (size) sizeNum = parseInt(size[1], 10);
  }
  // 4. Catalog object → /AcroForm and /Pages references
  let acroFormRef = null;
  if (rootRef) {
    const catalogHit = raw.match(new RegExp(`${rootRef}\\s+\\d+\\s+obj\\s*<<([^>]*(?:>>.*?<<)*)>>`, 's'));
    if (catalogHit) {
      const acro = catalogHit[1].match(/\/AcroForm\s+(\d+)\s+\d+\s+R/);
      if (acro) acroFormRef = parseInt(acro[1], 10);
      const pages = catalogHit[1].match(/\/Pages\s+(\d+)\s+\d+\s+R/);
      if (pages) pagesRef = parseInt(pages[1], 10);
    }
  }
  // 5. First kid of /Pages — the page the signature widget attaches to
  let firstPageRef = null;
  if (pagesRef) {
    const pagesHit = raw.match(new RegExp(`${pagesRef}\\s+\\d+\\s+obj\\s*<<([^>]*(?:>>.*?<<)*)>>`, 's'));
    const kids = pagesHit && pagesHit[1].match(/\/Kids\s*\[\s*(\d+)\s+\d+\s+R/);
    if (kids) firstPageRef = parseInt(kids[1], 10);
  }
  return {
    prevStartxref,
    nextObjNum: maxObjNum + 1,
    rootRef,
    infoRef,
    acroFormRef,
    firstPageRef,
    pagesRef,
    sizeNum
  };
}
// Build the PDF incremental-revision objects: /Sig dictionary with /ByteRange
// and /Contents placeholders, signature widget annotation, /AcroForm, /Perms
// (DocMDP) and a replacement /Catalog.
//
// NOTE(review): the `cmsHex` parameter is never used — the /Contents
// placeholder is always regenerated locally. The caller in this file passes
// FOUR arguments, which shifts its contentsPlaceholder into the `signingTime`
// slot; verify call sites before relying on the /M entry.
function buildIncrementalUpdate(pdfStructure, cmsHex, signingTime) {
  const {
    prevStartxref,
    nextObjNum,
    rootRef,
    acroFormRef,
    firstPageRef,
    pagesRef,
    sizeNum
  } = pdfStructure;
  let objNum = nextObjNum;
  const newObjects = [];
  // Placeholder size for /Contents (must be large enough for the CMS hex)
  const contentsPlaceholderSize = 65536; // 32KB * 2 (hex)
  const contentsPlaceholder = '<' + '0'.repeat(contentsPlaceholderSize) + '>';
  // ByteRange placeholder: real offsets are patched in later, so the text must
  // have a fixed width — 4 x 10 zero-padded digits, no variable spacing
  const byteRangePlaceholder = '[0000000000 0000000000 0000000000 0000000000]';
  // 1. /TransformParams dictionary for DocMDP level 1 (/P 1 = no changes allowed)
  const transformParamsObjNum = objNum++;
  const transformParamsObj = `${transformParamsObjNum} 0 obj
<<
/Type /TransformParams
/V /1.2
/P 1
>>
endobj
`;
  newObjects.push(transformParamsObj);
  // 2. /Sig dictionary with a /Reference entry for DocMDP certification
  const sigObjNum = objNum++;
  const sigObj = `${sigObjNum} 0 obj
<<
/Type /Sig
/Filter /Adobe.PPKLite
/SubFilter /ETSI.CAdES.detached
/ByteRange ${byteRangePlaceholder}
/Contents ${contentsPlaceholder}
/M (D:${signingTime})
/Reference [<<
/Type /SigRef
/TransformMethod /DocMDP
/TransformParams ${transformParamsObjNum} 0 R
>>]
>>
endobj
`;
  newObjects.push(sigObj);
  // 3. Signature widget annotation (zero-area /Rect, so nothing is painted),
  // pointing at the /Sig value and the first page
  const widgetObjNum = objNum++;
  const widgetObj = `${widgetObjNum} 0 obj
<<
/Type /Annot
/Subtype /Widget
/FT /Sig
/T (Signature1)
/V ${sigObjNum} 0 R
/P ${firstPageRef} 0 R
/Rect [0 0 0 0]
/F 132
>>
endobj
`;
  newObjects.push(widgetObj);
  // 4. /AcroForm holding the signature field
  const acroFormObjNum = objNum++;
  const acroFormObj = `${acroFormObjNum} 0 obj
<<
/Fields [${widgetObjNum} 0 R]
/SigFlags 3
>>
endobj
`;
  newObjects.push(acroFormObj);
  // 5. /Perms locking the document (DocMDP level 1)
  const permsObjNum = objNum++;
  const permsObj = `${permsObjNum} 0 obj
<<
/DocMDP ${sigObjNum} 0 R
>>
endobj
`;
  newObjects.push(permsObj);
  // 6. Replacement catalog referencing /AcroForm, /Pages and /Perms.
  // NOTE(review): this minimal catalog drops any other keys the original
  // catalog may have carried (/Names, /Outlines, /Metadata, ...) — confirm
  // this is acceptable for the documents being signed.
  const catalogObjNum = objNum++;
  const catalogObj = `${catalogObjNum} 0 obj
<<
/Type /Catalog
/Pages ${pagesRef} 0 R
/AcroForm ${acroFormObjNum} 0 R
/Perms ${permsObjNum} 0 R
>>
endobj
`;
  newObjects.push(catalogObj);
  // 7. Page update intended to attach the widget via /Annots.
  // NOTE(review): this emits a BRAND-NEW object number (objNum++) instead of
  // re-emitting object `firstPageRef`, and nothing else references the new
  // object — it looks like the original page is never actually updated.
  // Verify against assemblePdfWithRevision, whose xref numbering assumes the
  // emitted objects are contiguous from nextObjNum.
  if (firstPageRef) {
    const pageObjNum = objNum++;
    const pageObj = `${pageObjNum} 0 obj
<<
/Type /Page
/Annots [${widgetObjNum} 0 R]
>>
endobj
`;
    newObjects.push(pageObj);
  }
  return {
    sigObjNum,
    widgetObjNum,
    acroFormObjNum,
    catalogObjNum,
    newObjects,
    nextObjNum: objNum,
    contentsPlaceholder,
    contentsPlaceholderSize,
    byteRangePlaceholder
  };
}
// Embed the final CMS (DER) into the pre-assembled PDF revision and return the
// signed PDF bytes: patch the /ByteRange placeholder with the real offsets and
// replace the zero-filled /Contents payload with the hex-encoded CMS, padded
// with '0' so the overall file length — and therefore every offset — is
// unchanged.
//
// @param byteRangeInfo        { signatureSize } — maximum CMS size in bytes
// @param cmsDer               Buffer — DER-encoded CMS SignedData
// @param tempPdfWithRevision  Buffer — PDF already containing both placeholders
// @param contentsStart/contentsEnd — offsets of the hex payload inside /Contents <...>
// (pdfWithPlaceholder, pdfStructure and incrementalUpdate are kept for
// signature compatibility but are not used here.)
// @returns Buffer — the final, signed PDF
// @throws if the CMS exceeds the placeholder capacity or the placeholder is missing
export async function finalizePdfWithCms(pdfWithPlaceholder, byteRangeInfo, pdfStructure, incrementalUpdate, cmsDer, tempPdfWithRevision, contentsStart, contentsEnd) {
  const { signatureSize } = byteRangeInfo;
  // Hex-encode the CMS (PDF hex strings are case-insensitive; uppercase by convention here)
  const cmsHex = cmsDer.toString('hex').toUpperCase();
  if (cmsHex.length > signatureSize * 2) {
    throw new Error(`CMS trop grand pour le placeholder (${cmsHex.length / 2} > ${signatureSize})`);
  }
  // Work on the already-assembled temporary PDF
  let finalPdfStr = tempPdfWithRevision.toString('latin1');
  // Final /ByteRange: [0, len(part1), offset(part2), len(part2)]
  const byteRange = [0, contentsStart, contentsEnd, tempPdfWithRevision.length - contentsEnd];
  console.log('[finalizePdfWithCms] ByteRange:', byteRange);
  console.log('[finalizePdfWithCms] CMS hex length:', cmsHex.length);
  // Patch the /ByteRange placeholder (zero-padded so the length is identical)
  const byteRangeStr = `[${String(byteRange[0]).padStart(10, '0')} ${String(byteRange[1]).padStart(10, '0')} ${String(byteRange[2]).padStart(10, '0')} ${String(byteRange[3]).padStart(10, '0')}]`;
  finalPdfStr = finalPdfStr.replace(
    '[0000000000 0000000000 0000000000 0000000000]',
    byteRangeStr
  );
  // FIX: derive the placeholder capacity from the document itself instead of a
  // hard-coded 65536, so the padding always matches the actual placeholder
  // length and byte offsets are preserved even if the placeholder size changes.
  const contentsMatch = finalPdfStr.match(/\/Contents <(0+)>/);
  if (!contentsMatch) throw new Error('Placeholder /Contents non trouvé');
  const capacity = contentsMatch[1].length;
  if (cmsHex.length > capacity) {
    throw new Error(`CMS trop grand pour le placeholder (${cmsHex.length / 2} > ${capacity / 2})`);
  }
  const cmsPadded = cmsHex.padEnd(capacity, '0');
  finalPdfStr = finalPdfStr.replace(
    /\/Contents <0+>/,
    `/Contents <${cmsPadded}>`
  );
  const finalBuffer = Buffer.from(finalPdfStr, 'latin1');
  // Final sanity check: recompute the digest over the two signed ranges
  const validationPart1 = finalBuffer.slice(byteRange[0], byteRange[0] + byteRange[1]);
  const validationPart2 = finalBuffer.slice(byteRange[2], byteRange[2] + byteRange[3]);
  const validationDigest = crypto.createHash('sha256').update(validationPart1).update(validationPart2).digest();
  console.log('[finalizePdfWithCms] VALIDATION - PDF digest recalculé:', validationDigest.toString('hex'));
  return finalBuffer;
}
// =====================================================
// SignedAttributes (DER) + digest SHA-256 pour KMS
// =====================================================
// OIDs used in the CMS SignedAttributes (PKCS#9 / RFC 5652)
const OID_ID_DATA = '1.2.840.113549.1.7.1'; // id-data (ContentInfo)
const OID_ATTR_CONTENT_TYPE = '1.2.840.113549.1.9.3'; // content-type attribute
const OID_ATTR_SIGNING_TIME = '1.2.840.113549.1.9.5'; // signing-time attribute
const OID_ATTR_MESSAGE_DIGEST = '1.2.840.113549.1.9.4'; // message-digest attribute
// Compute the SignedAttributes digest for a PDF whose incremental revision
// still holds the zero-filled /Contents placeholder.
//
// Steps: re-assemble the revision on top of the original bytes, locate the
// placeholder hex run, SHA-256 the two byte ranges around it, then derive the
// SignedAttributes DER and its digest (the value KMS will sign).
//
// @returns { signedAttributesDer, signedAttributesDigest,
//            tempPdfWithRevision, contentsStart, contentsEnd }
export async function buildSignedAttributesDigest(pdfWithPlaceholder, byteRangeInfo, pdfStructure, incrementalUpdate) {
  const { originalPdfLength } = byteRangeInfo;
  // Keep only the untouched original bytes, then append the revision
  const originalPdf = pdfWithPlaceholder.slice(0, originalPdfLength);
  const tempPdfWithRevision = assemblePdfWithRevision(originalPdf, pdfStructure, incrementalUpdate);
  // Locate the zero-filled /Contents payload in the assembled document
  const placeholderHit = tempPdfWithRevision.toString('latin1').match(/\/Contents <(0+)>/);
  if (!placeholderHit) throw new Error('Placeholder /Contents non trouvé');
  const contentsStart = placeholderHit.index + '/Contents <'.length;
  const contentsEnd = contentsStart + placeholderHit[1].length;
  console.log('[buildSignedAttributesDigest] ByteRange calculé:', [0, contentsStart, contentsEnd, tempPdfWithRevision.length - contentsEnd]);
  // SHA-256 over [0, contentsStart) ++ [contentsEnd, EOF)
  const hasher = crypto.createHash('sha256');
  hasher.update(tempPdfWithRevision.slice(0, contentsStart));
  hasher.update(tempPdfWithRevision.slice(contentsEnd));
  const pdfDigest = hasher.digest();
  console.log('[buildSignedAttributesDigest] PDF digest (SHA256):', pdfDigest.toString('hex'));
  const { signedAttributesDer, signedAttributesDigest } =
    buildSignedAttributesDigestFromPdfDigest(pdfDigest);
  return { signedAttributesDer, signedAttributesDigest, tempPdfWithRevision, contentsStart, contentsEnd };
}
// Append the incremental revision to the original PDF bytes: a newline, the
// new objects, then a classic xref subsection + trailer pointing back at the
// previous xref via /Prev. Returns the concatenated Buffer.
function assemblePdfWithRevision(originalPdf, pdfStructure, incrementalUpdate) {
  const pieces = [originalPdf, Buffer.from('\n', 'latin1')];
  let cursor = originalPdf.length + 1;
  // Emit each new object, recording its byte offset for the xref table
  const entries = incrementalUpdate.newObjects.map((objStr, idx) => {
    const at = cursor;
    pieces.push(Buffer.from(objStr, 'latin1'));
    cursor += Buffer.byteLength(objStr, 'latin1');
    return { objNum: pdfStructure.nextObjNum + idx, offset: at, gen: 0 };
  });
  const xrefOffset = cursor;
  // xref: free-list head subsection, then one contiguous subsection of new objects
  const xrefLines = [
    'xref',
    '0 1',
    '0000000000 65535 f ',
    `${pdfStructure.nextObjNum} ${entries.length}`,
    ...entries.map(e => `${String(e.offset).padStart(10, '0')} ${String(e.gen).padStart(5, '0')} n `)
  ];
  // Trailer: updated /Size and /Root, /Prev chains to the previous revision
  let trailerText = `trailer\n<<\n/Size ${pdfStructure.nextObjNum + entries.length}\n/Prev ${pdfStructure.prevStartxref}\n/Root ${incrementalUpdate.catalogObjNum} 0 R\n`;
  if (pdfStructure.infoRef) {
    trailerText += `/Info ${pdfStructure.infoRef} 0 R\n`;
  }
  trailerText += `>>\nstartxref\n${xrefOffset}\n%%EOF\n`;
  pieces.push(Buffer.from(xrefLines.join('\n') + '\n' + trailerText, 'latin1'));
  return Buffer.concat(pieces);
}
// Build the DER-encoded CMS SignedAttributes (content-type, signing-time,
// message-digest) and the SHA-256 digest over that DER — the digest is the
// exact value handed to AWS KMS for signing.
//
// @param pdfMessageDigest Buffer — SHA-256 over the PDF ByteRange content
// @returns { signedAttributesDer: Buffer, signedAttributesDigest: Buffer }
export function buildSignedAttributesDigestFromPdfDigest(pdfMessageDigest) {
  console.log('[buildSignedAttributesDigest] pdfMessageDigest:', pdfMessageDigest.toString('hex'));
  // Attribute ::= SEQUENCE { attrType OBJECT IDENTIFIER, attrValues SET OF ANY }
  const attrContentType = new asn1js.Sequence({
    value: [
      new asn1js.ObjectIdentifier({ value: OID_ATTR_CONTENT_TYPE }),
      new asn1js.Set({ value: [ new asn1js.ObjectIdentifier({ value: OID_ID_DATA }) ] })
    ]
  });
  // FIX: RFC 5652 §11.3 requires signing-time to be encoded as UTCTime for
  // dates between 1950 and 2049 — GeneralizedTime here broke strict CMS
  // validators.
  const attrSigningTime = new asn1js.Sequence({
    value: [
      new asn1js.ObjectIdentifier({ value: OID_ATTR_SIGNING_TIME }),
      new asn1js.Set({ value: [ new asn1js.UTCTime({ valueDate: new Date() }) ] })
    ]
  });
  // FIX: hand asn1js an exact ArrayBuffer slice instead of the raw Buffer —
  // Node Buffers can be views into a shared pool, and valueHex expects an
  // ArrayBuffer (same slicing pattern used elsewhere in this file).
  const digestView = pdfMessageDigest.buffer.slice(
    pdfMessageDigest.byteOffset,
    pdfMessageDigest.byteOffset + pdfMessageDigest.byteLength
  );
  const attrMessageDigest = new asn1js.Sequence({
    value: [
      new asn1js.ObjectIdentifier({ value: OID_ATTR_MESSAGE_DIGEST }),
      new asn1js.Set({ value: [ new asn1js.OctetString({ valueHex: digestView }) ] })
    ]
  });
  // SET OF Attribute — DER ordering (by encoded value) is applied by asn1js
  const signedAttrsSet = new asn1js.Set({ value: [attrContentType, attrSigningTime, attrMessageDigest] });
  const signedAttributesDer = Buffer.from(signedAttrsSet.toBER(false));
  // What KMS signs is SHA-256( DER(SignedAttributes) )
  const signedAttributesDigest = crypto.createHash('sha256').update(signedAttributesDer).digest();
  console.log('[buildSignedAttributesDigest] signedAttributesDigest (pour KMS):', signedAttributesDigest.toString('hex'));
  return { signedAttributesDer, signedAttributesDigest };
}
// =====================================================
// PEM -> pkijs.Certificate(s)
// =====================================================
// Parse a certificate chain supplied as Buffer or string into pkijs
// Certificate objects. Tries, in order:
//   1. PEM text with BEGIN/END CERTIFICATE blocks (possibly several)
//   2. Raw DER: either a single X.509 certificate or a PKCS#7/CMS SignedData
//      bundle carrying certificates
//   3. Bare base64 (no PEM headers) of a single certificate
//
// @param chainData Buffer|string
// @returns { certsPkijs: Certificate[], signerCert: Certificate } where
//          signerCert is the FIRST certificate found (assumed to be the leaf —
//          TODO confirm callers always put the signer first in the chain)
// @throws if nothing parseable is found
export function parsePemChainToPkijsCerts(chainData) {
  // Debug preview of the raw input (best-effort, never fatal)
  try {
    if (Buffer.isBuffer(chainData)) {
      const previewHex = chainData.slice(0, 16).toString('hex');
      console.log('[chain raw] length=', chainData.length, ' headHex=', previewHex);
    }
  } catch {}
  let pemStr = null;
  let derBuf = null;
  // Normalise the input (Buffer or string)
  if (Buffer.isBuffer(chainData)) {
    derBuf = chainData; // raw binary: could be PEM bytes or DER
    try { pemStr = chainData.toString('utf8'); } catch {}
  } else if (typeof chainData === 'string') {
    pemStr = chainData;
  }
  // Attempt 1: PEM parse (BEGIN/END headers)
  if (typeof pemStr === 'string' && pemStr.length > 0) {
    try {
      // strip optional BOM + normalise line endings
      if (pemStr.charCodeAt(0) === 0xFEFF) pemStr = pemStr.slice(1);
      pemStr = pemStr.replace(/\r\n?/g, '\n');
      const preview = String(pemStr).slice(0, 160);
      console.log('[chain.pem preview]', preview.replace(/\n/g, '\\n'));
      const blocks = splitPemBlocks(pemStr)
        .filter(b => b.type === 'CERTIFICATE')
        .map(b => Buffer.from(b.body, 'base64'));
      if (blocks.length > 0) {
        const certsPkijs = blocks.map(der => {
          // slice() to get an ArrayBuffer covering exactly this Buffer's view
          const asn1 = asn1js.fromBER(der.buffer.slice(der.byteOffset, der.byteOffset + der.byteLength));
          if (asn1.offset === -1) throw new Error('ASN.1 parse error on cert (PEM)');
          return new Certificate({ schema: asn1.result });
        });
        return { certsPkijs, signerCert: certsPkijs[0] };
      }
    } catch (e) {
      console.log('[parsePemChainToPkijsCerts] PEM parse error, trying DER/base64:', String(e));
    }
  }
  // Attempt 2: raw DER (tried even if the first byte is not 0x30, with logs)
  if (derBuf && derBuf.length >= 4) {
    try {
      const asn1Any = asn1js.fromBER(derBuf.buffer.slice(derBuf.byteOffset || 0, (derBuf.byteOffset || 0) + derBuf.byteLength));
      if (asn1Any.offset !== -1) {
        // Case: a single X.509 Certificate
        try {
          const cert = new Certificate({ schema: asn1Any.result });
          if (cert && cert.serialNumber) {
            console.log('[DER] Parsed as X.509 Certificate');
            return { certsPkijs: [cert], signerCert: cert };
          }
        } catch {}
        // Case: a ContentInfo/SignedData carrying certificates
        try {
          const ci = new ContentInfo({ schema: asn1Any.result });
          if (ci && ci.contentType === '1.2.840.113549.1.7.2') { // signedData
            const sd = new SignedData({ schema: ci.content });
            if (Array.isArray(sd.certificates) && sd.certificates.length) {
              console.log(`[DER] Parsed PKCS#7 with ${sd.certificates.length} cert(s)`);
              const certsPkijs = sd.certificates;
              return { certsPkijs, signerCert: certsPkijs[0] };
            }
          }
        } catch {}
      }
    } catch (e) {
      console.log('[DER parse] error:', String(e));
    }
  }
  // Attempt 3: bare base64 without headers
  if (typeof pemStr === 'string' && pemStr.length > 0) {
    try {
      const b64 = pemStr.replace(/[^A-Za-z0-9+/=]/g, '');
      if (b64.length >= 128) { // anything shorter cannot be a real certificate
        const buf = Buffer.from(b64, 'base64');
        const asn1 = asn1js.fromBER(buf.buffer.slice(buf.byteOffset || 0, (buf.byteOffset || 0) + buf.byteLength));
        if (asn1.offset !== -1) {
          const cert = new Certificate({ schema: asn1.result });
          return { certsPkijs: [cert], signerCert: cert };
        }
      }
    } catch (e) {
      console.log('[parsePemChainToPkijsCerts] base64 parse error:', String(e));
    }
  }
  throw new Error('chain.pem vide ou invalide');
}
// Split PEM text into { type, body } blocks, where `type` is the upper-cased
// BEGIN label (e.g. 'CERTIFICATE') and `body` is the base64 payload with all
// whitespace removed. A permissive regex pass is tried first; if it yields
// nothing, a line-by-line fallback scan is used. Returns [] when no block is
// found.
function splitPemBlocks(pem) {
  try {
    if (pem && pem.charCodeAt && pem.charCodeAt(0) === 0xFEFF) pem = pem.slice(1); // strip BOM
    pem = String(pem);
  } catch {}
  // Normalise line endings + debug logs
  pem = pem.replace(/\r\n?/g, '\n');
  console.log('[splitPemBlocks] input length =', pem.length);
  console.log('[splitPemBlocks] head =', pem.slice(0, 80).replace(/\n/g, '\\n'));
  if (/BEGIN CERTIFICATE/.test(pem) === false) {
    console.log('[splitPemBlocks] Aucun header PEM détecté dans le texte');
  }
  // Pass 1: ultra-permissive regex with a backreference on the label
  const headerRe = /-+BEGIN\s+([^\-\n\r]+)-+\s*([\s\S]*?)\s*-+END\s+\1-+/gi;
  const found = [];
  for (let hit = headerRe.exec(pem); hit !== null; hit = headerRe.exec(pem)) {
    const label = hit[1].trim().toUpperCase().replace(/-+$/g, ''); // drop stray trailing dashes
    found.push({ type: label, body: hit[2].replace(/\s+/g, '') });
  }
  if (found.length > 0) {
    console.log(`[splitPemBlocks] regex -> ${found.length} bloc(s): ` + found.map(b => b.type).join(', '));
    return found;
  }
  // Pass 2: robust line-by-line fallback
  console.log('[splitPemBlocks] regex a échoué, tentative fallback ligne-à-ligne');
  const collected = [];
  const rows = pem.split('\n');
  for (let idx = 0; idx < rows.length; idx++) {
    const row = rows[idx].trim();
    if (!/^-+BEGIN /.test(row) || !/-+$/.test(row)) continue;
    const label = row.replace(/^-+BEGIN\s+/, '').replace(/-+$/, '').trim().toUpperCase();
    const endRe = new RegExp(`^-+END\\s+${label}-+$`, 'i');
    const chunk = [];
    idx++;
    // Accumulate base64 lines until the matching END marker (or EOF)
    while (idx < rows.length && !endRe.test(rows[idx].trim())) {
      const cleaned = rows[idx].trim();
      if (cleaned.length) chunk.push(cleaned); // skip blank lines
      idx++;
    }
    if (chunk.length) collected.push({ type: label, body: chunk.join('') });
  }
  if (collected.length === 0) {
    console.log('[splitPemBlocks] fallback: aucun bloc détecté');
  } else {
    console.log(`[splitPemBlocks] fallback -> ${collected.length} bloc(s): ` + collected.map(b => b.type).join(', '));
  }
  return collected;
}
// =====================================================
// CMS/PKCS#7 (SignedData) — construction complète (sans TSA pour l'instant)
// =====================================================
// Build a detached CMS/PKCS#7 SignedData (for /SubFilter ETSI.CAdES.detached)
// around an externally-produced signature.
//
// @param signedAttributesDer Buffer — DER of the SignedAttributes that were digested
// @param signatureBytes      Buffer — raw signature returned by KMS (RSASSA-PSS/SHA-256)
// @param chainData           Buffer|string — certificate chain (PEM/DER/base64)
// @returns Buffer — DER-encoded ContentInfo wrapping the SignedData
export async function buildCmsSignedData(signedAttributesDer, signatureBytes, chainData) {
  const { certsPkijs, signerCert } = parsePemChainToPkijsCerts(chainData);
  // EncapsulatedContentInfo (detached): eContentType = id-data, no eContent
  const encap = new EncapsulatedContentInfo({
    eContentType: OID_ID_DATA
    // eContent omitted for a detached signature
  });
  const signedData = new SignedData({
    version: 1,
    encapContentInfo: encap
  });
  // Embed the certificate chain (intended to exclude the root — TODO confirm
  // the chain handed in actually does)
  signedData.certificates = certsPkijs;
  // SignerInfo identifier (sid = IssuerAndSerialNumber of the first/leaf cert)
  const issuerAndSerial = new IssuerAndSerialNumber({
    issuer: signerCert.issuer,
    serialNumber: signerCert.serialNumber
  });
  // digestAlgorithm = SHA-256
  const digestAlgorithm = new AlgorithmIdentifier({ algorithmId: '2.16.840.1.101.3.4.2.1' });
  // signatureAlgorithm = RSASSA-PSS with SHA-256 / MGF1-SHA256 / saltLen=32
  const rsassaPssParams = new asn1js.Sequence({
    value: [
      // hashAlgorithm (sha256), context tag [0]
      new asn1js.Constructed({
        idBlock: { tagClass: 3, tagNumber: 0 }, // [0]
        value: [ new AlgorithmIdentifier({ algorithmId: '2.16.840.1.101.3.4.2.1' }).toSchema() ]
      }),
      // maskGenAlgorithm (mgf1 with sha256), context tag [1]
      new asn1js.Constructed({
        idBlock: { tagClass: 3, tagNumber: 1 }, // [1]
        value: [ new AlgorithmIdentifier({
          algorithmId: '1.2.840.113549.1.1.8', // mgf1
          algorithmParams: new AlgorithmIdentifier({ algorithmId: '2.16.840.1.101.3.4.2.1' }).toSchema()
        }).toSchema() ]
      }),
      // saltLength INTEGER 32, context tag [2]
      new asn1js.Primitive({ idBlock: { tagClass: 3, tagNumber: 2 }, valueHex: new asn1js.Integer({ value: 32 }).toBER(false) })
      // trailerField [3] defaults to 1 — omitted
    ]
  });
  const signatureAlgorithm = new AlgorithmIdentifier({ algorithmId: '1.2.840.113549.1.1.10' }); // rsassaPss
  signatureAlgorithm.algorithmParams = rsassaPssParams;
  // Re-create the SignedAttributes as pkijs objects from the provided DER so
  // the encoded CMS carries attributes byte-identical to what was digested
  const signedAttrsSet = parseSignedAttributesDerToPkijsSignedSet(signedAttributesDer);
  const signerInfo = new SignerInfo({
    version: 1,
    sid: issuerAndSerial,
    digestAlgorithm,
    signatureAlgorithm,
    signedAttrs: signedAttrsSet,
    // slice() to hand asn1js an ArrayBuffer covering exactly the signature bytes
    signature: new asn1js.OctetString({
      valueHex: signatureBytes.buffer.slice(
        signatureBytes.byteOffset || 0,
        (signatureBytes.byteOffset || 0) + (signatureBytes.byteLength || signatureBytes.length)
      )
    })
  });
  signedData.signerInfos.push(signerInfo);
  // Outer ContentInfo envelope (contentType = signedData)
  const cms = new ContentInfo({ contentType: '1.2.840.113549.1.7.2', content: signedData.toSchema(true) });
  const cmsDer = Buffer.from(cms.toSchema().toBER(false));
  return cmsDer;
}
// Re-hydrate a DER-encoded SET OF Attribute into a pkijs
// SignedAndUnsignedAttributes (type=0 → signedAttrs), keeping the decoded
// attribute values as-is so re-encoding stays consistent with the DER that
// was digested and signed.
//
// @param signedAttributesDer Buffer|Uint8Array
// @throws if the input does not decode to an ASN.1 SET
function parseSignedAttributesDerToPkijsSignedSet(signedAttributesDer) {
  const view = signedAttributesDer instanceof Buffer ? new Uint8Array(signedAttributesDer) : signedAttributesDer;
  const asn1 = asn1js.fromBER(view.buffer.slice(view.byteOffset || 0, (view.byteOffset || 0) + (view.byteLength || view.length)));
  if (asn1.offset === -1 || !(asn1.result instanceof asn1js.Set)) {
    throw new Error('SignedAttributes DER invalide');
  }
  const attrs = [];
  for (const el of asn1.result.valueBlock.value) {
    // Each element: SEQUENCE { type OBJECT IDENTIFIER, values SET OF ANY }
    const seq = el; // asn1js.Sequence
    const typeOid = seq.valueBlock.value[0];
    const valuesSet = seq.valueBlock.value[1];
    const type = typeOid.valueBlock.toString();
    const values = valuesSet.valueBlock.value.map(v => v);
    attrs.push(new Attribute({ type, values }));
  }
  // pkijs expects a SignedAndUnsignedAttributes for signedAttrs (type=0)
  const signedAttrs = new SignedAndUnsignedAttributes({ type: 0 });
  signedAttrs.attributes = attrs;
  return signedAttrs;
}