@@ -30,31 +30,31 @@ export default {
 
   dependsOn: 'metadata',
 
-  /**
-   * Process a chunk of items in a worker thread.
-   * Builds JSON sections - FS operations happen in generate().
-   *
-   * @param {Input} fullInput - Full metadata input for context rebuilding
-   * @param {number[]} itemIndices - Indices of head nodes to process
-   * @param {Partial<Omit<GeneratorOptions, 'worker'>>} _options - Serializable options (unused)
-   * @returns {Promise<import('./types.d.ts').Section[]>} JSON sections for each processed module
-   */
-  async processChunk(fullInput, itemIndices) {
-    const groupedModules = groupNodesByModule(fullInput);
-
-    const headNodes = fullInput.filter(node => node.heading.depth === 1);
-
-    const results = [];
-
-    for (const idx of itemIndices) {
-      const head = headNodes[idx];
-      const nodes = groupedModules.get(head.api);
+  processChunk: Object.assign(
+    /**
+     * Process a chunk of items in a worker thread.
+     * Builds JSON sections - FS operations happen in generate().
+     *
+     * With sliceInput, each item is pre-grouped {head, nodes} - no need to
+     * recompute groupNodesByModule for every chunk.
+     *
+     * @param {Array<{head: ApiDocMetadataEntry, nodes: ApiDocMetadataEntry[]}>} slicedInput - Pre-sliced module data
+     * @param {number[]} itemIndices - Indices into the sliced array
+     * @returns {Promise<import('./types.d.ts').Section[]>} JSON sections for each processed module
+     */
+    async (slicedInput, itemIndices) => {
+      const results = [];
+
+      for (const idx of itemIndices) {
+        const { head, nodes } = slicedInput[idx];
+
+        results.push(buildSection(head, nodes));
+      }
 
-      results.push(buildSection(head, nodes));
-    }
-
-    return results;
-  },
+      return results;
+    },
+    { sliceInput: true }
+  ),
 
   /**
    * Generates a legacy JSON file.
@@ -64,11 +64,18 @@ export default {
    * @returns {AsyncGenerator<Array<import('./types.d.ts').Section>>}
    */
   async *generate(input, { output, worker }) {
+    const groupedModules = groupNodesByModule(input);
+
     const headNodes = input.filter(node => node.heading.depth === 1);
 
-    const deps = { output };
+    // Create sliced input: each item contains head + its module's entries
+    // This avoids sending all 4900+ entries to every worker
+    const slicedInput = headNodes.map(head => ({
+      head,
+      nodes: groupedModules.get(head.api),
+    }));
 
-    for await (const chunkResult of worker.stream(headNodes, input, deps)) {
+    for await (const chunkResult of worker.stream(slicedInput, slicedInput)) {
       if (output) {
         for (const section of chunkResult) {
          const out = join(output, `${section.api}.json`);
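For context on the Object.assign pattern above: attaching sliceInput: true to the function itself lets the worker pool inspect the flag without wrapping the callback. The pool and worker.stream internals are not part of this diff, so the sketch below is only an assumption of how such a flag could be consumed; payloadFor is a hypothetical helper, not code from this repository.

// Illustrative sketch only: how a scheduler might honor the `sliceInput`
// flag that Object.assign attaches to processChunk above.
const processChunk = Object.assign(
  // Same shape as above: operates on pre-grouped { head, nodes } items.
  async (slicedInput, itemIndices) =>
    itemIndices.map(idx => slicedInput[idx].head),
  { sliceInput: true }
);

// Hypothetical helper: decides what to serialize and send to a worker.
function payloadFor(slicedInput, fullInput, itemIndices) {
  if (processChunk.sliceInput) {
    // Send only the pre-grouped items this chunk needs, with indices
    // remapped to the smaller array, instead of the full metadata input.
    return {
      input: itemIndices.map(idx => slicedInput[idx]),
      itemIndices: itemIndices.map((_, i) => i),
    };
  }
  // Without the flag, every worker would receive the entire input.
  return { input: fullInput, itemIndices };
}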