@@ -5,7 +5,7 @@ import {Queue} from "./queue";
// Intermediate representation of one danmu while the combine/cluster
// pipeline runs. Built by obj_to_ir and consumed by apply_cluster.
interface DanmuIr {
    obj: DanmuObject;
    str: string; // for similarity algorithm
    // Signed "pointer index" produced by make_ptr_idx: a non-negative value
    // is an index into the current chunk; a negative value (-1 - idx)
    // refers to index idx in the next chunk.
    ptr_idx: int;
    sim_reason: string;
}
1111
@@ -117,6 +117,10 @@ async function prepare_combine(wasm_mod: ArrayBuffer) {
117117 await sim_init ( wasm_mod ) ;
118118}
119119
120+ function make_ptr_idx ( idx : int , is_next_chunk : boolean ) : int {
121+ return is_next_chunk ? ( - 1 - idx ) : idx ;
122+ }
123+
120124async function do_combine ( chunk : DanmuChunk < DanmuObject > , next_chunk : DanmuChunk < DanmuObject > , config : LocalizedConfig ) : Promise < DanmuClusterOutput > {
121125 begin_chunk ( config ) ;
122126
@@ -136,7 +140,7 @@ async function do_combine(chunk: DanmuChunk<DanmuObject>, next_chunk: DanmuChunk
136140 function apply_cluster ( irs : DanmuIr [ ] ) {
137141 if ( irs . length === 1 ) {
138142 ret . clusters . push ( {
139- peers_ptr : irs . map ( ir => [ ir . idx , ir . sim_reason ] ) ,
143+ peers_ptr : irs . map ( ir => [ ir . ptr_idx , ir . sim_reason ] ) ,
140144 desc : [ ] ,
141145 chosen_str : irs [ 0 ] . obj . content , // do not use detaolued str for single danmu
142146 } ) ;
@@ -159,7 +163,7 @@ async function do_combine(chunk: DanmuChunk<DanmuObject>, next_chunk: DanmuChunk
159163 let most_text = select_median_length ( most_texts ) ;
160164
161165 ret . clusters . push ( {
162- peers_ptr : irs . map ( ir => [ ir . idx , ir . sim_reason ] ) ,
166+ peers_ptr : irs . map ( ir => [ ir . ptr_idx , ir . sim_reason ] ) ,
163167 desc : most_cnt > 1 ? [ `采用了出现 ${ most_cnt } 次的文本` ] : [ ] ,
164168 chosen_str : most_text ,
165169 } ) ;
@@ -170,7 +174,7 @@ async function do_combine(chunk: DanmuChunk<DanmuObject>, next_chunk: DanmuChunk
170174 let whitelisted = whitelisted_meta ( config ) ;
171175 let blacklisted = blacklisted_meta ( config ) ;
172176
173- function obj_to_ir ( objs : DanmuObject [ ] , s : Stats | null ) : DanmuIr [ ] {
177+ function obj_to_ir ( objs : DanmuObject [ ] , s : Stats | null , is_next_chunk : boolean ) : DanmuIr [ ] {
174178 return objs
175179 . map ( ( obj , idx ) => {
176180 if ( ! config . PROC_POOL1 && obj . pool === 1 ) {
@@ -244,15 +248,15 @@ async function do_combine(chunk: DanmuChunk<DanmuObject>, next_chunk: DanmuChunk
244248 return {
245249 obj : obj ,
246250 str : detaolued ,
247- idx : idx ,
251+ ptr_idx : make_ptr_idx ( idx , is_next_chunk ) ,
248252 sim_reason : 'ORIG' ,
249253 } ;
250254 } )
251255 . filter ( obj => obj !== null ) as DanmuIr [ ] ;
252256 }
253257
254- let danmus = obj_to_ir ( chunk . objs , ret . stats ) ;
255- let next_chunk_danmus = obj_to_ir ( next_chunk . objs , null ) ;
258+ let danmus = obj_to_ir ( chunk . objs , ret . stats , false ) ;
259+ let next_chunk_danmus = obj_to_ir ( next_chunk . objs , null , true ) ;
256260
257261 let nearby_danmus : Queue < DanmuIr [ ] > = new Queue ( ) ;
258262
0 commit comments