1/*---------------------------------------------------------------------------* 2 * astar.c * 3 * * 4 * Copyright 2007, 2008 Nuance Communciations, Inc. * 5 * * 6 * Licensed under the Apache License, Version 2.0 (the 'License'); * 7 * you may not use this file except in compliance with the License. * 8 * * 9 * You may obtain a copy of the License at * 10 * http://www.apache.org/licenses/LICENSE-2.0 * 11 * * 12 * Unless required by applicable law or agreed to in writing, software * 13 * distributed under the License is distributed on an 'AS IS' BASIS, * 14 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * 15 * See the License for the specific language governing permissions and * 16 * limitations under the License. * 17 * * 18 *---------------------------------------------------------------------------*/ 19 20#include "pstdio.h" 21#include "passert.h" 22 23#include"srec_sizes.h" 24#include"search_network.h" 25#include"srec.h" 26#include"srec_context.h" 27#include"word_lattice.h" 28#include "portable.h" 29#include "srec_stats.h" 30#include "astar.h" 31#include "astar_pphash.h" 32 33#ifdef SET_RCSID 34static const char *rcsid = 0 ? (const char *) &rcsid : 35 "$Id: astar.c,v 1.19.4.9 2008/04/30 15:12:15 dahan Exp $"; 36#endif 37 38#define PRINT_ASTAR_SOMEWHAT 0 39#define PRINT_ASTAR_DETAILS 0 40#define PRINT_ASTAR_QBT_DETAILS 0 /* Quick Back Trace */ 41#define MAX_NBEST_LEN 32 42#define NBEST_LEN_MARGIN 10 43#define MAX_NUM_PARPS 400 /* 3*MAX_NBEST_LEN*MAX_WORDS_PER_COMPLETE_PATH need better dup 44 check on complete paths */ 45#define ASTAR_PRUNE_DELTA 20000 46#define DEBUG_PARP_MANAGEMENT 0 47 48#if PRINT_ASTAR_DETAILS 49static int do_draw_as_dotty = 0; 50static int do_draw_file_idx = 0; 51 52int astar_draw_tree_as_dotty(const char* file, srec* rec, AstarStack* stack); 53#endif 54 55/* 56 The word graph is represented as an arc_token_list, 57 arc_token's are chained together 2 linked lists, 58 arc_token->first_next_arc ... like a "TO" node 59 arc_token->next_arc_index ... a linked list of arcs leaving the same node 60 61 get_arc_for_word() finds the arc_token for a particular extension 62 backward though the word graph (ie. forward through the reverse word graph) 63 64*/ 65 66#define ARC_TOKEN_ONE (arc_token*)1 67arc_token* get_arc_for_word(arc_token* atoken, wordID word, 68 void* context_void, 69 wordID terminal_word) 70{ 71 srec_context* context = (srec_context*)context_void; 72 arc_token* arc_token_list = context->arc_token_list; 73 arc_token* tmp; 74 wordmap* wmap = context->olabels; 75 76 if (atoken == ARC_TOKEN_ONE) 77 { 78 /* log_report("Warning: bad thing is happening word=%d\n", word); */ 79 return 0; 80 } 81 else if (atoken == 0) 82 { 83 arc_token root_arc; 84 root_arc.first_next_arc = ARC_TOKEN_LNK(arc_token_list, 0); 85 root_arc.next_token_index = ARC_TOKEN_NULL; 86 root_arc.ilabel = root_arc.olabel = 0; 87 return get_arc_for_word(&root_arc, word, context_void, terminal_word); 88 89 /* the arc token is NULL for partial paths just starting; but 90 the word graph has nasty epsilons at the beginning, we'll remove 91 them later, but must handle them in the mean time. */ 92 atoken = &arc_token_list[0]; 93 for (; atoken; atoken = ARC_TOKEN_PTR(arc_token_list, atoken->next_token_index)) 94 { 95 if (atoken->ilabel == word) 96 return atoken; 97 else if (atoken->ilabel == WORD_EPSILON_LABEL) 98 { 99 for (tmp = ARC_TOKEN_PTR(arc_token_list, atoken->first_next_arc); tmp; 100 tmp = ARC_TOKEN_PTR(arc_token_list, tmp->next_token_index)) 101 if (tmp->ilabel == word) 102 return tmp; 103 } 104 else if (atoken->ilabel < wmap->num_slots) 105 { 106 if (wordmap_whether_in_rule(wmap, word, atoken->ilabel)) 107 return atoken; 108 } 109 } 110 return 0; 111 } 112 else if (word == terminal_word) 113 { 114 /* -pau- LABEL, the word graph does not seem to have them! */ 115 tmp = ARC_TOKEN_PTR(arc_token_list, atoken->first_next_arc); 116 if (!tmp) 117 return ARC_TOKEN_ONE; 118 else if (tmp->first_next_arc == ARC_TOKEN_NULL && (tmp->ilabel == MAXwordID || tmp->ilabel == terminal_word)) 119 return ARC_TOKEN_ONE; 120 else 121 { 122 /* again more weirdness in the output graph format1 123 might be due to multiple endnodes? */ 124 for (tmp = ARC_TOKEN_PTR(arc_token_list, atoken->first_next_arc); tmp; 125 tmp = ARC_TOKEN_PTR(arc_token_list, tmp->next_token_index)) 126 if (tmp->ilabel == MAXwordID && tmp->first_next_arc == ARC_TOKEN_NULL) 127 return ARC_TOKEN_ONE; 128 return 0; 129 } 130 } 131 else 132 { 133#if PRINT_ASTAR_DETAILS 134 printf("word %d allowed? ", word); 135#endif 136 if (atoken->first_next_arc == ARC_TOKEN_NULL) 137 return 0; 138 else 139 tmp = ARC_TOKEN_PTR(arc_token_list, atoken->first_next_arc); 140 /* handle single epsilons */ 141 if (tmp->ilabel == WORD_EPSILON_LABEL && tmp->next_token_index == ARC_TOKEN_NULL) 142 tmp = ARC_TOKEN_PTR(arc_token_list, tmp->first_next_arc); 143 for (; tmp; tmp = ARC_TOKEN_PTR(arc_token_list, tmp->next_token_index)) 144 { 145#if PRINT_ASTAR_DETAILS 146 printf(" W%d(%s)", tmp->ilabel, tmp->ilabel != MAXwordID ? wmap->words[tmp->ilabel] : ""); 147#endif 148 if (tmp->ilabel == word) 149 { 150#if PRINT_ASTAR_DETAILS 151 printf("\n"); 152#endif 153 return tmp; 154 } 155 else if (tmp->ilabel < wmap->num_slots) 156 { 157 if (wordmap_whether_in_rule(wmap, word, tmp->ilabel)) 158 return tmp; 159 } 160 } 161#if PRINT_ASTAR_DETAILS 162 printf("\n"); 163#endif 164 return 0; 165 } 166} 167 168arc_token* get_arc_for_word_without_slot_annotation(arc_token* atoken, const char* word, 169 void* context_void, 170 wordID terminal_word) 171{ 172 srec_context* context = (srec_context*)context_void; 173 arc_token* arc_token_list = context->arc_token_list; 174 arc_token* tmp; 175 wordmap* wmap = context->olabels; 176 wordID wdid = wordmap_find_index(wmap, word); 177 178 if (atoken == ARC_TOKEN_ONE) 179 { 180 /* log_report("Warning: bad thing is happening word=%d\n", word); */ 181 return 0; 182 } 183 else if (atoken == NULL) 184 { 185 arc_token root_arc; 186 root_arc.first_next_arc = ARC_TOKEN_LNK(arc_token_list, 0); 187 root_arc.next_token_index = ARC_TOKEN_NULL; 188 root_arc.ilabel = root_arc.olabel = 0; 189 return get_arc_for_word_without_slot_annotation(&root_arc, word, context_void, terminal_word); 190 } 191 else if (word == NULL) 192 { 193 /* -pau- LABEL, the word graph does not seem to have them! */ 194 tmp = ARC_TOKEN_PTR(arc_token_list, atoken->first_next_arc); 195 if (!tmp) 196 return ARC_TOKEN_ONE; 197 else if (!tmp->first_next_arc && (tmp->ilabel == MAXwordID || tmp->ilabel == terminal_word)) 198 return ARC_TOKEN_ONE; 199 else 200 { 201 /* again more weirdness in the output graph format1 202 might be due to multiple endnodes? */ 203 for (tmp = ARC_TOKEN_PTR(arc_token_list, atoken->first_next_arc); tmp; 204 tmp = ARC_TOKEN_PTR(arc_token_list, tmp->next_token_index)) 205 if (tmp->ilabel == MAXwordID && tmp->first_next_arc == ARC_TOKEN_NULL) 206 return ARC_TOKEN_ONE; 207 return 0; 208 } 209 } 210 else 211 { 212 for (tmp = ARC_TOKEN_PTR(arc_token_list, atoken->first_next_arc); tmp; 213 tmp = ARC_TOKEN_PTR(arc_token_list, tmp->next_token_index)) 214 { 215 if (tmp->ilabel == wdid) 216 { 217 return tmp; 218 } 219 else if (tmp->ilabel < wmap->num_slots) 220 { 221 wdid = wordmap_find_index_in_rule(wmap, word, tmp->ilabel); 222 if (wdid != MAXwordID) 223 return tmp; 224 } 225 else if (tmp->ilabel == WORD_EPSILON_LABEL) 226 { 227 tmp = ARC_TOKEN_PTR(arc_token_list, tmp->first_next_arc); 228 tmp = get_arc_for_word_without_slot_annotation(tmp, word, context_void, terminal_word); 229 if (tmp) return tmp; 230 } 231 } 232 return 0; 233 } 234} 235 236/* protos */ 237#if DEBUG_PARP_MANAGEMENT 238void list_free_parps(AstarStack* stack, char* msg); 239#else 240#define list_free_parps(stack,msg) 241#endif 242 243void print_partial_paths(partial_path** parps, int num_parps, srec* rec, const char* msg); 244void print_path(partial_path* path, srec* rec, char* msg); 245void sort_partial_paths(partial_path** parps, int num_parps); 246void insert_partial_path(partial_path** parps, int *pnum_parps, 247 partial_path* insert_parp); 248partial_path* make_new_partial_path(AstarStack* stack); 249/*void free_partial_path(AstarStack* stack, partial_path* parp); put the proto in astar.h */ 250 251/* functions */ 252 253void append_arc_arriving(partial_path* path, partial_path* prev_path) 254{ 255 partial_path** pprev; 256 for (pprev = &path->first_prev_arc; (*pprev); pprev = &(*pprev)->linkl_prev_arc) 257 ASSERT(*pprev != prev_path); 258 *pprev = prev_path; 259#if DEBUG_PARP_MANAGEMENT 260 if (1) 261 { 262 int i, j; 263 partial_path* path_list[256], *k; 264 memset(path_list, 0, sizeof(partial_path*)*32); 265 for (i = 0, k = path->first_prev_arc; k; k = k->linkl_prev_arc) 266 { 267 for (j = 0; j < i; j++) 268 if (k == path_list[j]) 269 ASSERT(0); 270 path_list[i++] = k; 271 } 272 } 273#endif 274} 275 276static void remove_path_arriving(partial_path* path, partial_path* prev_path) 277{ 278 partial_path** pprev; 279 if (!path) return; 280 for (pprev = &path->first_prev_arc; (*pprev); pprev = &(*pprev)->linkl_prev_arc) 281 if (*pprev == prev_path) 282 { 283 *pprev = (*pprev)->linkl_prev_arc; 284 return; 285 } 286 ASSERT(0); 287} 288 289partial_path* extend_path(AstarStack* stack, 290 partial_path* parp, 291 wtokenID extend_token_index, 292 arc_token* arc_for_extend_token_index, 293 bigcostdata max_cost, 294 word_token* word_token_array, 295 int* pwhether_complete) 296{ 297 asr_int32_t netcost; 298 partial_path* extended_parp; 299 word_token* wtoken; 300 costdata best_cost_for_node; 301 wtokenID best_extend_token; 302 partial_path* alt_extension; 303 int sanity_count; 304 305 wtoken = &word_token_array[ extend_token_index]; 306 307 if (wtoken->end_time > word_token_array[parp->token_index].end_time) 308 { 309 /* 20030921: this should never happen but we keep it for stop gap */ 310 /* why does it happen: when in srec_process_word_boundary() we 311 occasionally kill_fsm_nodes_for_word_backtrace() but we did not kill the 312 stokens for this backtrace, neither did we kill the altword_tokens. it 313 would just take too long to go through all of them, and so occasionally 314 there may leak some bad backtraces */ 315 return 0; 316 } 317 318 /* finding the netcost of this path extension */ 319 best_extend_token = word_token_array[ parp->token_index].backtrace; 320 best_cost_for_node = word_token_array[ best_extend_token].cost; 321 wtoken = &word_token_array[ extend_token_index]; 322 ASSERT(word_token_array[best_extend_token].end_time == 323 word_token_array[extend_token_index].end_time); 324 netcost = wtoken->cost - best_cost_for_node; 325 /* ASSERT( netcost > 0); bug: sometimes this fails! fix: just use int32 */ 326 if (netcost + parp->costsofar > max_cost) 327 { 328#if PRINT_ASTAR_DETAILS 329 printf("netcost %d (%d+%d) + parp->costsofar > max_cost %d\n", 330 netcost, wtoken->cost, best_cost_for_node, parp->costsofar, max_cost); 331#endif 332 return 0; 333 } 334 335 /* check if we have a similar thing already extended */ 336 sanity_count = 0; 337 for (alt_extension = parp->first_prev_arc; alt_extension; 338 alt_extension = alt_extension->linkl_prev_arc) 339 { 340 wtokenID alt_token_index = alt_extension->token_index; 341 wtokenID alt_bt_token_index; 342 wtokenID bt_token_index; 343 word_token* alt_wtoken; 344 int join_frame_diff; /* need a signed frameID */ 345 346 ASSERT(sanity_count++ < 30); 347 if (alt_token_index == MAXwtokenID) 348 continue; 349 alt_wtoken = &word_token_array[alt_token_index]; 350 if (alt_wtoken->word != wtoken->word) 351 continue; 352 353 alt_bt_token_index = alt_wtoken->backtrace; 354 bt_token_index = wtoken->backtrace; 355 if (alt_bt_token_index == MAXwtokenID && bt_token_index != MAXwtokenID) 356 continue; 357 else if (alt_bt_token_index != MAXwtokenID && bt_token_index == MAXwtokenID) 358 continue; 359 else if (alt_bt_token_index != MAXwtokenID && bt_token_index != MAXwtokenID) 360 { 361 word_token* alt_wtoken_bt; 362 word_token* wtoken_bt; 363 alt_wtoken_bt = &word_token_array[ alt_wtoken->backtrace]; 364 wtoken_bt = &word_token_array[ wtoken->backtrace]; 365 if (alt_wtoken_bt->word != wtoken_bt->word) 366 continue; 367 } 368 369 join_frame_diff = alt_wtoken->end_time - wtoken->end_time; 370 if (join_frame_diff < 0) join_frame_diff = -join_frame_diff; 371 if (join_frame_diff > 5) 372 continue; 373 374 /* the proposed extension is similar in "all" ways to an existing 375 extension, so let's not make this new extension */ 376#if PRINT_ASTAR_DETAILS 377 printf("proposed extension already done elsewhere\n"); 378#endif 379 return 0; 380 } 381 382 /* this is a TRUE new extension, so let's extend for sure */ 383 extended_parp = make_new_partial_path(stack); 384 if (!extended_parp) 385 { 386#if PRINT_ASTAR_DETAILS 387 printf("make_new_partial_path returned 0\n"); 388#endif 389 return 0; 390 } 391 extended_parp->costsofar = parp->costsofar + netcost; 392 extended_parp->token_index = extend_token_index; 393 if (extend_token_index != MAXwtokenID) 394 extended_parp->word = word_token_array[ extend_token_index].word; 395 else 396 extended_parp->word = MAXwordID; 397 if (wtoken->backtrace == MAXwtokenID) 398 { 399 *pwhether_complete = 1; 400 extended_parp->first_prev_arc = PARP_TERMINAL; 401 } 402 else 403 { 404 *pwhether_complete = 0; 405 } 406 extended_parp->arc_for_wtoken = arc_for_extend_token_index; 407 408 extended_parp->refcount = 1; 409 parp->refcount++; 410 411 /* maintain the parp tree */ 412 extended_parp->next = parp; 413 append_arc_arriving(parp, extended_parp); 414#if PRINT_ASTAR_DETAILS 415 printf("extend path returned %x\n", extended_parp); 416#endif 417 418 return extended_parp; 419} 420 421void check_stack_root_sanity(AstarStack* stack) 422{ 423 partial_path* parp1 = stack->root_path; 424 /* append_arc_arriving(stack->root_path, parp); */ 425 for (; parp1->linkl_prev_arc != NULL; parp1 = parp1->linkl_prev_arc) 426 ASSERT(parp1 != parp1->linkl_prev_arc); 427} 428 429/* 430 * make a blank partial path, free one if necessary 431 */ 432 433partial_path* make_new_partial_path(AstarStack* stack) 434{ 435 partial_path* return_parp = stack->free_parp_list; 436 if (return_parp) 437 { 438 stack->free_parp_list = return_parp->next; 439 memset((void*)return_parp, 0, sizeof(*return_parp)); /* needed */ 440 } 441 else 442 { 443 log_report("Warning: ran out of partial_paths, reprune\n"); 444#if PRINT_ASTAR_DETAILS 445 printf("Warning: ran out of partial_paths, reprune\n"); 446#endif 447 /* kill the last one, and return it, this may free more than one parp! */ 448 if (stack->num_active_paths == 0) 449 return 0; 450 stack->num_active_paths--; 451 return_parp = stack->active_paths[stack->num_active_paths]; 452 hash_del((FixedSizeHash*)stack->pphash, return_parp); 453 free_partial_path(stack, return_parp); 454 return_parp = stack->free_parp_list; 455 stack->free_parp_list = return_parp->next; 456 memset((void*)return_parp, 0, sizeof(*return_parp)); /* needed */ 457 } 458 return return_parp; 459} 460 461/* free_partial_path 462 frees the path that was passed in, and also any backpointers. 463 refcount counts the number of parps that depend on this parp */ 464void free_partial_path(AstarStack* stack, partial_path* parp) 465{ 466 partial_path* next_parp; 467 for (; parp; parp = next_parp) 468 { 469 next_parp = parp->next; 470 parp->refcount--; 471 if (parp->refcount == 0) 472 { /* first time around this always passes */ 473 remove_path_arriving(parp->next, parp); 474 parp->next = stack->free_parp_list; 475 stack->free_parp_list = parp; 476 } 477 else break; 478 } 479} 480 481/* 482 * make a partial path from a single word at the very end of the graph 483 */ 484 485partial_path* make_partial_path(AstarStack* stack, 486 wtokenID token_index, srec* rec, 487 int* pwhether_complete) 488{ 489 partial_path* parp; 490 word_token* wtoken; 491 492 /* todo: replace this with partial_path_tokens! */ 493 parp = make_new_partial_path(stack); 494 if (!parp) return parp; 495 496 wtoken = &rec->word_token_array[token_index]; 497 parp->token_index = token_index; 498 if (token_index != MAXwtokenID) 499 parp->word = rec->word_token_array[ token_index].word; 500 else 501 parp->word = MAXwordID; 502 /* wtoken->end_time should be equal to rec->current_search_frame */ 503 ASSERT(rec->accumulated_cost_offset[ wtoken->end_time] != 0); 504 parp->costsofar = rec->accumulated_cost_offset[ wtoken->end_time]; 505 parp->costsofar += wtoken->cost; 506 /* BKWD: rec->word_token_array[ wtoken->backtrace].cost + acc[] */ 507 /* FRWD: wtoken->cost + acc[] - rec->word_token_array[ wtoken->backtrace].cost + acc[] */ 508 parp->next = 0; 509 parp->first_prev_arc = parp->linkl_prev_arc = 0; 510 if (wtoken->backtrace == MAXwtokenID) 511 *pwhether_complete = 1; 512 else 513 *pwhether_complete = 0; 514 parp->arc_for_wtoken = 0; 515 parp->refcount = 1; 516 return parp; 517} 518 519/* initialize astar search */ 520 521AstarStack* astar_stack_make(srec* rec, int max_nbest_len) 522{ 523 int i; 524 AstarStack *stack; 525 526 /* allocations */ 527 stack = (AstarStack*)CALLOC_CLR(1, sizeof(AstarStack), "search.astar.base"); 528 stack->max_active_paths = max_nbest_len + NBEST_LEN_MARGIN * 3; 529 stack->max_complete_paths = max_nbest_len + NBEST_LEN_MARGIN; 530 stack->complete_paths = (partial_path**)CALLOC_CLR(stack->max_complete_paths, sizeof(partial_path*), "search.astar.cplist"); 531 stack->complete_path_confidences = (int*)CALLOC_CLR(stack->max_complete_paths, sizeof(int), "search.astar.confvalues"); 532 stack->active_paths = (partial_path**)CALLOC_CLR(stack->max_active_paths, sizeof(partial_path*), "search.astar.aplist"); 533 stack->prune_delta = ASTAR_PRUNE_DELTA; 534 535 stack->num_complete_paths = 0; 536 stack->num_active_paths = 0; 537 538 stack->partial_path_array = (partial_path*)CALLOC_CLR(MAX_NUM_PARPS, sizeof(stack->partial_path_array[0]), "search.astar.pparray"); 539 stack->partial_path_array_size = MAX_NUM_PARPS; 540 541 stack->free_parp_list = &stack->partial_path_array[0]; 542 for (i = 1; i < MAX_NUM_PARPS; i++) 543 { 544 stack->partial_path_array[i-1].next = &stack->partial_path_array[i]; 545 } 546 stack->partial_path_array[i-1].next = 0; 547 stack->root_path = 0; 548 549 stack->pphash = (void*)CALLOC_CLR(1, sizeof(FixedSizeHash), "search.astar.pphash"); 550 astar_stack_clear(stack); 551 return stack; 552} 553 554int astar_stack_destroy(srec* rec) 555{ 556 AstarStack *stack = rec->astar_stack; 557 FREE(stack->active_paths); 558 FREE(stack->complete_paths); 559 FREE(stack->complete_path_confidences); 560 FREE(stack->partial_path_array); 561 FREE(stack->pphash); 562 FREE(stack); 563 rec->astar_stack = 0; 564 return 0; 565} 566 567/* prepares for astar after forward search on an utterance */ 568 569int astar_stack_prepare(AstarStack* stack, int request_nbest_len, srec* rec) 570{ 571 wtokenID token_index; 572 word_token* wtoken; 573 partial_path* parp; 574 int whether_complete; 575 frameID end_frame = rec->current_search_frame; 576 int num_wordends; 577 578 list_free_parps(stack, "astar_stack_prepare "); 579 580 stack->num_active_paths = 0; 581 stack->num_complete_paths = 0; 582 583 stack->root_path = make_new_partial_path(stack); 584 ASSERT(stack->root_path); 585 stack->root_path->refcount = 9999; 586 stack->root_path->token_index = MAXwtokenID; 587 stack->root_path->word = MAXwordID; 588 589 num_wordends = 0; 590 for (token_index = rec->word_lattice->words_for_frame[end_frame]; 591 token_index != MAXwtokenID; 592 token_index = wtoken->next_token_index) 593 { 594 num_wordends++; 595 wtoken = &rec->word_token_array[ token_index]; 596 parp = make_partial_path(stack, token_index, rec, &whether_complete); 597 if (!parp) 598 { 599 log_report("Error: out-of-memory in astar_stack_prepare(), " 600 "num_wordends %d\n", num_wordends); 601 stack->num_complete_paths = 0; 602 return 1; 603 } 604 append_arc_arriving(stack->root_path, parp); 605 606 if (parp && whether_complete) 607 { 608 /* here .. check for dups ?? */ 609 stack->complete_paths[ stack->num_complete_paths++] = parp; 610 if (stack->num_complete_paths == request_nbest_len) 611 return 0; 612 } 613 else if (parp) 614 { 615 stack->active_paths[ stack->num_active_paths++] = parp; 616 } 617 } 618 619 list_free_parps(stack, "astar_stack_prepare "); 620 621 return 0; 622} 623 624/* cleans up astar after an utterance */ 625 626void astar_stack_clear(AstarStack* stack) 627{ 628 int i; 629 630 /* free the partial_path's that were allocated */ 631 for (i = 0; i < stack->num_active_paths; i++) 632 free_partial_path(stack, stack->active_paths[i]); 633 for (i = 0; i < stack->num_complete_paths; i++) 634 free_partial_path(stack, stack->complete_paths[i]); 635 if (stack->root_path) 636 free_partial_path(stack, stack->root_path); 637 638 /* this shouldn't be necessary, but there are a couple of bugs 639 in parp management, so let's leave it for now */ 640 stack->free_parp_list = &stack->partial_path_array[0]; 641 for (i = 1; i < MAX_NUM_PARPS; i++) 642 stack->partial_path_array[i-1].next = &stack->partial_path_array[i]; 643 stack->partial_path_array[i-1].next = 0; 644 stack->num_active_paths = 0; 645 stack->num_complete_paths = 0; 646 stack->root_path = 0; 647 648 list_free_parps(stack, "astar_stack_clear "); 649 650} 651 652/* do the astar search */ 653 654int astar_stack_do_backwards_search(srec* rec, int request_nbest_len) 655{ 656 int i; 657 AstarStack *stack = rec->astar_stack; 658 word_token *wtoken, *btoken; 659 partial_path *parp, *extended_parp, *tparp; 660 wtokenID token_index, btoken_index; 661 int whether_complete = 0; 662 bigcostdata max_cost = 0; 663 arc_token* arc_for_token_index = NULL; 664 665 arc_token* arc_token_list; /* to skip graph constraints, just set this to NULL */ 666 arcID arc_token_list_len; 667 srec_word_lattice* lattice; 668 669 int max_complete_paths; 670 671 if (!rec || !rec->context) 672 { 673 log_report("Error: bad arguments in astar_stack_do_backwards_search()\n"); 674 return 1; 675 } 676 max_complete_paths = request_nbest_len < stack->max_complete_paths ? 677 request_nbest_len : stack->max_complete_paths; 678 679 arc_token_list = rec->context->arc_token_list; 680 arc_token_list_len = rec->context->arc_token_list_len; 681 lattice = rec->word_lattice; 682 683 /* initialization, now from calling function */ 684 /* astar_stack_prepare(stack, request_nbest_len, rec); */ 685 hash_init((FixedSizeHash*)stack->pphash, rec); 686 687 /* search */ 688 while (stack->num_active_paths > 0) 689 { 690 691 list_free_parps(stack, "do_astar_back BEG"); 692 693 /* extend top path */ 694 parp = stack->active_paths[0]; 695 wtoken = &rec->word_token_array[parp->token_index]; 696 token_index = wtoken->backtrace; 697 wtoken = &rec->word_token_array[token_index]; 698 ASSERT(token_index != MAXwtokenID); /* should have been "complete" */ 699 700 701#if PRINT_ASTAR_DETAILS 702 print_partial_paths(stack->complete_paths, stack->num_complete_paths, 703 rec, "=== Complete Paths ===\n"); 704 print_partial_paths(stack->active_paths, stack->num_active_paths, 705 rec, "=== Active Paths ===\n"); 706#endif 707 708 /* pop this one */ 709 for (i = 0; i < stack->num_active_paths - 1; i++) 710 stack->active_paths[i] = stack->active_paths[i+1]; 711 stack->num_active_paths--; 712 713 if (wtoken->end_time != MAXframeID) 714 { 715 /* sort the word token array by score, so that we pick the best 716 scoring paths first */ 717 /* later add a 'whether_sorted' flag to the lattice_at_frame information */ 718 sort_word_lattice_at_frame(rec, (frameID)(wtoken->end_time + 1)); 719 720 /* extend this path, with every word ending where this word began */ 721 /* #warning there appear to be duplicates */ 722 723 btoken_index = lattice->words_for_frame[ wtoken->end_time+1]; 724 } 725 else 726 { 727 btoken_index = MAXwtokenID; 728 } 729 730#if PRINT_ASTAR_DETAILS 731 print_path(parp, rec, "Now Processing Top of Stack(2): "); 732 printf("Frame %d\n", wtoken->end_time + 1); 733 print_word_token_list(rec, btoken_index, "List of Word at Frame\n"); 734#endif 735 736 for (; btoken_index != MAXwtokenID; btoken_index = btoken->next_token_index) 737 { 738 btoken = &rec->word_token_array[btoken_index]; 739 740 /* alternate choice must end at same frame */ 741 // ASSERT(btoken->end_time == wtoken->end_time); 742 743#if PRINT_ASTAR_DETAILS 744 print_path(parp, rec, "Now Processing Top of Stack(3): "); 745 print_word_token(rec, btoken_index, "Extending word "); 746#endif 747 748 /* check if this potential extension is allowed by the 749 word graph, if not just drop it! */ 750 751 if (arc_token_list) 752 { 753 arc_for_token_index = get_arc_for_word(parp->arc_for_wtoken, 754 btoken->word, 755 rec->context, 756 rec->context->beg_silence_word); 757 if (arc_for_token_index == NULL) 758 { 759#if PRINT_ASTAR_DETAILS 760 printf("Not allowed by graph!\n"); 761#endif 762 continue; 763 } 764 } 765 766 /* figure out the cost to beat ! */ 767 if (stack->num_complete_paths) 768 { 769 max_cost = stack->complete_paths[0]->costsofar + stack->prune_delta; 770 } 771 else if (stack->num_active_paths == stack->max_active_paths) 772 { 773 max_cost = stack->active_paths[ stack->num_active_paths-1]->costsofar; 774 } 775 else if (stack->num_active_paths > 0) 776 { 777 max_cost = stack->active_paths[0]->costsofar + stack->prune_delta; 778 } 779 else 780 { 781 max_cost = MAXbcostdata; 782 } 783 784 extended_parp = extend_path(stack, parp, btoken_index, arc_for_token_index, max_cost, rec->word_token_array, &whether_complete); 785 786 if (extended_parp) 787 { 788 int fsh_rc = hash_set((FixedSizeHash*)stack->pphash, extended_parp); 789 if (fsh_rc == FSH_KEY_OCCUPIED) 790 { 791 /* seen this path before, let's not bother with it */ 792#if PRINT_ASTAR_DETAILS 793 print_path(extended_parp, rec, "dup!! "); 794#endif 795 free_partial_path(stack, extended_parp); 796 extended_parp = 0; 797 } 798 } 799 800 if (extended_parp && whether_complete) 801 { 802 ASSERT(stack->num_complete_paths < stack->max_complete_paths); 803 stack->complete_paths[ stack->num_complete_paths++] = extended_parp; 804 /*if(stack->num_complete_paths >= request_nbest_len) 805 return 0;*/ 806 807 808#if PRINT_ASTAR_DETAILS 809 print_path(extended_parp, rec, "&&Extended, complete : "); 810#endif 811 } 812 else if (extended_parp) 813 { 814 /* todo: check if this extended_parp is already completed on the 815 stack->complete_paths, if so just rejoin with that guy somehow */ 816 817#if PRINT_ASTAR_DETAILS 818 print_path(extended_parp, rec, "&&Extended, incomplete : "); 819#endif 820 if (stack->num_active_paths == stack->max_active_paths) 821 { 822 /* kill the last one */ 823 stack->num_active_paths--; 824 tparp = stack->active_paths[stack->num_active_paths]; 825 hash_del((FixedSizeHash*)stack->pphash, tparp); 826 free_partial_path(stack, tparp); 827 } 828 insert_partial_path(stack->active_paths, &stack->num_active_paths, 829 extended_parp); 830 } 831#if PRINT_ASTAR_DETAILS 832 else 833 { 834 printf("&&Extended, cost too high (>%d):\n", max_cost); 835 } 836#endif 837 if (stack->num_complete_paths == max_complete_paths) 838 { 839#if PRINT_ASTAR_DETAILS 840 printf("Complete paths are full %d, stopping\n", stack->num_complete_paths); 841#endif 842 break; 843 } 844 } 845#if PRINT_ASTAR_DETAILS 846 if (do_draw_as_dotty > 0) 847 { 848 char tmp[32]; 849 sprintf(tmp, "astar.%.3d.dot", do_draw_file_idx++); 850 astar_draw_tree_as_dotty(tmp, rec, stack); 851 if (do_draw_as_dotty > 1) 852 system("C:/tools/graphviz/bin/dotty.exe astar.dot"); 853 } 854#endif 855 856 SREC_STATS_UPDATE_ASTAR(stack); 857 hash_del((FixedSizeHash*)stack->pphash, parp); 858 free_partial_path(stack, parp); /* done all extensions, now free */ 859 if (stack->num_complete_paths == max_complete_paths) 860 { 861#if PRINT_ASTAR_DETAILS 862 printf("Complete paths are full %d, stopping\n", stack->num_complete_paths); 863#endif 864 break; 865 } 866 867 list_free_parps(stack, "do_astar_back END"); 868 } 869 sort_partial_paths(stack->complete_paths, stack->num_complete_paths); 870 /* if we're doing a search within a grammar, then print the complete choices 871 else we're likely just doing reprune_word_tokens() */ 872#if PRINT_ASTAR_SOMEWHAT 873 if (rec->context->arc_token_list) 874 print_partial_paths(stack->complete_paths, stack->num_complete_paths, 875 rec, "=== Complete paths ===\n"); 876#endif 877 /* now the caller must call clear */ 878 /* astar_stack_clear(stack); */ 879 880 return 0; 881} 882 883void sort_partial_paths(partial_path** parps, int num_parps) 884{ 885 int i, j; 886 for (i = 0; i < num_parps; i++) 887 { 888 for (j = 0; j < num_parps - 1; j++) 889 { 890 if (parps[j]->costsofar > parps[j+1]->costsofar) 891 { 892 partial_path* parp = parps[j]; 893 parps[j] = parps[j+1]; 894 parps[j+1] = parp; 895 } 896 } 897 } 898} 899 900void insert_partial_path(partial_path** parps, int *pnum_parps, partial_path* insert_parp) 901{ 902 int i, j, insert_index; 903 int num_parps = *pnum_parps; 904 905 /* maintain the list sorted, search the list linearly for now, 906 do priority_q type heap later */ 907 insert_index = num_parps; 908 for (i = 0; i < num_parps; i++) 909 { 910 if (insert_parp->costsofar < parps[i]->costsofar) 911 { 912 insert_index = i; 913 break; 914 } 915 } 916 for (j = num_parps; j > insert_index; --j) 917 parps[j] = parps[j-1]; 918 parps[j] = insert_parp; 919 num_parps++; 920 *pnum_parps = num_parps; 921} 922 923void print_path(partial_path* ipath, srec* rec, char* msg) 924{ 925 partial_path* path; 926 word_token* wtoken; 927 word_token* last_wtoken; 928 char* p; 929 char trans[256]; 930 int max_trans_len = 255; 931 int rc; 932#ifndef NDEBUG 933 int sanity_count = 0; 934#endif 935 frameID end_time; 936 937 PLogMessage("%spath score=%d ", msg, ipath->costsofar); 938 939 rc = sprint_word_token_backtrace(trans, max_trans_len, rec, ipath->token_index); 940 ASSERT(rc == 0); 941 942 last_wtoken = 0; 943 end_time = (ipath && ipath->token_index != MAXwtokenID) ? rec->word_token_array[ipath->token_index].end_time : MAXframeID; 944#if SHOW_END_TIMES 945 printf("%s@%d || ", trans, end_time); 946#else 947 printf("%s || ", trans); 948#endif 949 950 path = ipath->next; /* we've already printed this thing */ 951 for (; path; path = path->next) 952 { 953 ASSERT(sanity_count++ < 256); 954 if (path->token_index == MAXwtokenID) break; 955 wtoken = &rec->word_token_array[ path->token_index]; 956 p = "NULL"; 957 if (rec->context->olabels->words[wtoken->word]) 958 p = rec->context->olabels->words[wtoken->word]; 959#if SHOW_END_TIMES 960 printf("%s@%d ", p, wtoken->end_time); 961#else 962 printf("%s ", p); 963#endif 964 if (last_wtoken != NULL) 965 { 966 if (wtoken->end_time < last_wtoken->end_time) 967 { 968 printf(" Error: wt%d < lwt%d\n", wtoken->end_time, last_wtoken->end_time); 969 pfflush(PSTDOUT); 970 ASSERT(0); 971 } 972 } 973 last_wtoken = wtoken; 974 } 975 printf("\n"); 976} 977 978void print_partial_paths(partial_path** parps, int num_parps, 979 srec* rec, const char* msg) 980{ 981 int i; 982 char buf[32]; 983 printf("%s", msg); 984 for (i = 0; i < num_parps; i++) 985 { 986 sprintf(buf, "%.3d ", i); 987 print_path(parps[i], rec, buf); 988 } 989} 990 991 992/*--------------------------------------------------------------------------* 993 * * 994 * visualization .. sometimes helps debugging * 995 * * 996 *--------------------------------------------------------------------------*/ 997 998#if PRINT_ASTAR_DETAILS 999 1000 1001int astar_draw_arc_as_dotty(FILE* fp, partial_path* parp, int src_node, int *nodes_used, srec* rec) 1002{ 1003 word_token* word_token_array = rec->word_token_array; 1004 partial_path* parp1; 1005 int sanity_count = 0; 1006 int to_nodes[32], *pto_nodes, inode; 1007 pto_nodes = to_nodes; 1008 1009 fprintf(fp, "%d [label = \"%d\", shape = circle, style = solid]\n", 1010 src_node, src_node); 1011 for (parp1 = parp->first_prev_arc; parp1; parp1 = parp1->linkl_prev_arc) 1012 { 1013 arc_token* arc = parp1->arc_for_wtoken; 1014 for (inode = 0; nodes_used[inode]; inode++) ; 1015 nodes_used[inode] = 1; 1016 *pto_nodes++ = inode; 1017 fprintf(fp, " %d -> %d [label = \"(%d).%d.%s@%d/%d\"];\n", 1018 src_node, inode, parp1->refcount, 1019 parp1->token_index, 1020 (arc && arc != (arc_token*)1) ? rec->context->olabels->words[arc->ilabel] : "NULL", 1021 word_token_array[ parp1->token_index].end_time, 1022 parp1->costsofar); 1023 if (sanity_count++ > 30) break; 1024 } 1025 1026 pto_nodes = to_nodes; 1027 sanity_count = 0; 1028 for (parp1 = parp->first_prev_arc; parp1; parp1 = parp1->linkl_prev_arc) 1029 { 1030 astar_draw_arc_as_dotty(fp, parp1, *pto_nodes, nodes_used, rec); 1031 pto_nodes++; 1032 if (sanity_count++ > 30) break; 1033 } 1034 return 0; 1035} 1036 1037int astar_draw_tree_as_dotty(const char* file, srec* rec, AstarStack* stack) 1038{ 1039 word_token* word_token_array = rec->word_token_array; 1040 FILE* fp = fopen(file, "w"); 1041 partial_path* parp; 1042 int nodes_used[1024]; 1043 memset(nodes_used, 0, 1024*sizeof(int)); 1044 1045 fprintf(fp, 1046 "digraph FSM {\n" 1047 "rankdir = LR;\n" 1048 "size = \"8.5,11\";\n" 1049 "fontsize = 14;\n" 1050 "label = \"\\n\\naaron.PCLG\"\n" 1051 "center = 1;\n" 1052 "orientation = Landscape\n" 1053 ); 1054 nodes_used[0] = 1; 1055 for (parp = stack->root_path; parp; parp = parp->linkl_prev_arc) 1056 astar_draw_arc_as_dotty(fp, parp, 0, nodes_used, rec); 1057 1058 fprintf(fp, "}\n"); 1059 fclose(fp); 1060 printf("....... dotty %s ......\n", file); 1061 return 0; 1062} 1063 1064#endif 1065 1066/*--------------------------------------------------------------------------* 1067 * * 1068 * functions relating to in-recognition backtrace * 1069 * * 1070 *--------------------------------------------------------------------------*/ 1071 1072int maybe_add_to_active_paths(AstarStack* stack, word_token* word_token_array, bigcostdata cost, wtokenID wtoken_index); 1073int astar_stack_prepare_from_active_search(AstarStack* stack, int nbestlen, srec* rec) 1074{ 1075 wtokenID wtoken_index; 1076 ftokenID ftoken_index; 1077 fsmnode_token* ftoken; 1078 stokenID stoken_index; 1079 fsmarc_token* stoken; 1080 /* word_token* wtoken; */ 1081 frameID prune_frame = rec->current_search_frame; 1082 int i, rc = 0, rc1 = 0; 1083 bigcostdata parp_costsofar; 1084 1085 stack->num_active_paths = 0; 1086 stack->num_complete_paths = 0; 1087 stack->root_path = 0; 1088 1089 /* put it on the stack */ 1090 stack->root_path = make_new_partial_path(stack); 1091 ASSERT(stack->root_path); 1092 stack->root_path->refcount = 9999; 1093 stack->root_path->token_index = MAXwtokenID; 1094 stack->root_path->word = MAXwordID; 1095 1096 ftoken_index = rec->active_fsmnode_tokens; 1097 for (; ftoken_index != MAXftokenID; ftoken_index = ftoken->next_token_index) 1098 { 1099 ftoken = &rec->fsmnode_token_array[ ftoken_index]; 1100 wtoken_index = ftoken->word_backtrace; 1101 if (wtoken_index == MAXwtokenID) 1102 continue; 1103 1104 /* fix the score */ 1105 parp_costsofar = ftoken->cost; 1106 parp_costsofar += rec->accumulated_cost_offset[ prune_frame]; 1107 1108 rc += (rc1 = maybe_add_to_active_paths(stack, rec->word_token_array, parp_costsofar, wtoken_index)); 1109 /* we can handle that a path was not added for this ftoken because 1110 we made sure to flag the wtokens along it's top backtrace */ 1111 } 1112 1113 stoken_index = rec->active_fsmarc_tokens; 1114 for (; stoken_index != MAXstokenID; stoken_index = stoken->next_token_index) 1115 { 1116 stoken = &rec->fsmarc_token_array[ stoken_index]; 1117 for (i = 0; i < stoken->num_hmm_states; i++) 1118 { 1119 wtoken_index = stoken->word_backtrace[i]; 1120 if (wtoken_index == MAXwtokenID) 1121 continue; 1122 parp_costsofar = stoken->cost[i]; 1123 parp_costsofar += rec->accumulated_cost_offset[ prune_frame]; 1124 1125 rc += (rc1 = maybe_add_to_active_paths(stack, rec->word_token_array, parp_costsofar, wtoken_index)); 1126 /* we can handle that a path was not added for this stoken because 1127 we made sure to flag the wtokens along it's top backtrace */ 1128 } 1129 } 1130 1131#if PRINT_ASTAR_DETAILS 1132 print_partial_paths(stack->active_paths, stack->num_active_paths, 1133 rec, "== active paths before sorting ==\n"); 1134 sort_partial_paths(stack->active_paths, stack->num_active_paths); 1135 print_partial_paths(stack->active_paths, stack->num_active_paths, 1136 rec, "== active paths after sorting ==\n"); 1137#endif 1138 list_free_parps(stack, "astar_prepare_from_active_search"); 1139 return 0; 1140} 1141 1142int maybe_add_to_active_paths(AstarStack* stack, word_token* word_token_array, bigcostdata parp_costsofar, wtokenID wtoken_index) 1143{ 1144 int i; 1145 int replace_index; 1146 int inserts_index; 1147 partial_path* parp; 1148 word_token* wtoken; 1149 wtoken = &word_token_array[ wtoken_index]; 1150 1151 if (wtoken->backtrace == MAXwtokenID) 1152 { 1153 return 0; 1154 } 1155 1156 /* see if we already have this word token backtrace */ 1157 replace_index = -1; 1158 inserts_index = -1; 1159 for (i = 0; i < stack->num_active_paths; i++) 1160 { 1161 if (stack->active_paths[i]->token_index == wtoken_index) 1162 { 1163 if (parp_costsofar < stack->active_paths[i]->costsofar) 1164 { 1165 /* this one is better than another we already have! */ 1166 replace_index = i; 1167 if (inserts_index < 0) 1168 inserts_index = i; 1169 } 1170 break; 1171 } 1172 else if (parp_costsofar < stack->active_paths[i]->costsofar) 1173 { 1174 if (inserts_index < 0) 1175 inserts_index = i; 1176 } 1177 } 1178#if PRINT_ASTAR_QBT_DETAILS 1179 printf("maybe_add replace %d insert %d\n", replace_index, inserts_index); 1180#endif 1181 1182 if (replace_index >= 0) 1183 { 1184 free_partial_path(stack, stack->active_paths[replace_index]); 1185 /* stack->active_paths[replace_index] = 0; */ 1186 for (i = replace_index; i > inserts_index; --i) 1187 stack->active_paths[i] = stack->active_paths[i-1]; 1188 stack->active_paths[inserts_index] = 0; 1189 } 1190 else if (inserts_index >= 0) 1191 { 1192 if (stack->num_active_paths == stack->max_active_paths) 1193 { 1194 free_partial_path(stack, stack->active_paths[ stack->num_active_paths-1]); 1195 stack->num_active_paths--; 1196 } 1197 for (i = stack->num_active_paths; i > inserts_index; --i) 1198 stack->active_paths[i] = stack->active_paths[i-1]; 1199 stack->active_paths[inserts_index] = 0; 1200 stack->num_active_paths++; 1201 } 1202 else if (stack->num_active_paths < stack->max_active_paths) 1203 { 1204 /* append if there's space */ 1205 inserts_index = stack->num_active_paths; 1206 stack->num_active_paths++; 1207 stack->active_paths[inserts_index] = 0; 1208 } 1209 else 1210 { 1211 /* no space */ 1212 return 1; 1213 } 1214 1215 /* create a parp */ 1216#if PRINT_ASTAR_QBT_DETAILS 1217 printf("maybe_add .. creating new parp %d\n", parp_costsofar); 1218#endif 1219 /* this should always succeed because of above frees */ 1220 ASSERT(stack->free_parp_list); 1221 parp = make_new_partial_path(stack); 1222 parp->token_index = wtoken_index; 1223 if (wtoken_index != MAXwtokenID) 1224 parp->word = word_token_array[ wtoken_index].word; 1225 else 1226 parp->word = MAXwordID; 1227 parp->next = stack->root_path; 1228 parp->first_prev_arc = parp->linkl_prev_arc = 0; 1229 parp->arc_for_wtoken = 0; 1230 parp->refcount = 1; 1231 parp->costsofar = parp_costsofar; 1232 1233 stack->active_paths[ inserts_index] = parp; 1234 1235#if PRINT_ASTAR_QBT_DETAILS 1236 printf("maybe_add .. appending to root\n"); 1237#endif 1238 append_arc_arriving(stack->root_path, parp); 1239 return 0; 1240} 1241 1242 1243int astar_stack_flag_word_tokens_used(AstarStack* stack, srec* rec) 1244{ 1245 int i; 1246 wtokenID wtoken_index; 1247 partial_path* parp; 1248 int num_flagged_by_path; 1249 1250#if PRINT_ASTAR_QBT_DETAILS 1251 print_partial_paths(stack->complete_paths, stack->num_complete_paths, 1252 rec, "=== Complete QBT paths ===\n"); 1253#endif 1254 1255 for (i = 0; i < stack->num_complete_paths; i++) 1256 { 1257 num_flagged_by_path = 0; 1258 for (parp = stack->complete_paths[i]; parp; parp = parp->next) 1259 { 1260 wtoken_index = parp->token_index; 1261 if (wtoken_index == MAXwtokenID) break; 1262 rec->word_token_array_flags[ wtoken_index]++; 1263 if (rec->word_token_array_flags[wtoken_index] > 0) 1264 { 1265 num_flagged_by_path++; 1266#if PRINT_ASTAR_QBT_DETAILS 1267 printf("%d ", wtoken_index); 1268#endif 1269 } 1270 } 1271 1272 /* also flag the main backtrace of every word token */ 1273 /* we do we need this? I'm not sure, but it appears 1274 that some backtrace tokens are not flagged for whatever 1275 reason. It's worth revisiting this when other bugs are 1276 are resolved. This is in a separate loop from the 1277 above because it allows us to detect that this is 1278 happening in the first place */ 1279 for (parp = stack->complete_paths[i]; parp; parp = parp->next) 1280 { 1281 word_token *btoken, *last_btoken; 1282 wtokenID btoken_index; 1283 wtoken_index = parp->token_index; 1284 if (wtoken_index == MAXwtokenID) break; 1285 last_btoken = NULL; 1286 btoken = &rec->word_token_array[ wtoken_index]; 1287 btoken_index = btoken->backtrace; 1288 for (; btoken_index != MAXwtokenID; btoken_index = btoken->backtrace) 1289 { 1290 btoken = &rec->word_token_array[ btoken_index]; 1291 rec->word_token_array_flags[ btoken_index]++; 1292 if (rec->word_token_array_flags[ btoken_index] == 1) 1293 { 1294 num_flagged_by_path++; 1295#if PRINT_ASTAR_QBT_DETAILS 1296 printf("%db ", btoken_index); 1297#endif 1298 } 1299 if (last_btoken && last_btoken->end_time <= btoken->end_time) 1300 { 1301 PLogError("bad looping path encountered, breaking"); 1302 break; 1303 } 1304 last_btoken = btoken; 1305 } 1306 } 1307 1308#if PRINT_ASTAR_QBT_DETAILS 1309 printf("complete path %.3d flagged %d\n", i, num_flagged_by_path); 1310#endif 1311 } 1312 return 0; 1313} 1314 1315 1316#if DEBUG_PARP_MANAGEMENT 1317#define PARP_FREE 1 1318#define PARP_USED 2 1319void list_free_parps(AstarStack* stack, char* msg) 1320{ 1321 partial_path* parp; 1322 int i, num = 0; 1323 char x[MAX_NUM_PARPS]; 1324 for (i = 0; i < MAX_NUM_PARPS; i++) x[i] = 0; 1325 for (parp = stack->free_parp_list; parp; parp = parp->next) 1326 { 1327 num++; 1328 x[(parp-stack->partial_path_array)] = PARP_FREE; 1329 } 1330 PLogMessage("%sstack->free_parp_list size %d ", msg, num); 1331 PLogMessage("active %d complete %d\n", stack->num_active_paths, stack->num_complete_paths); 1332 1333 for (i = 0; i < stack->num_active_paths; i++) 1334 { 1335 parp = stack->active_paths[i]; 1336 for (; parp; parp = parp->next) x[(parp-stack->partial_path_array)] = PARP_USED; 1337 } 1338 for (i = 0; i < stack->num_complete_paths; i++) 1339 { 1340 parp = stack->complete_paths[i]; 1341 for (; parp; parp = parp->next) x[(parp-stack->partial_path_array)] = PARP_USED; 1342 } 1343 if (stack->root_path) 1344 x[(stack->root_path-stack->partial_path_array)] = PARP_USED; 1345 printf("free: "); 1346 for (i = 0; i < MAX_NUM_PARPS; i++) if (x[i] == PARP_FREE) printf(" %d", i); 1347 printf("\n"); 1348 printf("used: "); 1349 for (i = 0; i < MAX_NUM_PARPS; i++) if (x[i] == PARP_USED) printf(" %d", i); 1350 printf("\n"); 1351 for (i = 0, num = 0; i < MAX_NUM_PARPS; i++) if (!x[i]) num++; 1352 printf("unaccounted for %d\n", num); 1353 ASSERT(num == 0); 1354 1355} 1356#endif 1357