/* BRL-CAD — primitives/nmg/nmg_bool.c (Doxygen documentation-page header; extraction artifact) */
1 /* N M G _ B O O L . C
2  * BRL-CAD
3  *
4  * Copyright (c) 1993-2014 United States Government as represented by
5  * the U.S. Army Research Laboratory.
6  *
7  * This library is free software; you can redistribute it and/or
8  * modify it under the terms of the GNU Lesser General Public License
9  * version 2.1 as published by the Free Software Foundation.
10  *
11  * This library is distributed in the hope that it will be useful, but
12  * WITHOUT ANY WARRANTY; without even the implied warranty of
13  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14  * Lesser General Public License for more details.
15  *
16  * You should have received a copy of the GNU Lesser General Public
17  * License along with this file; see the file named COPYING for more
18  * information.
19  */
20 
21 /** @addtogroup nmg */
22 /** @{ */
23 /** @file primitives/nmg/nmg_bool.c
24  *
25  * Support for boolean operations on NMG objects. Most of the
26  * routines in here are static/local to this file. The interfaces
27  * here are the functions "nmg_do_bool" and "nmg_mesh_faces". The
28  * former does boolean operations on a pair of shells. The latter is
29  * a function to make edges shared between two faces whenever
30  * possible.
31  *
32  */
33 
34 #include "common.h"
35 
36 #include <stdlib.h>
37 #include <math.h>
38 #include <string.h>
39 #include "bio.h"
40 
41 #include "vmath.h"
42 #include "nmg.h"
43 #include "raytrace.h"
44 #include "plot3.h"
45 
46 
47 extern int nmg_class_nothing_broken;
48 
/* XXX Move to nmg_manif.c or nmg_ck.c */
/* State block threaded through nmg_visit() by nmg_dangling_handler().
 * NOTE(review): the opening "struct dangling_faceuse_state {" line appears
 * to be missing from this view of the file — confirm against the repository.
 */
    char *visited;		/* one char flag per NMG index; marks faceuses already examined (see NMG_INDEX_FIRST_TIME) */
    const char *manifolds;	/* optional manifold table handed to nmg_dangling_face(); may be NULL */
    int count;			/* running tally of dangling OT_SAME faceuses found */
};
55 
56 
58 
59 
60 /**
61  * Find open edges, if any, in NMG object pointed to by magic_p and
62  * create a UNIX plot file containing these edges.
63  *
64  * The prefix string will be appended to the front of the file name
65  * of the plot file.
66  *
67  * Returns -
68  * 0 No open edges, no plot file created.
69  * !0 Has open edges, plot file created.
70  */
size_t
nmg_plot_open_edges(const uint32_t *magic_p, const char *prefix)
{
    struct loopuse *lu;
    struct edgeuse *eu;
    const struct edgeuse *eur;		/* radial walker around the current edge */
    struct faceuse *newfu;
    struct bu_ptbl faces;		/* table of all faces under magic_p */
    struct face *fp;
    struct faceuse *fu, *fu1, *fu2;
    int done;
    const char *manifolds = NULL;	/* no manifold table: the 2-manifold skip loop below is disabled */
    point_t pt1, pt2;
    size_t i;
    FILE *plotfp = NULL;		/* opened lazily, only if an open edge is found */
    struct bu_vls plot_file_name = BU_VLS_INIT_ZERO;
    size_t cnt;				/* number of open edges plotted */

    bu_ptbl_init(&faces, 64, "faces buffer");
    nmg_face_tabulate(&faces, magic_p);

    cnt = 0;
    for (i = 0; i < (size_t)BU_PTBL_END(&faces) ; i++) {
	fp = (struct face *)BU_PTBL_GET(&faces, i);
	NMG_CK_FACE(fp);
	/* examine both sides of the face: fu1 and its mate fu2 */
	fu = fu1 = fp->fu_p;
	NMG_CK_FACEUSE(fu1);
	fu2 = fp->fu_p->fumate_p;
	NMG_CK_FACEUSE(fu2);
	done = 0;
	while (!done) {
	    NMG_CK_FACEUSE(fu);
	    for (BU_LIST_FOR(lu, loopuse, &fu->lu_hd)) {
		NMG_CK_LOOPUSE(lu);
		if (BU_LIST_FIRST_MAGIC(&lu->down_hd) == NMG_EDGEUSE_MAGIC) {
		    for (BU_LIST_FOR(eu, edgeuse, &lu->down_hd)) {
			NMG_CK_EDGEUSE(eu);
			/* NOTE(review): a statement appears to be missing here
			 * in this view (original line 108), presumably
			 * "eur = nmg_radial_face_edge_in_shell(eu);" — as
			 * transcribed, eur is read before being assigned.
			 * Confirm against the repository. */
			newfu = eur->up.lu_p->up.fu_p;
			/* with manifolds == NULL this skip loop never runs */
			while (manifolds &&
			       NMG_MANIFOLDS(manifolds, newfu) &
			       NMG_2MANIFOLD &&
			       eur != eu->eumate_p) {
			    eur = nmg_radial_face_edge_in_shell(eur->eumate_p);
			    newfu = eur->up.lu_p->up.fu_p;
			}
			/* radial walk came straight back to the mate: the edge
			 * has no other face — it is "open" */
			if (eur == eu->eumate_p) {
			    VMOVE(pt1, eu->vu_p->v_p->vg_p->coord);
			    VMOVE(pt2, eu->eumate_p->vu_p->v_p->vg_p->coord);
			    if (!plotfp) {
				/* first open edge: create the plot file now */
				bu_vls_sprintf(&plot_file_name, "%s.%p.pl", prefix, (void *)magic_p);
				if ((plotfp = fopen(bu_vls_addr(&plot_file_name), "wb")) == (FILE *)NULL) {
				    bu_log("nmg_plot_open_edges(): Unable to create plot file (%s)\n", bu_vls_addr(&plot_file_name));
				    bu_bomb("nmg_plot_open_edges(): Unable to create plot file.");
				}
			    }
			    pdv_3line(plotfp, pt1, pt2);
			    cnt++;
			}
		    }
		}
	    }
	    /* NOTE(review): these two ifs run back-to-back, so after fu is
	     * advanced from fu1 to fu2 the loop immediately terminates and
	     * fu2's loops are never scanned — verify whether an else was
	     * intended here. */
	    if (fu == fu1) fu = fu2;
	    if (fu == fu2) done = 1;
	};

    }

    if (plotfp) {
	(void)fclose(plotfp);
	bu_vls_free(&plot_file_name);
    }

    bu_ptbl_free(&faces);

    return cnt;
}
148 
149 
150 static void
151 nmg_dangling_handler(uint32_t *longp, void *state, int UNUSED(unused))
152 {
153  register struct faceuse *fu = (struct faceuse *)longp;
154  register struct dangling_faceuse_state *sp =
155  (struct dangling_faceuse_state *)state;
156 
157  NMG_CK_FACEUSE(fu);
158  if (fu->orientation != OT_SAME) return;
159  /* If this faceuse has been processed before, do nothing more */
160  if (!NMG_INDEX_FIRST_TIME(sp->visited, fu)) return;
161 
162  if (nmg_dangling_face(fu, sp->manifolds)) {
163  sp->count++;
164  }
165 }
166 
167 
168 /**
169  * Argument is expected to be model, region, shell, or faceuse
170  * pointer.
171  *
172  * Returns -
173  * 0 No dangling faces
174  * !0 Has dangling faces
175  */
176 int
177 nmg_has_dangling_faces(uint32_t *magic_p, const char *manifolds)
178 {
179  struct model *m;
180  struct dangling_faceuse_state st;
181  static const struct nmg_visit_handlers handlers = {NULL, NULL, NULL, NULL, NULL,
182  NULL, NULL, NULL, nmg_dangling_handler, NULL,
183  NULL, NULL, NULL, NULL, NULL,
184  NULL, NULL, NULL, NULL, NULL,
185  NULL, NULL, NULL, NULL, NULL};
186  /* handlers.bef_faceuse = nmg_dangling_handler; */
187 
188  m = nmg_find_model(magic_p);
189  NMG_CK_MODEL(m);
190  st.visited = (char *)bu_calloc(m->maxindex+1, sizeof(char), "visited[]");
191  st.manifolds = manifolds;
192  st.count = 0;
193 
194  nmg_visit(magic_p, &handlers, (void *)&st);
195 
196  bu_free((char *)st.visited, "visited[]");
197  return st.count;
198 }
199 
200 
201 /**
202  * Within a shell, show each loop as a separate display. Pause after
203  * displaying each one.
204  *
205  * Note that in "non-fancy" mode, show_broken_eu() draws just the
206  * edge.
207  */
208 void
209 nmg_show_each_loop(struct shell *s, char **classlist, int redraw, int fancy, const char *str)
210 
211 
212 /* non-zero means flush previous vlist */
213 /* non-zero means pause after the display */
214 {
215  struct faceuse *fu;
216  struct loopuse *lu;
217  char buf[128];
218  long save;
219 
220  NMG_CK_SHELL(s);
221  save = RTG.NMG_debug;
222  for (BU_LIST_FOR(fu, faceuse, &s->fu_hd)) {
223  NMG_CK_FACEUSE(fu);
224  if (fu->orientation == OT_OPPOSITE) continue;
225  for (BU_LIST_FOR(lu, loopuse, &fu->lu_hd)) {
226  NMG_CK_LOOPUSE(lu);
227  if (BU_LIST_FIRST_MAGIC(&lu->down_hd) == NMG_VERTEXUSE_MAGIC)
228  continue;
229  /* Display only OT_SAME, and OT_UNSPEC et.al. */
230  if (lu->orientation == OT_OPPOSITE) continue;
231 
232  snprintf(buf, 128, "%s=%p", str, (void *)lu);
233  nmg_show_broken_classifier_stuff(&lu->l.magic, classlist, redraw, fancy, buf);
234  }
235  }
236  for (BU_LIST_FOR(lu, loopuse, &s->lu_hd)) {
237  snprintf(buf, 128, "%s=%p (wire)", str, (void *)lu);
238  nmg_show_broken_classifier_stuff(&lu->l.magic, classlist, redraw, fancy, buf);
239  }
240  RTG.NMG_debug = save; /* restore it */
241 }
242 
243 
244 void
245 stash_shell(struct shell *s, char *file_name, char *title, const struct bn_tol *tol)
246 {
247  struct model *m;
248  struct nmgregion *r;
249  struct shell *new_s;
250  struct faceuse *fu;
251  char counted_name[256];
252 
253  m = nmg_mm();
254  r = nmg_mrsv(m);
255  new_s = BU_LIST_FIRST(shell, &r->s_hd);
256 
257  for (BU_LIST_FOR(fu, faceuse, &s->fu_hd)) {
258  if (fu->orientation != OT_SAME)
259  continue;
260 
261  (void)nmg_dup_face(fu, new_s);
262  }
263 
264  nmg_rebound(m, tol);
265  snprintf(counted_name, 256, "%s%d.g", file_name, debug_file_count);
266  nmg_stash_model_to_file(counted_name, m, title);
267  nmg_km(m);
268 }
269 
270 
271 void
272 nmg_kill_non_common_cracks(struct shell *sA, struct shell *sB)
273 {
274  struct faceuse *fu;
275  struct faceuse *fu_next;
276 
277  if (RTG.NMG_debug & DEBUG_BASIC)
278  bu_log("nmg_kill_non_common_cracks(s=%p and %p)\n", (void *)sA, (void *)sB);
279 
280  NMG_CK_SHELL(sA);
281  NMG_CK_SHELL(sB);
282 
283  fu = BU_LIST_FIRST(faceuse, &sA->fu_hd);
284  while (BU_LIST_NOT_HEAD(fu, &sA->fu_hd)) {
285  struct loopuse *lu;
286  struct loopuse *lu_next;
287  int empty_face=0;
288 
289  NMG_CK_FACEUSE(fu);
290 
291  fu_next = BU_LIST_PNEXT(faceuse, &fu->l);
292  while (BU_LIST_NOT_HEAD(fu_next, &sA->fu_hd)
293  && fu_next == fu->fumate_p)
294  fu_next = BU_LIST_PNEXT(faceuse, &fu_next->l);
295 
296  lu = BU_LIST_FIRST(loopuse, &fu->lu_hd);
297  while (BU_LIST_NOT_HEAD(lu, &fu->lu_hd)) {
298  struct edgeuse *eu;
299  struct edgeuse *eu_next;
300  int empty_loop=0;
301 
302  NMG_CK_LOOPUSE(lu);
303 
304  lu_next = BU_LIST_PNEXT(loopuse, &lu->l);
305 
306  if (BU_LIST_FIRST_MAGIC(&lu->down_hd) != NMG_EDGEUSE_MAGIC) {
307  lu = lu_next;
308  continue;
309  }
310 
311  crack_topA:
312  for (BU_LIST_FOR(eu, edgeuse, &lu->down_hd)) {
313  NMG_CK_EDGEUSE(eu);
314 
315  eu_next = BU_LIST_PNEXT_CIRC(edgeuse, &eu->l);
316  NMG_CK_EDGEUSE(eu_next);
317 
318  /* check if eu and eu_next form a jaunt */
319  if (eu->vu_p->v_p != eu_next->eumate_p->vu_p->v_p)
320  continue;
321 
322  /* check if vertex at apex is in other shell
323  * if so, we need this vertex, can't kill crack
324  */
325  if (nmg_find_v_in_shell(eu_next->vu_p->v_p, sB, 0))
326  continue;
327 
328  if (nmg_keu(eu))
329  empty_loop = 1;
330  else if (nmg_keu(eu_next))
331  empty_loop = 1;
332 
333  if (empty_loop)
334  break;
335 
336  goto crack_topA;
337  }
338  if (empty_loop) {
339  if (nmg_klu(lu)) {
340  empty_face = 1;
341  break;
342  }
343  }
344  lu = lu_next;
345  }
346  if (empty_face) {
347  if (nmg_kfu(fu)) {
348  break;
349  }
350  }
351  fu = fu_next;
352  }
353 
354  fu = BU_LIST_FIRST(faceuse, &sB->fu_hd);
355  while (BU_LIST_NOT_HEAD(fu, &sB->fu_hd)) {
356  struct loopuse *lu;
357  struct loopuse *lu_next;
358  int empty_face=0;
359 
360  NMG_CK_FACEUSE(fu);
361 
362  fu_next = BU_LIST_PNEXT(faceuse, &fu->l);
363  while (BU_LIST_NOT_HEAD(fu_next, &sB->fu_hd)
364  && fu_next == fu->fumate_p)
365  fu_next = BU_LIST_PNEXT(faceuse, &fu_next->l);
366 
367  lu = BU_LIST_FIRST(loopuse, &fu->lu_hd);
368  while (BU_LIST_NOT_HEAD(lu, &fu->lu_hd)) {
369  struct edgeuse *eu;
370  struct edgeuse *eu_next;
371  int empty_loop=0;
372 
373  NMG_CK_LOOPUSE(lu);
374 
375  lu_next = BU_LIST_PNEXT(loopuse, &lu->l);
376 
377  if (BU_LIST_FIRST_MAGIC(&lu->down_hd) != NMG_EDGEUSE_MAGIC) {
378  lu = lu_next;
379  continue;
380  }
381 
382  crack_top:
383  for (BU_LIST_FOR(eu, edgeuse, &lu->down_hd)) {
384  NMG_CK_EDGEUSE(eu);
385 
386  eu_next = BU_LIST_PNEXT_CIRC(edgeuse, &eu->l);
387  NMG_CK_EDGEUSE(eu_next);
388 
389  /* check if eu and eu_next form a jaunt */
390  if (eu->vu_p->v_p != eu_next->eumate_p->vu_p->v_p)
391  continue;
392 
393  /* check if crack apex is in other shell */
394  if (nmg_find_v_in_shell(eu_next->vu_p->v_p, sA, 0))
395  continue;
396 
397  if (nmg_keu(eu))
398  empty_loop = 1;
399  else if (nmg_keu(eu_next))
400  empty_loop = 1;
401 
402  if (empty_loop)
403  break;
404 
405  goto crack_top;
406  }
407  if (empty_loop) {
408  if (nmg_klu(lu)) {
409  empty_face = 1;
410  break;
411  }
412  }
413  lu = lu_next;
414  }
415  if (empty_face) {
416  if (nmg_kfu(fu)) {
417  break;
418  }
419  }
420  fu = fu_next;
421  }
422 }
423 
424 
425 /**
426  * Preprocessor routine for classifier to get all the easy shared
427  * edges and vertices marked as shared.
428  */
429 
430 static void
431 nmg_classify_shared_edges_verts(struct shell *sA, struct shell *sB, char **classlist)
432 {
433  struct bu_ptbl verts;
434  struct bu_ptbl edges;
435  int i;
436 
437  if (RTG.NMG_debug & DEBUG_CLASSIFY)
438  bu_log("nmg_classify_shared_edges_verts(sA=%p, sB=%p)\n", (void *)sA, (void *)sB);
439 
440  NMG_CK_SHELL(sA);
441  NMG_CK_SHELL(sB);
442 
443  nmg_vertex_tabulate(&verts, &sA->l.magic);
444  for (i=0; i<BU_PTBL_END(&verts); i++) {
445  struct vertex *v;
446  struct vertexuse *vu;
447 
448  v = (struct vertex *)BU_PTBL_GET(&verts, i);
449  NMG_CK_VERTEX(v);
450 
451  for (BU_LIST_FOR(vu, vertexuse, &v->vu_hd)) {
452  NMG_CK_VERTEXUSE(vu);
453 
454  if (nmg_find_s_of_vu(vu) == sB) {
455  /* set classification in both lists */
456  NMG_INDEX_SET(classlist[NMG_CLASS_AonBshared], v);
457  NMG_INDEX_SET(classlist[4 + NMG_CLASS_AonBshared], v);
458 
459  if (RTG.NMG_debug & DEBUG_CLASSIFY)
460  bu_log("nmg_classify_shared_edges_verts: v=%p is shared\n", (void *)v);
461 
462  break;
463  }
464  }
465  }
466  bu_ptbl_free(&verts);
467 
468  nmg_edge_tabulate(&edges, &sA->l.magic);
469  for (i=0; i<BU_PTBL_END(&edges); i++) {
470  struct edge *e;
471  struct edgeuse *eu;
472  struct edgeuse *eu_start;
473 
474  e = (struct edge *)BU_PTBL_GET(&edges, i);
475  NMG_CK_EDGE(e);
476 
477  eu_start = e->eu_p;
478  NMG_CK_EDGEUSE(eu_start);
479 
480  eu = eu_start;
481  do {
482  if (nmg_find_s_of_eu(eu) == sB) {
483  NMG_INDEX_SET(classlist[NMG_CLASS_AonBshared], e);
484  NMG_INDEX_SET(classlist[4 + NMG_CLASS_AonBshared], e);
485 
486  if (RTG.NMG_debug & DEBUG_CLASSIFY)
487  bu_log("nmg_classify_shared_edges_verts: e=%p is shared\n", (void *)e);
488 
489  break;
490  }
491 
492  eu = eu->eumate_p->radial_p;
493  NMG_CK_EDGEUSE(eu);
494 
495  } while (eu != eu_start && eu->eumate_p != eu_start);
496  }
497  bu_ptbl_free(&edges);
498 }
499 
500 
501 /**
502  * Look for same loop in opposite direction in shell "s", Kill them.
503  */
504 
void
nmg_kill_anti_loops(struct shell *s)
{
    struct bu_ptbl loops;	/* candidate loopuses from OT_SAME faces */
    struct faceuse *fu;
    struct loopuse *lu;
    register int i, j;

    bu_ptbl_init(&loops, 64, " &loops");

    /* collect every edge-bearing loopuse from the OT_SAME faceuses */
    for (BU_LIST_FOR(fu, faceuse, &s->fu_hd)) {

	if (fu->orientation != OT_SAME)
	    continue;

	for (BU_LIST_FOR(lu, loopuse, &fu->lu_hd)) {

	    /* single-vertex loops have no edges to compare */
	    if (BU_LIST_FIRST_MAGIC(&lu->down_hd) != NMG_EDGEUSE_MAGIC)
		continue;

	    bu_ptbl_ins(&loops, (long *)lu);
	}
    }

    /* compare each pair of collected loops, looking for one that
     * traverses the same vertices as another but in reverse order */
    for (i=0; i < BU_PTBL_END(&loops); i++) {
	struct loopuse *lu1;
	struct edgeuse *eu1_start;
	struct vertex *v1;

	lu1 = (struct loopuse *)BU_PTBL_GET(&loops, i);

	eu1_start = BU_LIST_FIRST(edgeuse, &lu1->down_hd);
	v1 = eu1_start->vu_p->v_p;

	for (j=i+1; j<BU_PTBL_END(&loops); j++) {
	    register struct loopuse *lu2;
	    register struct edgeuse *eu1;
	    register struct edgeuse *eu2;
	    register struct vertexuse *vu2;
	    register struct faceuse *fu1, *fu2;
	    int anti=1;	/* assume anti-loop until a vertex mismatch */

	    lu2 = (struct loopuse *)BU_PTBL_GET(&loops, j);

	    /* look for v1 in lu2 */
	    vu2 = nmg_find_vertex_in_lu(v1, lu2);

	    if (!vu2)
		continue;

	    /* found common vertex, now look for the rest:
	     * walk lu2 forward while walking lu1 backward — an
	     * anti-loop visits the same vertices in opposite order */
	    eu2 = vu2->up.eu_p;
	    eu1 = eu1_start;
	    do {
		eu2 = BU_LIST_PNEXT_CIRC(edgeuse, &eu2->l);
		eu1 = BU_LIST_PPREV_CIRC(edgeuse, &eu1->l);

		if (eu2->vu_p->v_p != eu1->vu_p->v_p) {
		    anti = 0;
		    break;
		}
	    } while (eu1 != eu1_start);

	    if (!anti)
		continue;

	    fu1 = lu1->up.fu_p;
	    fu2 = lu2->up.fu_p;

	    /* don't kill an anti-pair living in the same faceuse */
	    if (fu1 == fu2)
		continue;

	    /* remove from loops prior to kill so we don't pass around
	     * free'd pointers. fine for ptbl, but misleading.
	     */
	    bu_ptbl_rm(&loops, (long *)lu1);
	    bu_ptbl_rm(&loops, (long *)lu2);

	    if (nmg_klu(lu1)) {
		/* loop was the faceuse's last; kill the face too */
		if (nmg_kfu(fu1))
		    goto out;	/* shell itself became empty */
	    }
	    if (nmg_klu(lu2)) {
		if (nmg_kfu(fu2))
		    goto out;
	    }

	    /* lu1 is gone and the table shifted down; back up so the
	     * entry now occupying index i gets examined next */
	    i--;
	    break;
	}
    }
out:
    bu_ptbl_free(&loops);
}
599 
600 
601 void
602 nmg_kill_wire_edges(struct shell *s)
603 {
604  struct loopuse *lu;
605  struct edgeuse *eu;
606 
607  while (BU_LIST_NON_EMPTY(&s->lu_hd)) {
608  lu = BU_LIST_FIRST(loopuse, &s->lu_hd);
609  nmg_klu(lu);
610  }
611 
612  while (BU_LIST_NON_EMPTY(&s->eu_hd)) {
613  eu = BU_LIST_FIRST(edgeuse, &s->eu_hd);
614  nmg_keu(eu);
615  }
616 }
617 
618 
619 /**
620  * Perform boolean operations on a pair of shells.
621  *
622  * The return is an updated version of shell A. Shell B is destroyed.
623  *
624  * XXX this probably should operate on regions, not shells.
625  */
static struct shell * nmg_bool(struct shell *sA, struct shell *sB, const int oper, const struct bn_tol *tol)
{
    int i;
    long nelem;			/* index slots in the model (m->maxindex) */
    char *classlist[8];		/* [0..3]: A -vs- B; [4..7]: B -vs- A classifications */
    FILE *fd, *fp;
    struct model *m;
    struct nmgregion *rA;
    struct nmgregion *rB;

    NMG_CK_SHELL(sA);
    NMG_CK_SHELL(sB);
    rA = sA->r_p;
    rB = sB->r_p;
    NMG_CK_REGION(rA);
    NMG_CK_REGION(rB);
    m = rA->m_p;
    NMG_CK_MODEL(m);

    if (sA->r_p->m_p != sB->r_p->m_p) {
	bu_bomb("nmg_bool(): internal error, both shells are not in the same nmg model\n");
    }

    /* for the simple case where shells sA and sB are disjoint by at
     * least distance tolerance, we can skip most of the steps to
     * perform the boolean operation
     */
    if (V3RPP_DISJOINT_TOL(sA->sa_p->min_pt, sA->sa_p->max_pt,
			   sB->sa_p->min_pt, sB->sa_p->max_pt, tol->dist)) {
	switch (oper) {
	    case NMG_BOOL_ADD: {
		struct faceuse *fu;
		vect_t s_min_pt;
		vect_t s_max_pt;

		/* find new sA shell bounding box which combines the
		 * bounding boxes of shells sA and sB.
		 */
		VSETALL(s_min_pt, MAX_FASTF);
		VSETALL(s_max_pt, -MAX_FASTF);
		VMIN(s_min_pt, sA->sa_p->min_pt);
		VMIN(s_min_pt, sB->sa_p->min_pt);
		VMIN(s_min_pt, sB->sa_p->max_pt);
		VMAX(s_max_pt, sA->sa_p->max_pt);
		VMAX(s_max_pt, sB->sa_p->min_pt);
		VMAX(s_max_pt, sB->sa_p->max_pt);

		/* move all the faceuse from shell sB to shell sA */
		for (BU_LIST_FOR(fu, faceuse, &sB->fu_hd)) {
		    fu->s_p = sA;
		}
		BU_LIST_APPEND_LIST(&(sA->fu_hd), &(sB->fu_hd));

		/* assign new bounding box to sA */
		VMOVE(sA->sa_p->min_pt, s_min_pt);
		VMOVE(sA->sa_p->max_pt, s_max_pt);

		/* kill shell sB */
		nmg_ks(sB);
		break;
	    }
	    case NMG_BOOL_SUB:
		/* disjoint: A - B is just A; kill shell sB */
		nmg_ks(sB);
		break;
	    case NMG_BOOL_ISECT:
		/* disjoint: intersection is empty; kill the contents of shell sA */
		while (BU_LIST_NON_EMPTY(&sA->fu_hd)) {
		    (void)nmg_kfu(BU_LIST_FIRST(faceuse, &sA->fu_hd));
		}
		while (BU_LIST_NON_EMPTY(&sA->lu_hd)) {
		    (void)nmg_klu(BU_LIST_FIRST(loopuse, &sA->lu_hd));
		}
		while (BU_LIST_NON_EMPTY(&sA->eu_hd)) {
		    (void)nmg_keu(BU_LIST_FIRST(edgeuse, &sA->eu_hd));
		}
		if (sA->vu_p) {
		    nmg_kvu(sA->vu_p);
		}
		/* kill shell sB */
		nmg_ks(sB);
		break;
	    default:
		bu_bomb("nmg_bool(): unknown operation\n");
	}
	return sA;
    }

    /* NOTE(review): a statement appears to be missing here in this view of
     * the file (original line 714; presumably "debug_file_count++;", which
     * the debug file names below depend on) — confirm against the repository. */

    if (RTG.NMG_debug & DEBUG_VERIFY) {
	/* Sometimes the tessellations of non-participating regions
	 * are damaged during a boolean operation. Check everything.
	 */
	nmg_vmodel(m);
    }

    /* merge coplanar faces within each shell before intersecting */
    nmg_shell_coplanar_face_merge(sA, tol, 1);
    nmg_shell_coplanar_face_merge(sB, tol, 1);

    nmg_model_fuse(m, tol);

    /* warn (and optionally plot) if sA is not a closed surface */
    if (nmg_check_closed_shell(sA, tol)) {
	if (RTG.NMG_debug & DEBUG_BOOL &&
	    RTG.NMG_debug & DEBUG_PLOTEM) {
	    if ((fp=fopen("Unclosed.plot3", "wb")) != (FILE *)NULL) {
		bu_log("Plotting unclosed NMG shell\n");
		nmg_pl_s(fp, sA);
		fclose(fp);
	    }
	}
	if (RTG.NMG_debug & DEBUG_BOOL)
	    nmg_pr_s(sA, "");

	bu_log("nmg_bool: sA is unclosed, barging ahead\n");
    }

    /* same check for sB */
    if (nmg_check_closed_shell(sB, tol)) {
	if (RTG.NMG_debug & DEBUG_BOOL &&
	    RTG.NMG_debug & DEBUG_PLOTEM) {
	    if ((fp=fopen("Unclosed.plot3", "wb")) != (FILE *)NULL) {
		bu_log("Plotting unclosed NMG shell\n");
		nmg_pl_s(fp, sB);
		fclose(fp);
	    }
	}
	if (RTG.NMG_debug & DEBUG_BOOL)
	    nmg_pr_s(sB, "");
	bu_log("nmg_bool: sB is unclosed, barging ahead\n");
    }


    if (RTG.NMG_debug & DEBUG_BOOL && RTG.NMG_debug & DEBUG_PLOTEM) {
	if ((fp=fopen("shellA.plot3", "wb")) == (FILE*)NULL) {
	    (void)perror("shellA.plot3");
	    bu_bomb("unable to open shellA.plot3 for writing");
	}
	bu_log("plotting shellA.plot3\n");
	nmg_pl_s(fp, sA);
	fclose(fp);

	if ((fp=fopen("shellB.plot3", "wb")) == (FILE*)NULL) {
	    (void)perror("shellB.plot3");
	    bu_bomb("unable to open shellB.plot3 for writing");
	}
	bu_log("plotting shellB.plot3\n");
	nmg_pl_s(fp, sB);
	fclose(fp);
    }

    if (RTG.NMG_debug & DEBUG_VERIFY) {
	/* Sometimes the tessellations of non-participating regions
	 * are damaged during a boolean operation. Check everything.
	 */
	nmg_vmodel(m);
    }

    if (RTG.NMG_debug & DEBUG_BOOL) {
	char file_name[256];

	sprintf(file_name, "before%d.g", debug_file_count);
	nmg_stash_model_to_file(file_name, m, "Before crackshells");
    }

    /* Perform shell/shell intersections */
    nmg_crackshells(sA, sB, tol);

    if (RTG.NMG_debug & DEBUG_BOOL) {
	stash_shell(sA, "a1_", "sA", tol);
	stash_shell(sB, "b1_", "sB", tol);
	bu_log("Just After Crackshells:\nShell A:\n");
	nmg_pr_s_briefly(sA, 0);
	bu_log("Just After Crackshells:\nShell B:\n");
	nmg_pr_s_briefly(sB, 0);
    }

    (void)nmg_vertex_fuse(&m->magic, tol);

    /* remove anti-parallel duplicate loops created by cracking */
    (void)nmg_kill_anti_loops(sA);
    (void)nmg_kill_anti_loops(sB);

    /* clean things up now that the intersections have been built */
    nmg_sanitize_s_lv(sA, OT_BOOLPLACE);
    nmg_sanitize_s_lv(sB, OT_BOOLPLACE);

    /* Separate any touching loops, so classifier does not have any
     * really complex loops to do.
     * In particular, it is necessary for (r410) to make
     * interior (touching) loop segments into true interior loops
     * that are separate from the exterior loop,
     * so the classifier can assess each separately.
     */
    nmg_s_split_touchingloops(sA, tol);
    nmg_s_split_touchingloops(sB, tol);

    (void)nmg_kill_cracks(sA);
    (void)nmg_kill_cracks(sB);

    /* eliminate unnecessary breaks in edges */
    (void)nmg_simplify_shell_edges(sA, tol);
    (void)nmg_simplify_shell_edges(sB, tol);

    (void)nmg_break_e_on_v(&m->magic, tol);

    (void)nmg_edge_fuse(&m->magic, tol);

    if (RTG.NMG_debug & DEBUG_VERIFY) {
	/* Sometimes the tessellations of non-participating regions
	 * are damaged during a boolean operation. Check everything.
	 */
	nmg_vmodel(m);
	/* anything left to fuse here indicates an intersection bug */
	if ((i = nmg_model_fuse(m, tol)) > 0) {
	    bu_log("NOTICE: nmg_bool: fused %d entities while cracking shells\n", i);
	    bu_bomb("nmg_bool() entities unfused after nmg_crackshells()\n");
	}
    }

    if (RTG.NMG_debug & DEBUG_BOOL) {
	int dangle_error = 0;
	if (nmg_has_dangling_faces((uint32_t *)rA, (char *)NULL)) {
	    dangle_error = 1;
	    bu_log("nmg_bool(): Dangling faces detected in rA before classification\n");
	}
	if (nmg_has_dangling_faces((uint32_t *)rB, (char *)NULL)) {
	    dangle_error = 1;
	    bu_log("nmg_bool(): Dangling faces detected in rB before classification\n");
	}
	if (nmg_has_dangling_faces((uint32_t *)m, (char *)NULL)) {
	    dangle_error = 1;
	    bu_log("nmg_bool(): Dangling faces detected in model before classification\n");
	}
	if (dangle_error) {
	    nmg_stash_model_to_file("dangle.g", m, "After Boolean");
	    bu_bomb("nmg_bool(): Dangling faces detected before classification\n");
	}
    }

    if (RTG.NMG_debug & DEBUG_VERIFY) {
	/* Sometimes the tessellations of non-participating regions
	 * are damaged during a boolean operation. Check everything.
	 */
	nmg_vmodel(m);
    }

    /*
     * Before splitting, join up small loop fragments into large
     * ones, so that maximal splits will be possible.
     * This is essential for cutting holes in faces, e.g. Test3.r
     */
    if (RTG.NMG_debug & DEBUG_BOOL) {
	char file_name[256];

	sprintf(file_name, "notjoined%d.g", debug_file_count);
	nmg_stash_model_to_file(file_name, m, "Before s_join_touchingloops");
    }

    /* Re-build bounding boxes, edge geometry, as needed. */
    nmg_shell_a(sA, tol);
    nmg_shell_a(sB, tol);

    if (RTG.NMG_debug & DEBUG_BOOL) {
	stash_shell(sA, "a", "sA", tol);
	stash_shell(sB, "b", "sB", tol);

	bu_log("sA:\n");
	nmg_pr_s_briefly(sA, 0);
	bu_log("sB:\n");
	nmg_pr_s_briefly(sB, 0);
    }

    if (RTG.NMG_debug & DEBUG_BOOL) {
	char file_name[256];

	sprintf(file_name, "after%d.g", debug_file_count);
	nmg_stash_model_to_file(file_name, m, "After crackshells");
    }

    if (RTG.NMG_debug & DEBUG_BOOL) {
	if (RTG.NMG_debug & DEBUG_PLOTEM) {
	    if ((fd = fopen("Cracked_Shells.plot3", "wb")) == (FILE *)NULL) {
		(void)perror("Cracked_Shells");
		bu_bomb("unable to open Cracked_Shells.plot3 for writing");
	    }
	    bu_log("plotting Cracked_Shells.plot3\n");

	    nmg_pl_s(fd, sA);
	    nmg_pl_s(fd, sB);
	    (void)fclose(fd);

	    nmg_pl_isect("isectA.plot3", sA, tol);
	    nmg_pl_isect("isectB.plot3", sB, tol);
	}

	bu_log("check 2\n");
    }

    if (nmg_ck_closed_surf(sA, tol))
	bu_log("nmg_bool() WARNING: sA unclosed before classification. Boldly pressing on.\n");
    if (nmg_ck_closed_surf(sB, tol))
	bu_log("nmg_bool() WARNING: sB unclosed before classification. Boldly pressing on.\n");

    if (RTG.NMG_debug & DEBUG_VERIFY) {
	/* Sometimes the tessellations of non-participating regions
	 * are damaged during a boolean operation. Check everything.
	 */
	nmg_vmodel(m);
    }

    /* compact the index space so classlist[] can be sized by maxindex */
    nmg_m_reindex(m, 0);

    /* Allocate storage for classlist[]. Allocate each of the 8 class
     * lists one at a time. This will assist with debugging to
     * determine if each array read/write is within its allocated space.
     */
    nelem = m->maxindex;
    for (i = 0; i < 8; i++) {
	classlist[i] = (char *)bu_calloc(nelem, sizeof(char), "nmg_bool classlist");
    }

    nmg_classify_shared_edges_verts(sA, sB, classlist);

    if (RTG.NMG_debug & (DEBUG_GRAPHCL|DEBUG_PL_LOOP)) {
	nmg_show_broken_classifier_stuff((uint32_t *)sA, &classlist[0], nmg_class_nothing_broken, 1, "unclassed sA");
	nmg_show_broken_classifier_stuff((uint32_t *)sB, &classlist[4], 1, 1, "unclassed sB");
    }

    /* rebuild the manifold table for the classifier */
    if (m->manifolds) {
	bu_free((char *)m->manifolds, "free manifolds table");
	m->manifolds = (char *)NULL;
    }
    m->manifolds = nmg_manifolds(m);

    /*
     * Classify A -vs- B, then B -vs- A.
     * Carry onAonBshared and onAonBanti classifications forward
     * from first step to second step.
     * A -vs- B live in classlist[0..3], B -vs- A live in classlist[4..7].
     */
    nmg_class_shells(sA, sB, &classlist[0], tol);
    memcpy((char *)classlist[4+NMG_CLASS_AonBshared],
	   (char *)classlist[0+NMG_CLASS_AonBshared],
	   nelem*sizeof(char));
    memcpy((char *)classlist[4+NMG_CLASS_AonBanti],
	   (char *)classlist[0+NMG_CLASS_AonBanti],
	   nelem*sizeof(char));
    memcpy((char *)classlist[4+NMG_CLASS_AoutB],
	   (char *)classlist[0+NMG_CLASS_AoutB],
	   nelem*sizeof(char));
    nmg_class_shells(sB, sA, &classlist[4], tol);

    if (m->manifolds) {
	bu_free((char *)m->manifolds, "free manifolds table");
	m->manifolds = (char *)NULL;
    }

    if (RTG.NMG_debug & (DEBUG_GRAPHCL|DEBUG_PL_LOOP)) {
	/* NOTE(review): a statement appears to be missing here in this view
	 * (original line 982; presumably "nmg_class_nothing_broken = 1;") —
	 * confirm against the repository. */

	/* Show each loop, one at a time, non-fancy */
	/* XXX Should have its own bit, or combination -- not always wanted */
	nmg_show_each_loop(sA, &classlist[0], 1, 0, "sA lu");
	nmg_show_each_loop(sB, &classlist[4], 1, 0, "sB lu");

	/* Show each shell as a whole */
	nmg_show_broken_classifier_stuff((uint32_t *)sA, &classlist[0], 1, 0, "sA classed");
	nmg_show_broken_classifier_stuff((uint32_t *)sB, &classlist[4], 1, 0, "sB classed");
    }

    if (RTG.NMG_debug & DEBUG_BOOL) {
	bu_log("Just before nmg_evaluate_boolean:\nShell A:\n");
	nmg_pr_s_briefly(sA, 0);
	bu_log("Shell B:\n");
	nmg_pr_s_briefly(sB, 0);
    }

    nmg_s_radial_check(sA, tol);
    nmg_s_radial_check(sB, tol);
    /* the actual boolean evaluation; consumes (kills) shell sB */
    nmg_evaluate_boolean(sA, sB, oper, classlist, tol);
    sB = NULL; /* sanity, killed during boolean eval */

    if (RTG.NMG_debug & DEBUG_BOOL) {
	bu_log("Just after nmg_evaluate_boolean:\nShell A:\n");
	nmg_pr_s_briefly(sA, 0);
	bu_log("Shell B:\nFreed.");
    }

    if (RTG.NMG_debug & DEBUG_VERIFY) {
	nmg_vmodel(m);
	if ((i = nmg_model_fuse(m, tol)) > 0) {
	    bu_log("ERROR: nmg_bool: fused %d entities after BOOLEAN. Isect bug.\n", i);
	    bu_bomb("nmg_bool() entities unfused after nmg_evaluate_boolean()\n");
	}
    }

    /*
     * nmg_evaluate_boolean() may return an invalid shell, i.e., one
     * that has absolutely nothing in it. This is an indication that
     * the shell should be deleted from the region, an operation which
     * can not be accomplished this far down in the subroutine tree.
     */
    if (!nmg_shell_is_empty(sA)) {

	nmg_s_radial_check(sA, tol);

	if (RTG.NMG_debug & DEBUG_BOOL) {
	    int dangle_error = 0;
	    if (nmg_has_dangling_faces((uint32_t *)rA, (char *)NULL)) {
		dangle_error = 1;
		bu_log("nmg_bool(): Dangling faces detected in rA after boolean\n");
	    }
	    if (nmg_has_dangling_faces((uint32_t *)rB, (char *)NULL)) {
		dangle_error = 1;
		bu_log("nmg_bool(): Dangling faces detected in rB after boolean\n");
	    }
	    if (nmg_has_dangling_faces((uint32_t *)m, (char *)NULL)) {
		dangle_error = 1;
		bu_log("nmg_bool(): Dangling faces detected in m after boolean\n");
	    }
	    if (dangle_error) {
		nmg_stash_model_to_file("dangle.g", m, "After Boolean");
		bu_bomb("nmg_bool(): Dangling faces detected after boolean\n");
	    }
	} else {
	    if (nmg_has_dangling_faces((uint32_t *)rA, (char *)NULL)) {
		(void)nmg_plot_open_edges((const uint32_t *)rA, "open_edges");
		bu_bomb("nmg_bool(): Dangling faces detected in rA after boolean\n");
	    }
	}

	/* Do this before table size changes */
	if (RTG.NMG_debug & (DEBUG_GRAPHCL|DEBUG_PL_LOOP)) {
	    /* NOTE(review): a statement appears to be missing here in this
	     * view (original line 1057; presumably
	     * "nmg_class_nothing_broken = 1;") — confirm against the repo. */

	    /* Show final result of the boolean */
	    nmg_show_broken_classifier_stuff((uint32_t *)sA, &classlist[0], 1, 0, "sA result");
	}

	/* Go back and combine loops of faces together wherever
	 * possible to reduce the loop/edge count.
	 */
	nmg_simplify_shell(sA);
	if (RTG.NMG_debug & DEBUG_VERIFY)
	    nmg_vshell(&rA->s_hd, rA);

	(void) nmg_unbreak_region_edges(&sA->l.magic);

	if (RTG.NMG_debug & DEBUG_BOOL) {
	    bu_log("Just after nmg_simplify_shell:\nShell A:\n");
	    nmg_pr_s_briefly(sA, 0);
	}

	/* Bounding boxes may have changed */
	nmg_shell_a(sA, tol);

	if (nmg_ck_closed_surf(sA, tol)) {
	    if (RTG.NMG_debug)
		bu_log("nmg_bool() WARNING: sA unclosed at return, barging on.\n");
	    else
		bu_bomb("nmg_bool() sA unclosed at return, aborting.\n");
	}
	nmg_s_radial_check(sA, tol);

	if (RTG.NMG_debug & DEBUG_BOOL) {
	    char tmp_name[256];
	    sprintf(tmp_name, "after_bool_%d.g", debug_file_count);
	    nmg_stash_model_to_file(tmp_name, m, "After Boolean");
	}
    }

    /* release the classification tables */
    for (i = 0; i < 8; i++) {
	bu_free((char *)classlist[i], "nmg_bool classlist");
    }

    if (RTG.NMG_debug & DEBUG_BOOL) {
	bu_log("Returning from NMG_BOOL\n");
    }
    if (RTG.NMG_debug & DEBUG_VERIFY) {
	/* Sometimes the tessellations of non-participating regions
	 * are damaged during a boolean operation. Check everything.
	 */
	nmg_vmodel(m);
    }

    nmg_kill_wire_edges(sA);

    return sA;
}
1113 
1114 
1115 /**
1116  * BUG: we assume only one shell per region
1117  */
1118 struct nmgregion *
1119 nmg_do_bool(struct nmgregion *rA, struct nmgregion *rB, const int oper, const struct bn_tol *tol)
1120 {
1121  struct shell *s;
1122  struct nmgregion *r;
1123 
1124  NMG_CK_REGION(rA);
1125  NMG_CK_REGION(rB);
1126 
1127  nmg_region_v_unique(rA, tol);
1128  nmg_region_v_unique(rB, tol);
1129 
1130  s = nmg_bool(BU_LIST_FIRST(shell, &rA->s_hd),
1131  BU_LIST_FIRST(shell, &rB->s_hd),
1132  oper, tol);
1133  r = s->r_p;
1134 
1135  /* shell B was destroyed, need to eliminate region B */
1136  nmg_kr(rB);
1137 
1138  NMG_CK_SHELL(s);
1139  NMG_CK_REGION(r);
1140 
1141  /* If shell A became empty, eliminate it from the returned region */
1142  if (nmg_shell_is_empty(s)) {
1143  nmg_ks(s);
1144  if (BU_LIST_NON_EMPTY(&r->s_hd)) {
1145  bu_bomb("nmg_do_bool: Result of Boolean is an empty shell, but region is not empty!!!\n");
1146  }
1147  nmg_kr(r);
1148  return (struct nmgregion *)NULL;
1149  }
1150 
1151  return r;
1152 }
1153 
1154 
1155 /* XXX move to ??? Everything from here on down needs to go into another module */
1156 
1157 
1158 /**
1159  * Called from db_walk_tree() each time a tree leaf is encountered.
1160  * The primitive solid, in external format, is provided in 'ep', and
1161  * the type of that solid (e.g. ID_ELL) is in 'id'. The full tree
1162  * state including the accumulated transformation matrix and the
1163  * current tolerancing is in 'tsp', and the full path from root to
1164  * leaf is in 'pathp'.
1165  *
1166  * Import the solid, tessellate it into an NMG, stash a pointer to the
1167  * tessellation in a new tree structure (union), and return a pointer
1168  * to that.
1169  *
1170  * Usually given as an argument to, and called from db_walk_tree().
1171  *
1172  * This routine must be prepared to run in parallel.
1173  */
1174 union tree *
1175 nmg_booltree_leaf_tess(struct db_tree_state *tsp, const struct db_full_path *pathp, struct rt_db_internal *ip, void *UNUSED(client_data))
1176 {
1177  struct model *m;
1178  struct nmgregion *r1 = (struct nmgregion *)NULL;
1179  union tree *curtree;
1180  struct directory *dp;
1181 
1182  if (!tsp || !pathp || !ip)
1183  return TREE_NULL;
1184 
1185  RT_CK_DB_INTERNAL(ip);
1186  RT_CK_FULL_PATH(pathp);
1187  dp = DB_FULL_PATH_CUR_DIR(pathp);
1188  RT_CK_DIR(dp);
1189 
1190  if (!ip->idb_meth || !ip->idb_meth->ft_tessellate) {
1191  bu_log("ERROR(%s): tessellation support not available\n", dp->d_namep);
1192  return TREE_NULL;
1193  }
1194 
1195  NMG_CK_MODEL(*tsp->ts_m);
1196  BN_CK_TOL(tsp->ts_tol);
1197  RT_CK_TESS_TOL(tsp->ts_ttol);
1198  RT_CK_RESOURCE(tsp->ts_resp);
1199 
1200  m = nmg_mm();
1201 
1202  if (ip->idb_meth->ft_tessellate(&r1, m, ip, tsp->ts_ttol, tsp->ts_tol) < 0) {
1203  bu_log("ERROR(%s): tessellation failure\n", dp->d_namep);
1204  return TREE_NULL;
1205  }
1206 
1207  NMG_CK_REGION(r1);
1208  if (RTG.NMG_debug & DEBUG_VERIFY) {
1209  nmg_vshell(&r1->s_hd, r1);
1210  }
1211 
1212  RT_GET_TREE(curtree, tsp->ts_resp);
1213  curtree->tr_op = OP_NMG_TESS;
1214  curtree->tr_d.td_name = bu_strdup(dp->d_namep);
1215  curtree->tr_d.td_r = r1;
1216 
1218  bu_log("nmg_booltree_leaf_tess(%s) OK\n", dp->d_namep);
1219 
1220  return curtree;
1221 }
1222 
1223 
1224 /**
1225  * Called from db_walk_tree() each time a tree leaf is encountered.
1226  * The primitive solid, in external format, is provided in 'ep', and
1227  * the type of that solid (e.g. ID_ELL) is in 'id'. The full tree
1228  * state including the accumulated transformation matrix and the
1229  * current tolerancing is in 'tsp', and the full path from root to
1230  * leaf is in 'pathp'.
1231  *
1232  * Import the solid, convert it into an NMG using t-NURBS, stash a
1233  * pointer in a new tree structure (union), and return a pointer to
1234  * that.
1235  *
1236  * Usually given as an argument to, and called from db_walk_tree().
1237  *
1238  * This routine must be prepared to run in parallel.
1239  */
1240 union tree *
1241 nmg_booltree_leaf_tnurb(struct db_tree_state *tsp, const struct db_full_path *pathp, struct rt_db_internal *ip, void *UNUSED(client_data))
1242 {
1243  struct nmgregion *r1;
1244  union tree *curtree;
1245  struct directory *dp;
1246 
1247  NMG_CK_MODEL(*tsp->ts_m);
1248  BN_CK_TOL(tsp->ts_tol);
1249  RT_CK_TESS_TOL(tsp->ts_ttol);
1250  RT_CK_DB_INTERNAL(ip);
1251  RT_CK_RESOURCE(tsp->ts_resp);
1252 
1253  RT_CK_FULL_PATH(pathp);
1254  dp = DB_FULL_PATH_CUR_DIR(pathp);
1255  RT_CK_DIR(dp);
1256 
1257  if (ip->idb_meth->ft_tnurb(
1258  &r1, *tsp->ts_m, ip, tsp->ts_tol) < 0) {
1259  bu_log("nmg_booltree_leaf_tnurb(%s): CSG to t-NURB conversation failure\n", dp->d_namep);
1260  return TREE_NULL;
1261  }
1262 
1263  NMG_CK_REGION(r1);
1264  if (RTG.NMG_debug & DEBUG_VERIFY) {
1265  nmg_vshell(&r1->s_hd, r1);
1266  }
1267 
1268  RT_GET_TREE(curtree, tsp->ts_resp);
1269  curtree->tr_op = OP_NMG_TESS;
1270  curtree->tr_d.td_name = bu_strdup(dp->d_namep);
1271  curtree->tr_d.td_r = r1;
1272 
1274  bu_log("nmg_booltree_leaf_tnurb(%s) OK\n", dp->d_namep);
1275 
1276  return curtree;
1277 }
1278 
1279 
/* quell the output of nmg_booltree_evaluate() to bu_log. */
int nmg_bool_eval_silent = 0;
1283 /**
1284  * Given a tree of leaf nodes tessellated earlier by
1285  * nmg_booltree_leaf_tess(), use recursion to do a depth-first
1286  * traversal of the tree, evaluating each pair of boolean operations
1287  * and reducing that result to a single nmgregion.
1288  *
1289  * Usually called from a do_region_end() handler from db_walk_tree().
1290  * For an example of several, see mged/dodraw.c.
1291  *
1292  * Returns an OP_NMG_TESS union tree node, which will contain the
1293  * resulting region and its name, as a dynamic string. The caller is
1294  * responsible for releasing the string, and the node, by calling
1295  * db_free_tree() on the node.
1296  *
1297  * It is *essential* that the caller call nmg_model_fuse() before
1298  * calling this subroutine.
1299  *
1300  * Returns NULL if there is no geometry to return.
1301  *
1302  * Typical calls will be of this form:
1303  * (void)nmg_model_fuse(m, tol);
1304  * curtree = nmg_booltree_evaluate(curtree, tol);
1305  */
1306 union tree *
1307 nmg_booltree_evaluate(register union tree *tp, const struct bn_tol *tol, struct resource *resp)
1308 {
1309  union tree *tl;
1310  union tree *tr;
1311  struct nmgregion *reg;
1312  int op = NMG_BOOL_ADD; /* default value */
1313  const char *op_str = " u "; /* default value */
1314  size_t rem;
1315  char *name;
1316 
1317  RT_CK_TREE(tp);
1318  BN_CK_TOL(tol);
1319  RT_CK_RESOURCE(resp);
1320 
1321  switch (tp->tr_op) {
1322  case OP_NOP:
1323  return TREE_NULL;
1324  case OP_NMG_TESS:
1325  /* Hit a tree leaf */
1326  if (RTG.NMG_debug & DEBUG_VERIFY) {
1327  nmg_vshell(&tp->tr_d.td_r->s_hd, tp->tr_d.td_r);
1328  }
1329  return tp;
1330  case OP_UNION:
1331  op = NMG_BOOL_ADD;
1332  op_str = " u ";
1333  break;
1334  case OP_INTERSECT:
1335  op = NMG_BOOL_ISECT;
1336  op_str = " + ";
1337  break;
1338  case OP_SUBTRACT:
1339  op = NMG_BOOL_SUB;
1340  op_str = " - ";
1341  break;
1342  default:
1343  bu_bomb("nmg_booltree_evaluate(): bad op\n");
1344  }
1345 
1346  /* Handle a boolean operation node. First get its leaves. */
1347  tl = nmg_booltree_evaluate(tp->tr_b.tb_left, tol, resp);
1348  tr = nmg_booltree_evaluate(tp->tr_b.tb_right, tol, resp);
1349 
1350  if (tl) {
1351  RT_CK_TREE(tl);
1352  if (tl != tp->tr_b.tb_left) {
1353  bu_bomb("nmg_booltree_evaluate(): tl != tp->tr_b.tb_left\n");
1354  }
1355  }
1356  if (tr) {
1357  RT_CK_TREE(tr);
1358  if (tr != tp->tr_b.tb_right) {
1359  bu_bomb("nmg_booltree_evaluate(): tr != tp->tr_b.tb_right\n");
1360  }
1361  }
1362 
1363  if (!tl && !tr) {
1364  /* left-r == null && right-r == null */
1365  RT_CK_TREE(tp);
1366  db_free_tree(tp->tr_b.tb_left, resp);
1367  db_free_tree(tp->tr_b.tb_right, resp);
1368  tp->tr_op = OP_NOP;
1369  return TREE_NULL;
1370  }
1371 
1372  if (tl && !tr) {
1373  /* left-r != null && right-r == null */
1374  RT_CK_TREE(tp);
1375  db_free_tree(tp->tr_b.tb_right, resp);
1376  if (op == NMG_BOOL_ISECT) {
1377  /* OP_INTERSECT '+' */
1378  RT_CK_TREE(tp);
1379  db_free_tree(tl, resp);
1380  tp->tr_op = OP_NOP;
1381  return TREE_NULL;
1382  } else {
1383  /* copy everything from tl to tp no matter which union type
1384  * could probably have done a mem-copy
1385  */
1386  tp->tr_op = tl->tr_op;
1387  tp->tr_b.tb_regionp = tl->tr_b.tb_regionp;
1388  tp->tr_b.tb_left = tl->tr_b.tb_left;
1389  tp->tr_b.tb_right = tl->tr_b.tb_right;
1390 
1391  /* null data from tl so only to free this node */
1392  tl->tr_b.tb_regionp = (struct region *)NULL;
1393  tl->tr_b.tb_left = TREE_NULL;
1394  tl->tr_b.tb_right = TREE_NULL;
1395 
1396  db_free_tree(tl, resp);
1397  return tp;
1398  }
1399  }
1400 
1401  if (!tl && tr) {
1402  /* left-r == null && right-r != null */
1403  RT_CK_TREE(tp);
1404  db_free_tree(tp->tr_b.tb_left, resp);
1405  if (op == NMG_BOOL_ADD) {
1406  /* OP_UNION 'u' */
1407  /* copy everything from tr to tp no matter which union type
1408  * could probably have done a mem-copy
1409  */
1410  tp->tr_op = tr->tr_op;
1411  tp->tr_b.tb_regionp = tr->tr_b.tb_regionp;
1412  tp->tr_b.tb_left = tr->tr_b.tb_left;
1413  tp->tr_b.tb_right = tr->tr_b.tb_right;
1414 
1415  /* null data from tr so only to free this node */
1416  tr->tr_b.tb_regionp = (struct region *)NULL;
1417  tr->tr_b.tb_left = TREE_NULL;
1418  tr->tr_b.tb_right = TREE_NULL;
1419 
1420  db_free_tree(tr, resp);
1421  return tp;
1422 
1423  } else if ((op == NMG_BOOL_SUB) || (op == NMG_BOOL_ISECT)) {
1424  /* for sub and intersect, if left-hand-side is null, result is null */
1425  RT_CK_TREE(tp);
1426  db_free_tree(tr, resp);
1427  tp->tr_op = OP_NOP;
1428  return TREE_NULL;
1429 
1430  } else {
1431  bu_bomb("nmg_booltree_evaluate(): error, unknown operation\n");
1432  }
1433  }
1434 
1435  if (tl->tr_op != OP_NMG_TESS) {
1436  bu_bomb("nmg_booltree_evaluate(): bad left tree\n");
1437  }
1438  if (tr->tr_op != OP_NMG_TESS) {
1439  bu_bomb("nmg_booltree_evaluate(): bad right tree\n");
1440  }
1441 
1442  if (!nmg_bool_eval_silent) {
1443  bu_log(" {%s}%s{%s}\n", tl->tr_d.td_name, op_str, tr->tr_d.td_name);
1444  }
1445 
1446  NMG_CK_REGION(tr->tr_d.td_r);
1447  NMG_CK_REGION(tl->tr_d.td_r);
1448 
1449  if (nmg_ck_closed_region(tr->tr_d.td_r, tol)) {
1450  bu_bomb("nmg_booltree_evaluate(): ERROR, non-closed shell (r)\n");
1451  }
1452  if (nmg_ck_closed_region(tl->tr_d.td_r, tol)) {
1453  bu_bomb("nmg_booltree_evaluate(): ERROR, non-closed shell (l)\n");
1454  }
1455 
1456  nmg_r_radial_check(tr->tr_d.td_r, tol);
1457  nmg_r_radial_check(tl->tr_d.td_r, tol);
1458 
1459  if (RTG.NMG_debug & DEBUG_BOOL) {
1460  bu_log("Before model fuse\nShell A:\n");
1461  nmg_pr_s_briefly(BU_LIST_FIRST(shell, &tl->tr_d.td_r->s_hd), "");
1462  bu_log("Shell B:\n");
1463  nmg_pr_s_briefly(BU_LIST_FIRST(shell, &tr->tr_d.td_r->s_hd), "");
1464  }
1465 
1466  /* move operands into the same model */
1467  if (tr->tr_d.td_r->m_p != tl->tr_d.td_r->m_p) {
1468  nmg_merge_models(tl->tr_d.td_r->m_p, tr->tr_d.td_r->m_p);
1469  }
1470 
1471  /* input r1 and r2 are destroyed, output is new region */
1472  reg = nmg_do_bool(tl->tr_d.td_r, tr->tr_d.td_r, op, tol);
1473 
1474  /* build string of result name */
1475  rem = strlen(tl->tr_d.td_name) + 3 + strlen(tr->tr_d.td_name) + 2 + 1;
1476  name = (char *)bu_calloc(rem, sizeof(char), "nmg_booltree_evaluate name");
1477  snprintf(name, rem, "(%s%s%s)", tl->tr_d.td_name, op_str, tr->tr_d.td_name);
1478 
1479  /* clean up child tree nodes */
1480  tl->tr_d.td_r = (struct nmgregion *)NULL;
1481  tr->tr_d.td_r = (struct nmgregion *)NULL;
1482  db_free_tree(tl, resp);
1483  db_free_tree(tr, resp);
1484 
1485 
1486  if (reg) {
1487  /* convert argument binary node into a result node */
1488  NMG_CK_REGION(reg);
1489  nmg_r_radial_check(reg, tol);
1490  tp->tr_op = OP_NMG_TESS;
1491  tp->tr_d.td_r = reg;
1492  tp->tr_d.td_name = name;
1493 
1494  if (RTG.NMG_debug & DEBUG_VERIFY) {
1495  nmg_vshell(&reg->s_hd, reg);
1496  }
1497  return tp;
1498 
1499  } else {
1500  /* resulting region was null */
1501  tp->tr_op = OP_NOP;
1502  return TREE_NULL;
1503  }
1504 
1505 }
1506 
1507 
1508 /**
1509  * This is the main application interface to the NMG Boolean
1510  * Evaluator.
1511  *
1512  * This routine has the opportunity to do "once only" operations
1513  * before and after the boolean tree is walked.
1514  *
1515  * Returns -
1516  * 0 Boolean went OK. Result region is in tp->tr_d.td_r
1517  * !0 Boolean produced null result.
1518  *
1519  * The caller is responsible for freeing 'tp' in either case,
1520  * typically with db_free_tree(tp);
1521  */
1522 int
1523 nmg_boolean(union tree *tp, struct model *m, const struct bn_tol *tol, struct resource *resp)
1524 {
1525  union tree *result;
1526  int ret;
1527 
1528  RT_CK_TREE(tp);
1529  NMG_CK_MODEL(m);
1530  BN_CK_TOL(tol);
1531  RT_CK_RESOURCE(resp);
1532 
1533  if (RTG.NMG_debug & (DEBUG_BOOL|DEBUG_BASIC)) {
1534  bu_log("\n\nnmg_boolean(tp=%p, m=%p) START\n",
1535  (void *)tp, (void *)m);
1536  }
1537 
1538  /* The nmg_model_fuse function was removed from this point in the
1539  * boolean process since not all geometry that is to be processed is
1540  * always within the single 'm' nmg model structure passed into this
1541  * function. In some cases the geometry resides in multiple nmg model
1542  * structures within the 'tp' tree that is passed into this function.
1543  * Running nmg_model_fuse is still required but is done later, i.e.
1544  * within the nmg_booltree_evaluate function just before the nmg_do_bool
1545  * function is called which is when the geometry, in which the boolean
1546  * to be performed, is always in a single nmg model structure.
1547  */
1548 
1549  /*
1550  * Evaluate the nodes of the boolean tree one at a time, until
1551  * only a single region remains.
1552  */
1553  result = nmg_booltree_evaluate(tp, tol, resp);
1554 
1555  if (result == TREE_NULL) {
1556  bu_log("nmg_boolean(): result of nmg_booltree_evaluate() is NULL\n");
1557  rt_pr_tree(tp, 0);
1558  ret = 1;
1559  goto out;
1560  }
1561 
1562  if (result != tp) {
1563  bu_bomb("nmg_boolean(): result of nmg_booltree_evaluate() isn't tp\n");
1564  }
1565 
1566  RT_CK_TREE(result);
1567 
1568  if (tp->tr_op != OP_NMG_TESS) {
1569  bu_log("nmg_boolean(): result of nmg_booltree_evaluate() op != OP_NMG_TESS\n");
1570  rt_pr_tree(tp, 0);
1571  ret = 1;
1572  goto out;
1573  }
1574 
1575  if (tp->tr_d.td_r == (struct nmgregion *)NULL) {
1576  /* Pointers are all OK, but boolean produced null set */
1577  ret = 1;
1578  goto out;
1579  }
1580 
1581  /* move result into correct model */
1582  nmg_merge_models(m, tp->tr_d.td_r->m_p);
1583  ret = 0;
1584 
1585 out:
1586  if (RTG.NMG_debug & (DEBUG_BOOL|DEBUG_BASIC)) {
1587  bu_log("nmg_boolean(tp=%p, m=%p) END, ret=%d\n\n",
1588  (void *)tp, (void *)m, ret);
1589  }
1590 
1591  return ret;
1592 }
1593 /** @} */
1594 
1595 /*
1596  * Local Variables:
1597  * mode: C
1598  * tab-width: 8
1599  * indent-tabs-mode: t
1600  * c-file-style: "stroustrup"
1601  * End:
1602  * ex: shiftwidth=4 tabstop=8
1603  */
void nmg_pl_s(FILE *fp, const struct shell *s)
Definition: nmg_plot.c:746
int nmg_ck_closed_surf(const struct shell *s, const struct bn_tol *tol)
Definition: nmg_ck.c:1448
#define BU_LIST_PNEXT_CIRC(structure, p)
Definition: list.h:442
char * d_namep
pointer to name string
Definition: raytrace.h:859
#define BU_LIST_FOR(p, structure, hp)
Definition: list.h:365
struct model ** ts_m
ptr to ptr to NMG "model"
Definition: raytrace.h:1072
#define NMG_EDGEUSE_MAGIC
Definition: magic.h:120
int nmg_model_fuse(struct model *m, const struct bn_tol *tol)
Definition: nmg_fuse.c:1919
void bu_log(const char *,...) _BU_ATTR_PRINTF12
Definition: log.c:176
struct nmgregion * nmg_do_bool(struct nmgregion *rA, struct nmgregion *rB, const int oper, const struct bn_tol *tol)
Definition: nmg_bool.c:1119
int nmg_edge_fuse(const uint32_t *magic_p, const struct bn_tol *tol)
Definition: nmg_fuse.c:1062
void nmg_stash_model_to_file(const char *filename, const struct model *m, const char *title)
Definition: nmg_misc.c:4500
int nmg_unbreak_region_edges(uint32_t *magic_p)
Definition: nmg_misc.c:4650
struct nmgregion * td_r
ptr to NMG region
Definition: raytrace.h:1168
int nmg_kfu(struct faceuse *fu1)
Definition: nmg_mk.c:1207
#define OP_NOP
Leaf with no effect.
Definition: raytrace.h:1132
double dist
>= 0
Definition: tol.h:73
void nmg_evaluate_boolean(struct shell *sA, struct shell *sB, int op, char **classlist, const struct bn_tol *tol)
Definition: nmg_eval.c:189
if lu s
Definition: nmg_mod.c:3860
Definition: clone.c:90
void bu_ptbl_init(struct bu_ptbl *b, size_t len, const char *str)
Definition: ptbl.c:32
union tree * nmg_booltree_leaf_tnurb(struct db_tree_state *tsp, const struct db_full_path *pathp, struct rt_db_internal *ip, void *client_data)
Definition: nmg_bool.c:1241
lu
Definition: nmg_mod.c:3855
#define OP_NMG_TESS
Leaf: tr_stp -> nmgregion.
Definition: raytrace.h:1137
#define VSETALL(a, s)
Definition: color.c:54
int nmg_kr(struct nmgregion *r)
Definition: nmg_mk.c:1595
void nmg_show_each_loop(struct shell *s, char **classlist, int redraw, int fancy, const char *str)
Definition: nmg_bool.c:209
struct region * tb_regionp
ptr to containing region
Definition: raytrace.h:1148
int bu_ptbl_rm(struct bu_ptbl *b, const long *p)
void nmg_rebound(struct model *m, const struct bn_tol *tol)
Definition: nmg_misc.c:2072
Header file for the BRL-CAD common definitions.
#define DB_FULL_PATH_CUR_DIR(_pp)
Definition: db_fullpath.h:51
struct faceuse * nmg_dup_face(struct faceuse *fu, struct shell *s)
Definition: nmg_mod.c:1827
int bu_ptbl_ins(struct bu_ptbl *b, long *p)
int nmg_shell_is_empty(register const struct shell *s)
Definition: nmg_info.c:203
void nmg_show_broken_classifier_stuff(uint32_t *p, char **classlist, int all_new, int fancy, const char *a_string)
Definition: nmg_plot.c:1733
#define BU_LIST_NON_EMPTY(hp)
Definition: list.h:296
#define MAX_FASTF
Definition: defines.h:340
int nmg_kvu(struct vertexuse *vu)
Definition: nmg_mk.c:1095
void rt_pr_tree(const union tree *tp, int lvl)
NMG_CK_LOOPUSE(lu)
BU_LIST_DEQUEUE & eu1
Definition: nmg_mod.c:3839
union tree * tb_left
Definition: raytrace.h:1149
Definition: ptbl.h:62
int nmg_ks(struct shell *s)
Definition: nmg_mk.c:1546
char * nmg_manifolds(struct model *m)
Definition: nmg_manif.c:413
void bu_vls_free(struct bu_vls *vp)
Definition: vls.c:248
#define OP_SUBTRACT
Binary: L subtract R.
Definition: raytrace.h:1129
void nmg_sanitize_s_lv(struct shell *s, int orient)
Definition: nmg_mod.c:564
#define OP_INTERSECT
Binary: L intersect R.
Definition: raytrace.h:1128
#define RT_G_DEBUG
Definition: raytrace.h:1718
int nmg_check_closed_shell(const struct shell *s, const struct bn_tol *tol)
Definition: nmg_misc.c:1142
uint32_t NMG_debug
debug bits for NMG's see nmg.h
Definition: raytrace.h:1699
void nmg_shell_coplanar_face_merge(struct shell *s, const struct bn_tol *tol, const int simplify)
Definition: nmg_mod.c:93
#define RT_CK_DB_INTERNAL(_p)
Definition: raytrace.h:207
void * bu_calloc(size_t nelem, size_t elsize, const char *str)
Definition: malloc.c:321
#define RT_CK_DIR(_dp)
Definition: raytrace.h:876
int debug_file_count
Definition: nmg_bool.c:57
struct vertexuse * nmg_find_vertex_in_lu(const struct vertex *v, const struct loopuse *lu)
Definition: nmg_extrude.c:266
#define BU_PTBL_GET(ptbl, i)
Definition: ptbl.h:108
int nmg_has_dangling_faces(uint32_t *magic_p, const char *manifolds)
Definition: nmg_bool.c:177
void bu_vls_sprintf(struct bu_vls *vls, const char *fmt,...) _BU_ATTR_PRINTF23
Definition: vls.c:707
const struct rt_functab * idb_meth
for ft_ifree(), etc.
Definition: raytrace.h:194
int nmg_vertex_fuse(const uint32_t *magic_p, const struct bn_tol *tol)
Definition: nmg_fuse.c:306
int nmg_bool_eval_silent
Definition: nmg_bool.c:1281
void nmg_kill_wire_edges(struct shell *s)
Definition: nmg_bool.c:602
void nmg_r_radial_check(const struct nmgregion *r, const struct bn_tol *tol)
Definition: nmg_fuse.c:3457
#define TREE_NULL
Definition: raytrace.h:1181
#define RT_CK_TESS_TOL(_p)
Definition: raytrace.h:184
void nmg_km(struct model *m)
Definition: nmg_mk.c:1634
void db_free_tree(union tree *tp, struct resource *resp)
Definition: db_tree.c:1296
void nmg_class_shells(struct shell *sA, struct shell *sB, char **classlist, const struct bn_tol *tol)
Definition: nmg_class.c:1975
union tree * nmg_booltree_leaf_tess(struct db_tree_state *tsp, const struct db_full_path *pathp, struct rt_db_internal *ip, void *client_data)
Definition: nmg_bool.c:1175
void nmg_s_split_touchingloops(struct shell *s, const struct bn_tol *tol)
Definition: nmg_mod.c:646
void nmg_pl_isect(const char *filename, const struct shell *s, const struct bn_tol *tol)
Definition: nmg_plot.c:1256
int nmg_simplify_shell_edges(struct shell *s, const struct bn_tol *tol)
Definition: nmg_misc.c:9975
void nmg_edge_tabulate(struct bu_ptbl *tab, const uint32_t *magic_p)
Definition: nmg_info.c:2138
#define BU_LIST_PNEXT(structure, p)
Definition: list.h:422
struct tree::tree_node tr_b
#define UNUSED(parameter)
Definition: common.h:239
const char * manifolds
Definition: nmg_bool.c:52
goto out
Definition: nmg_mod.c:3846
Support for uniform tolerances.
Definition: tol.h:71
struct shell * nmg_find_s_of_eu(const struct edgeuse *eu)
Definition: nmg_info.c:235
#define BN_CK_TOL(_p)
Definition: tol.h:82
#define BU_LIST_FIRST_MAGIC(hp)
Definition: list.h:416
char * bu_vls_addr(const struct bu_vls *vp)
Definition: vls.c:111
struct tree::tree_nmgregion tr_d
struct resource * ts_resp
Per-CPU data.
Definition: raytrace.h:1074
#define NMG_VERTEXUSE_MAGIC
Definition: magic.h:145
void nmg_vshell(const struct bu_list *hp, const struct nmgregion *r)
Definition: nmg_ck.c:591
struct nmgregion * nmg_mrsv(struct model *m)
Definition: nmg_mk.c:306
void nmg_visit(const uint32_t *magicp, const struct nmg_visit_handlers *htab, void *state)
Definition: nmg_visit.c:263
void nmg_vmodel(const struct model *m)
Definition: nmg_ck.c:635
void pdv_3line(register FILE *plotfp, const fastf_t *a, const fastf_t *b)
Definition: plot3.c:642
void stash_shell(struct shell *s, char *file_name, char *title, const struct bn_tol *tol)
Definition: nmg_bool.c:245
void bu_ptbl_free(struct bu_ptbl *b)
Definition: ptbl.c:226
#define RT_CK_FULL_PATH(_p)
Definition: db_fullpath.h:59
int nmg_class_nothing_broken
Definition: nmg_plot.c:1443
#define BU_LIST_PPREV_CIRC(structure, p)
Definition: list.h:450
struct model * nmg_mm(void)
Definition: nmg_mk.c:235
int nmg_simplify_shell(struct shell *s)
Definition: nmg_mod.c:209
void nmg_pr_s(const struct shell *s, char *h)
Definition: nmg_pr.c:230
union tree * tb_right
Definition: raytrace.h:1150
int nmg_klu(struct loopuse *lu1)
Definition: nmg_mk.c:1277
int nmg_kill_cracks(struct shell *s)
Definition: nmg_misc.c:8187
void nmg_crackshells(struct shell *s1, struct shell *s2, const struct bn_tol *tol)
Definition: nmg_inter.c:6286
void nmg_region_v_unique(struct nmgregion *r1, const struct bn_tol *tol)
Definition: nmg_fuse.c:127
#define RT_CK_RESOURCE(_p)
Definition: raytrace.h:1490
#define BU_PTBL_END(ptbl)
Definition: ptbl.h:106
const struct rt_tess_tol * ts_ttol
Tessellation tolerance.
Definition: raytrace.h:1070
#define RT_GET_TREE(_tp, _res)
Definition: raytrace.h:1210
void nmg_m_reindex(struct model *m, register long int newindex)
Definition: nmg_index.c:298
int(* ft_tnurb)(struct nmgregion **, struct model *, struct rt_db_internal *, const struct bn_tol *)
Definition: raytrace.h:2120
int nmg_dangling_face(const struct faceuse *fu, register const char *manifolds)
Definition: nmg_manif.c:62
eu1 up magic_p
Definition: nmg_mod.c:3915
int(* ft_tessellate)(struct nmgregion **, struct model *, struct rt_db_internal *, const struct rt_tess_tol *, const struct bn_tol *)
Definition: raytrace.h:2114
size_t nmg_plot_open_edges(const uint32_t *magic_p, const char *prefix)
Definition: nmg_bool.c:72
const char * td_name
If non-null, dynamic string describing heritage of this region.
Definition: raytrace.h:1167
union tree * nmg_booltree_evaluate(register union tree *tp, const struct bn_tol *tol, struct resource *resp)
Definition: nmg_bool.c:1307
void nmg_s_radial_check(struct shell *s, const struct bn_tol *tol)
Definition: nmg_fuse.c:3433
struct vertexuse * nmg_find_v_in_shell(const struct vertex *v, const struct shell *s, int edges_only)
Definition: nmg_info.c:1514
struct shell * nmg_find_s_of_vu(const struct vertexuse *vu)
Definition: nmg_info.c:249
int nmg_keu(register struct edgeuse *eu1)
Definition: nmg_mk.c:1413
void bu_free(void *ptr, const char *str)
Definition: malloc.c:328
#define RT_CK_TREE(_p)
Definition: raytrace.h:1182
NMG_CK_SHELL(s)
void nmg_face_tabulate(struct bu_ptbl *tab, const uint32_t *magic_p)
Definition: nmg_info.c:2247
void nmg_kill_non_common_cracks(struct shell *sA, struct shell *sB)
Definition: nmg_bool.c:272
char * file_name
Definition: fb2pix.c:40
void nmg_pr_s_briefly(const struct shell *s, char *h)
Definition: nmg_pr.c:270
#define BU_VLS_INIT_ZERO
Definition: vls.h:84
const struct edgeuse * nmg_radial_face_edge_in_shell(const struct edgeuse *eu)
Definition: nmg_info.c:1021
#define DEBUG_TREEWALK
22 Database tree traversal
Definition: raytrace.h:107
void nmg_shell_a(struct shell *s, const struct bn_tol *tol)
Definition: nmg_mk.c:2474
bn_poly_t rem[1]
Definition: vls.h:56
int nmg_ck_closed_region(const struct nmgregion *r, const struct bn_tol *tol)
Definition: nmg_ck.c:1493
int nmg_break_e_on_v(const uint32_t *magic_p, const struct bn_tol *tol)
Definition: nmg_fuse.c:1802
void bu_bomb(const char *str) _BU_ATTR_NORETURN
Definition: bomb.c:91
void nmg_kill_anti_loops(struct shell *s)
Definition: nmg_bool.c:506
int nmg_boolean(union tree *tp, struct model *m, const struct bn_tol *tol, struct resource *resp)
Definition: nmg_bool.c:1523
const struct bn_tol * ts_tol
Math tolerance.
Definition: raytrace.h:1071
#define OP_UNION
Binary: L union R.
Definition: raytrace.h:1127
eu2
Definition: nmg_mod.c:3875
#define BU_LIST_NOT_HEAD(p, hp)
Definition: list.h:324
#define BU_LIST_APPEND_LIST(dest_hp, src_hp)
Definition: list.h:281
#define BU_LIST_FIRST(structure, hp)
Definition: list.h:312
void nmg_merge_models(struct model *m1, struct model *m2)
Definition: nmg_index.c:738
#define bu_strdup(s)
Definition: str.h:71
void nmg_vertex_tabulate(struct bu_ptbl *tab, const uint32_t *magic_p)
Definition: nmg_info.c:1985
struct rt_g RTG
Definition: globals.c:39
struct model * nmg_find_model(const uint32_t *magic_p_arg)
Definition: nmg_info.c:57