xref: /illumos-gate/usr/src/cmd/sgs/rtld/common/tsort.c (revision c545f712400280e1f18c1488fde559c5b9a6b6ff)
1 /*
2  * CDDL HEADER START
3  *
4  * The contents of this file are subject to the terms of the
5  * Common Development and Distribution License (the "License").
6  * You may not use this file except in compliance with the License.
7  *
8  * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
9  * or http://www.opensolaris.org/os/licensing.
10  * See the License for the specific language governing permissions
11  * and limitations under the License.
12  *
13  * When distributing Covered Code, include this CDDL HEADER in each
14  * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
15  * If applicable, add the following below this CDDL HEADER, with the
16  * fields enclosed by brackets "[]" replaced with your own identifying
17  * information: Portions Copyright [yyyy] [name of copyright owner]
18  *
19  * CDDL HEADER END
20  */
21 
22 /*
23  * Copyright 2008 Sun Microsystems, Inc.  All rights reserved.
24  * Use is subject to license terms.
25  */
26 
27 /*
28  * Utilities to handle shared object dependency graph.
29  *
30  * The algorithms used in this file are taken from the following book:
31  *	Algorithms in C
32  *		Robert Sedgewick
33  *		Addison-Wesley Publishing company
34  *		ISBN 0-201-51425-7
35  * 	From the following chapters:
36  *		Chapter 29 Elementary Graph Algorithms
37  *		Chapter 32 Directed Graph
38  */
39 
40 #include	<sys/types.h>
41 #include	<stdarg.h>
42 #include	<stdio.h>
43 #include	<dlfcn.h>
44 #include	<signal.h>
45 #include	<locale.h>
46 #include	<string.h>
47 #include	<libintl.h>
48 #include	<debug.h>
49 #include	"_rtld.h"
50 #include	"msg.h"
51 
/*
 * Structure for maintaining sorting state.  A single instance lives on
 * tsort()'s stack and is threaded through the helper routines below.
 */
typedef struct {
	Rt_map		**s_lmpa;	/* link-map[] (returned to caller) */
	Rt_map		*s_lmp;		/* originating link-map */
	Rt_map		**s_stack;	/* strongly connected component stack */
	APlist 		*s_scc;		/* cyclic list */
	APlist		*s_queue;	/* depth queue for cyclic components */
	int		s_sndx;		/* present stack index */
	int 		s_lndx;		/* present link-map index */
	int		s_num;		/* number of objects to sort */
	int		s_initfirst;	/* no. of INITFIRST entries */
} Sort;
66 
67 #define	AL_CNT_SCC	10
68 
69 /*
70  * qsort(3c) comparison function.
71  */
72 static int
73 compare(const void * lmpp1, const void * lmpp2)
74 {
75 	Rt_map	*lmp1 = *((Rt_map **)lmpp1);
76 	Rt_map	*lmp2 = *((Rt_map **)lmpp2);
77 
78 	if (IDX(lmp1) > IDX(lmp2))
79 		return (-1);
80 	if (IDX(lmp1) < IDX(lmp2))
81 		return (1);
82 	return (0);
83 }
84 
85 /*
86  * This routine is called when a cyclic dependency is detected between strongly
87  * connected components.  The nodes within the cycle are reverse breadth-first
88  * sorted.
89  */
90 static int
91 sort_scc(Sort * sort, int fndx, int flag)
92 {
93 	static const char	*tfmt = 0, *ffmt;
94 	static int		cnt = 1;
95 	int			ndx;
96 	Rt_map			*lmp;
97 	Lm_list			*lml = LIST(sort->s_lmp);
98 	Word			lmflags = lml->lm_flags;
99 	Word			init, unref;
100 
101 	/*
102 	 * If this is the first cyclic dependency traverse the new objects that
103 	 * have been added to the link-map list and for each object establish
104 	 * a unique depth index.  We build this dynamically as we have no idea
105 	 * of the number of objects that will be inspected (logic matches that
106 	 * used by dlsym() to traverse lazy dependencies).
107 	 */
108 	if (sort->s_queue == NULL) {
109 		Aliste	idx1;
110 		Rt_map	*lmp, *lmp2;
111 
112 		lmp = sort->s_lmp;
113 		ndx = 1;
114 
115 		if (aplist_append(&sort->s_queue, lmp, sort->s_num) == NULL)
116 			return (0);
117 
118 		IDX(lmp) = ndx++;
119 
120 		for (APLIST_TRAVERSE(sort->s_queue, idx1, lmp2)) {
121 			Bnd_desc	*bdp;
122 			Aliste		idx2;
123 
124 			for (APLIST_TRAVERSE(DEPENDS(lmp2), idx2, bdp)) {
125 				Rt_map	*lmp = bdp->b_depend;
126 
127 				if (IDX(lmp))
128 					continue;
129 
130 				/*
131 				 * If we're .init processing and this depend-
132 				 * encies .init has been called, skip it.
133 				 */
134 				if ((flag & RT_SORT_REV) &&
135 				    (FLAGS(lmp) & FLG_RT_INITCALL))
136 					continue;
137 
138 				if (aplist_append(&sort->s_queue, lmp,
139 				    sort->s_num) == NULL)
140 					return (0);
141 
142 				IDX(lmp) = ndx++;
143 			}
144 		}
145 	}
146 
147 	/*
148 	 * Sort the cyclics.
149 	 */
150 	qsort(&(sort->s_lmpa[fndx]), sort->s_lndx - fndx, sizeof (Rt_map *),
151 	    compare);
152 
153 	/*
154 	 * Under ldd -i, or debugging, print this cycle.  Under ldd -i/-U assign
155 	 * each object a group identifier so that cyclic dependent callers can
156 	 * be better traced (see trace_sort()), or analyzed for non-use.
157 	 */
158 	if (((init = (lmflags & LML_FLG_TRC_INIT)) == 0) &&
159 	    ((unref = (lmflags & LML_FLG_TRC_UNREF)) == 0) &&
160 	    (DBG_ENABLED == 0))
161 		return (1);
162 
163 	if (init) {
164 		if (tfmt == 0) {
165 			tfmt = MSG_INTL(MSG_LDD_INIT_FMT_01);
166 			ffmt = MSG_ORIG(MSG_LDD_INIT_FMT_FILE);
167 		}
168 		(void) printf(tfmt, cnt);
169 	}
170 	DBG_CALL(Dbg_util_scc_title(lml, (flag & RT_SORT_REV)));
171 
172 	/*
173 	 * Identify this cyclic group, and under ldd -i print the cycle in the
174 	 * order its components will be run.
175 	 */
176 	if (flag & RT_SORT_REV) {
177 		for (ndx = fndx; ndx < sort->s_lndx; ndx++) {
178 			lmp = sort->s_lmpa[ndx];
179 			CYCGROUP(lmp) = cnt;
180 
181 			if (init)
182 				(void) printf(ffmt, NAME(lmp));
183 			DBG_CALL(Dbg_util_scc_entry(lmp, ndx));
184 		}
185 		cnt++;
186 
187 	} else if (DBG_ENABLED) {
188 		for (ndx = sort->s_lndx - 1; ndx >= fndx; ndx--) {
189 			lmp = sort->s_lmpa[ndx];
190 			DBG_CALL(Dbg_util_scc_entry(lmp, ndx));
191 		}
192 	}
193 
194 	/*
195 	 * If we're looking for unused dependencies determine if any of these
196 	 * cyclic components are referenced from outside of the cycle.
197 	 */
198 	if (unref || DBG_ENABLED) {
199 		for (ndx = fndx; ndx < sort->s_lndx; ndx++) {
200 			Bnd_desc	*bdp;
201 			Aliste		idx;
202 
203 			lmp = sort->s_lmpa[ndx];
204 
205 			/*
206 			 * If this object has a handle then it can be in use by
207 			 * anyone.
208 			 */
209 			if (HANDLES(lmp))
210 				return (1);
211 
212 			/*
213 			 * Traverse this objects callers looking for outside
214 			 * references.
215 			 */
216 			for (APLIST_TRAVERSE(CALLERS(lmp), idx, bdp)) {
217 				Rt_map		*clmp = bdp->b_caller;
218 
219 				if ((bdp->b_flags & BND_REFER) == 0)
220 					continue;
221 
222 				if (CYCGROUP(lmp) != CYCGROUP(clmp))
223 					return (1);
224 			}
225 		}
226 
227 		/*
228 		 * If we're here then none of the cyclic dependents have been
229 		 * referenced from outside of the cycle, mark them as unused.
230 		 */
231 		for (ndx = fndx; ndx < sort->s_lndx; ndx++) {
232 			lmp = sort->s_lmpa[ndx];
233 			FLAGS1(lmp) &= ~FL1_RT_USED;
234 		}
235 	}
236 	return (1);
237 }
238 
239 /*
240  * Take elements off of the stack and move them to the link-map array. Typically
241  * this routine just pops one strongly connected component (individual link-map)
242  * at a time.  When a cyclic dependency has been detected the stack will contain
243  * more than just the present object to process, and will trigger the later call
244  * to sort_scc() to sort these elements.
245  */
246 static int
247 visit(Lm_list *lml, Rt_map * lmp, Sort *sort, int flag)
248 {
249 	APlist		*alp = NULL;
250 	int		num = sort->s_lndx;
251 	Word		tracing = lml->lm_flags & LML_FLG_TRC_ENABLE;
252 	Rt_map		*tlmp;
253 
254 	do {
255 		tlmp = sort->s_stack[--(sort->s_sndx)];
256 		SORTVAL(tlmp) = sort->s_num;
257 		DBG_CALL(Dbg_util_collect(tlmp, sort->s_lndx, flag));
258 		sort->s_lmpa[(sort->s_lndx)++] = tlmp;
259 
260 		if (flag & RT_SORT_REV) {
261 			/*
262 			 * Indicate the object has had its .init collected.
263 			 * Note, that regardless of the object having a .init
264 			 * the object is added to the tsort list, as it's from
265 			 * this list that any post-init flags are established.
266 			 */
267 			FLAGS(tlmp) |= FLG_RT_INITCLCT;
268 			lml->lm_init--;
269 		} else {
270 			/*
271 			 * Indicate the object has had its .fini collected.
272 			 * Note, that regardless of the object having a .fini,
273 			 * the object is added to the tsort list, as it's from
274 			 * this list that any audit_objclose() activity is
275 			 * triggered.
276 			 */
277 			FLAGS(tlmp) |= FLG_RT_FINICLCT;
278 		}
279 
280 		/*
281 		 * If tracing, save the strongly connected component.
282 		 */
283 		if (tracing && (aplist_append(&alp, tlmp,
284 		    AL_CNT_SCC) == 0))
285 			return (0);
286 	} while (tlmp != lmp);
287 
288 	/*
289 	 * Determine if there are cyclic dependencies to process.  If so, sort
290 	 * the components, and retain them for tracing output.
291 	 */
292 	if (sort->s_lndx > (num + 1)) {
293 		if (sort_scc(sort, num, flag) == 0)
294 			return (0);
295 
296 		if (tracing && (aplist_append(&sort->s_scc, alp,
297 		    AL_CNT_SCC) == 0))
298 			return (0);
299 	} else if (alp)
300 		free(alp);
301 
302 	return (1);
303 }
304 
/*
 * Forward declaration: dep_visit() and _dep_visit() are mutually recursive.
 */
static int
dep_visit(Lm_list *, Rt_map *, uint_t, Rt_map *, Sort *, int);
307 
308 static int
309 _dep_visit(Lm_list *lml, int min, Rt_map *clmp, Rt_map *dlmp, uint_t bflags,
310     Sort *sort, int flag)
311 {
312 	int	_min;
313 
314 	/*
315 	 * Only collect objects that belong to the callers link-map.  Catches
316 	 * cross dependencies (filtering) to ld.so.1.
317 	 */
318 	if (LIST(dlmp) != lml)
319 		return (min);
320 
321 	/*
322 	 * Determine if this object hasn't been inspected.
323 	 */
324 	if ((_min = SORTVAL(dlmp)) == -1) {
325 		if (flag & RT_SORT_REV) {
326 			/*
327 			 * For .init processing, only collect objects that have
328 			 * been relocated and haven't already been collected.
329 			 */
330 			if ((FLAGS(dlmp) & (FLG_RT_RELOCED |
331 			    FLG_RT_INITCLCT)) != FLG_RT_RELOCED)
332 				return (min);
333 
334 			/*
335 			 * If this object contains no .init, there's no need to
336 			 * establish a dependency.
337 			 */
338 			if ((INIT(dlmp) == 0) && (INITARRAY(dlmp) == 0))
339 				return (min);
340 		} else {
341 			/*
342 			 * For .fini processing only collect objects that have
343 			 * had their .init collected, and haven't already been
344 			 * .fini collected.
345 			 */
346 			if ((FLAGS(dlmp) & (FLG_RT_INITCLCT |
347 			    FLG_RT_FINICLCT)) != FLG_RT_INITCLCT)
348 				return (min);
349 
350 			/*
351 			 * If we're deleting a subset of objects, only collect
352 			 * those marked for deletion.
353 			 */
354 			if ((flag & RT_SORT_DELETE) &&
355 			    ((FLAGS(dlmp) & FLG_RT_DELETE) == 0))
356 				return (min);
357 
358 			/*
359 			 * If this object contains no .fini, there's no need to
360 			 * establish a dependency.
361 			 */
362 			if ((FINI(dlmp) == 0) && (FINIARRAY(dlmp) == 0))
363 				return (min);
364 		}
365 
366 		/*
367 		 * Inspect this new dependency.
368 		 */
369 		if ((_min = dep_visit(lml, clmp, bflags, dlmp,
370 		    sort, flag)) == -1)
371 			return (-1);
372 	}
373 
374 	/*
375 	 * Keep track of the smallest SORTVAL that has been encountered.  If
376 	 * this value is smaller than the present object, then the dependency
377 	 * edge has cycled back to objects that have been processed earlier
378 	 * along this dependency edge.
379 	 */
380 	if (_min < min) {
381 		DBG_CALL(Dbg_util_edge_out(clmp, sort->s_stack[_min]));
382 		return (_min);
383 	} else
384 		return (min);
385 }
386 
387 /*
388  * Visit the dependencies of each object.
389  */
390 static int
391 dep_visit(Lm_list *lml, Rt_map *clmp, uint_t cbflags, Rt_map *lmp, Sort *sort,
392     int flag)
393 {
394 	int 		min;
395 	Aliste		idx;
396 	Bnd_desc	*bdp;
397 	Dyninfo		*dip;
398 
399 	min = SORTVAL(lmp) = sort->s_sndx;
400 	sort->s_stack[(sort->s_sndx)++] = lmp;
401 
402 	if (FLAGS(lmp) & FLG_RT_INITFRST)
403 		sort->s_initfirst++;
404 
405 	DBG_CALL(Dbg_util_edge_in(lml, clmp, cbflags, lmp, min, flag));
406 
407 	/*
408 	 * Traverse both explicit and implicit dependencies.
409 	 */
410 	for (APLIST_TRAVERSE(DEPENDS(lmp), idx, bdp)) {
411 		if ((min = _dep_visit(lml, min, lmp, bdp->b_depend,
412 		    bdp->b_flags, sort, flag)) == -1)
413 			return (-1);
414 	}
415 
416 	/*
417 	 * Traverse any filtee dependencies.
418 	 */
419 	if (((dip = DYNINFO(lmp)) != 0) && (FLAGS1(lmp) & MSK_RT_FILTER)) {
420 		uint_t	cnt, max = DYNINFOCNT(lmp);
421 
422 		for (cnt = 0; cnt < max; cnt++, dip++) {
423 			Pnode	*pnp = (Pnode *)dip->di_info;
424 
425 			if ((pnp == 0) ||
426 			    ((dip->di_flags & MSK_DI_FILTER) == 0))
427 				continue;
428 
429 			for (; pnp; pnp = pnp->p_next) {
430 				Grp_hdl		*ghp = (Grp_hdl *)dip->di_info;
431 				Grp_desc	*gdp;
432 
433 				if ((pnp->p_len == 0) ||
434 				    ((ghp = (Grp_hdl *)pnp->p_info) == 0))
435 					continue;
436 
437 				for (ALIST_TRAVERSE(ghp->gh_depends, idx,
438 				    gdp)) {
439 
440 					if (gdp->gd_depend == lmp)
441 						continue;
442 					if ((min = _dep_visit(lml, min, lmp,
443 					    gdp->gd_depend, BND_FILTER,
444 					    sort, flag)) == -1)
445 						return (-1);
446 				}
447 			}
448 		}
449 	}
450 
451 	/*
452 	 * Having returned to where the minimum SORTVAL is equivalent to the
453 	 * object that has just been processed, collect any dependencies that
454 	 * are available on the sorting stack.
455 	 */
456 	if (min == SORTVAL(lmp)) {
457 		if (visit(lml, lmp, sort, flag) == 0)
458 			return (-1);
459 	}
460 	return (min);
461 }
462 
463 
464 #ifndef	LD_BREADTH_DISABLED
/*
 * Reverse LD_BREADTH search (used to fire .init's the old fashioned way).
 */
468 static void
469 rb_visit(Rt_map * lmp, Sort * sort)
470 {
471 	Rt_map *	nlmp;
472 
473 	if ((nlmp = NEXT_RT_MAP(lmp)) != 0)
474 		rb_visit(nlmp, sort);
475 
476 	/*
477 	 * Only collect objects that have been relocated and haven't already
478 	 * been collected.
479 	 */
480 	if ((FLAGS(lmp) & (FLG_RT_RELOCED | FLG_RT_INITCLCT)) ==
481 	    FLG_RT_RELOCED) {
482 		sort->s_lmpa[(sort->s_lndx)++] = lmp;
483 		FLAGS(lmp) |= FLG_RT_INITCLCT;
484 		LIST(lmp)->lm_init--;
485 	}
486 }
487 
/*
 * Forward LD_BREADTH search (used to fire .fini's the old fashioned way).
 */
491 static void
492 fb_visit(Rt_map * lmp, Sort * sort, int flag)
493 {
494 	while (lmp) {
495 		/*
496 		 * If we're called from dlclose() then we only collect those
497 		 * objects marked for deletion.
498 		 */
499 		if (!(flag & RT_SORT_DELETE) || (FLAGS(lmp) & FLG_RT_DELETE)) {
500 			/*
501 			 * Only collect objects that have had their .init
502 			 * collected, and haven't already been .fini collected.
503 			 */
504 			if ((FLAGS(lmp) &
505 			    (FLG_RT_INITCLCT | FLG_RT_FINICLCT)) ==
506 			    (FLG_RT_INITCLCT)) {
507 				sort->s_lmpa[(sort->s_lndx)++] = lmp;
508 				FLAGS(lmp) |= FLG_RT_FINICLCT;
509 			}
510 		}
511 		lmp = NEXT_RT_MAP(lmp);
512 	}
513 }
514 #endif
515 
516 /*
517  * Find corresponding strongly connected component structure.
518  */
519 static APlist *
520 trace_find_scc(Sort *sort, Rt_map *lmp)
521 {
522 	APlist		*alp;
523 	Aliste		idx1;
524 
525 	for (APLIST_TRAVERSE(sort->s_scc, idx1, alp)) {
526 		Rt_map	*lmp2;
527 		Aliste	idx2;
528 
529 		for (APLIST_TRAVERSE(alp, idx2, lmp2)) {
530 			if (lmp == lmp2)
531 				return (alp);
532 		}
533 	}
534 	return (NULL);
535 }
536 
537 /*
538  * Print out the .init dependency information (ldd).
539  */
static void
trace_sort(Sort * sort)
{
	int 		ndx = 0;
	APlist		*alp;
	Rt_map		*lmp1;

	(void) printf(MSG_ORIG(MSG_STR_NL));

	while ((lmp1 = sort->s_lmpa[ndx++]) != NULL) {
		/*
		 * Message formats are looked up once and cached across
		 * iterations (hence static).
		 */
		static const char	*ffmt, *cfmt = 0, *sfmt = 0;
		Bnd_desc		*bdp;
		Aliste			idx1;

		/*
		 * Skip objects that carry no .init, or whose .init has
		 * already been called.
		 */
		if ((INIT(lmp1) == 0) || (FLAGS(lmp1) & FLG_RT_INITCALL))
			continue;

		if (sfmt == 0)
			sfmt = MSG_INTL(MSG_LDD_INIT_FMT_02);

#ifndef	LD_BREADTH_DISABLED
		/*
		 * Under breadth-first ordering there is no cycle information
		 * to report; print the simple format and move on.
		 */
		if (rtld_flags & RT_FL_BREADTH) {
			(void) printf(sfmt, NAME(lmp1));
			continue;
		}
#endif
		/*
		 * If the only component on the strongly connected list is
		 * this link-map, then there are no dependencies.
		 */
		if ((alp = trace_find_scc(sort, lmp1)) == NULL) {
			(void) printf(sfmt, NAME(lmp1));
			continue;
		}

		/*
		 * Establish message formats for cyclic dependencies.
		 */
		if (cfmt == 0) {
			cfmt = MSG_INTL(MSG_LDD_INIT_FMT_03);
			ffmt = MSG_ORIG(MSG_LDD_INIT_FMT_FILE);
		}

		(void) printf(cfmt, NAME(lmp1), CYCGROUP(lmp1));

		/*
		 * List those callers of this object that are members of the
		 * same strongly connected component.
		 */
		for (APLIST_TRAVERSE(CALLERS(lmp1), idx1, bdp)) {
			Rt_map	*lmp3, *lmp2 = bdp->b_caller;
			Aliste	idx2;

			for (APLIST_TRAVERSE(alp, idx2, lmp3)) {
				if (lmp2 != lmp3)
					continue;

				(void) printf(ffmt, NAME(lmp3));
			}
		}
	}
}
598 
599 /*
600  * A reverse ordered list (for .init's) contains INITFIRST elements.  Move each
601  * of these elements to the front of the list.
602  */
603 static void
604 r_initfirst(Sort * sort, int end)
605 {
606 	Rt_map *	tlmp;
607 	int		bgn, ifst, lifst = 0;
608 
609 	for (bgn = 0; bgn < sort->s_initfirst; bgn++) {
610 		for (ifst = lifst; ifst <= end; ifst++) {
611 			tlmp = sort->s_lmpa[ifst];
612 
613 			if (!(FLAGS(tlmp) & FLG_RT_INITFRST))
614 				continue;
615 
616 			/*
617 			 * If the INITFIRST element is already at the front of
618 			 * the list leave it there.
619 			 */
620 			if (ifst == bgn) {
621 				lifst = ifst + 1;
622 				break;
623 			}
624 
625 			/*
626 			 * Move the elements from the front of the list up to
627 			 * the INITFIRST element, back one position.
628 			 */
629 			(void) memmove(&sort->s_lmpa[bgn + 1],
630 			    &sort->s_lmpa[bgn],
631 			    ((ifst - bgn) * sizeof (Rt_map *)));
632 
633 			/*
634 			 * Insert INITFIRST element at the front of the list.
635 			 */
636 			sort->s_lmpa[bgn] = tlmp;
637 			lifst = ifst + 1;
638 			break;
639 		}
640 	}
641 }
642 
/*
 * A forward ordered list (for .fini's) contains INITFIRST elements.  Move each
 * of these elements to the back of the list.
 */
647 static void
648 f_initfirst(Sort * sort, int end)
649 {
650 	Rt_map *	tlmp;
651 	int		bgn, ifst, lifst = 0;
652 
653 	for (bgn = 0; bgn < sort->s_initfirst; bgn++) {
654 		for (ifst = lifst; ifst <= end; ifst++) {
655 			tlmp = sort->s_lmpa[ifst];
656 
657 			if (!(FLAGS(tlmp) & FLG_RT_INITFRST))
658 				continue;
659 
660 			/*
661 			 * If the INITFIRST element is already at the end of
662 			 * the list leave it there.
663 			 */
664 			if (ifst == end)
665 				break;
666 
667 			/*
668 			 * Move the elements from after the INITFIRST element
669 			 * up to the back of the list, up one position.
670 			 */
671 			(void) memmove(&sort->s_lmpa[ifst],
672 			    &sort->s_lmpa[ifst + 1],
673 			    ((end - ifst) * sizeof (Rt_map *)));
674 
675 			/*
676 			 * Insert INITFIRST element at the back of the list.
677 			 */
678 			sort->s_lmpa[end--] = tlmp;
679 			lifst = ifst;
680 			break;
681 		}
682 	}
683 }
684 
685 /*
686  * Determine whether .init or .fini processing is required.
687  */
688 static int
689 initorfini(Lm_list *lml, Rt_map *lmp, int flag, Sort *sort)
690 {
691 	if (flag & RT_SORT_REV) {
692 		/*
693 		 * For .init processing, only collect objects that have been
694 		 * relocated and haven't already been collected.
695 		 */
696 		if ((FLAGS(lmp) & (FLG_RT_RELOCED | FLG_RT_INITCLCT)) !=
697 		    FLG_RT_RELOCED)
698 			return (0);
699 
700 		if (dep_visit(lml, 0, 0, lmp, sort, flag) == -1)
701 			return (1);
702 
703 	} else if (!(flag & RT_SORT_DELETE) || (FLAGS(lmp) & FLG_RT_DELETE)) {
704 		/*
705 		 * Only collect objects that have had their .init collected,
706 		 * and haven't already been .fini collected.
707 		 */
708 		if (!((FLAGS(lmp) & (FLG_RT_INITCLCT | FLG_RT_FINICLCT)) ==
709 		    (FLG_RT_INITCLCT)))
710 			return (0);
711 
712 		if (dep_visit(lml, 0, 0, lmp, sort, flag) == -1)
713 			return (1);
714 	}
715 	return (0);
716 }
717 
/*
 * Topologically sort the dependencies of the given link-map.  "num" is the
 * number of objects to sort; "flag" selects .init (RT_SORT_REV) or .fini
 * (RT_SORT_FWD, optionally RT_SORT_DELETE) ordering.  Returns 0 if there is
 * nothing to do, S_ERROR on allocation failure, otherwise a NULL-terminated
 * array that the caller must free.
 */
Rt_map **
tsort(Rt_map *lmp, int num, int flag)
{
	Rt_map *	_lmp;
	Lm_list *	lml = LIST(lmp);
	Word		init = lml->lm_flags & LML_FLG_TRC_INIT;
	Sort		sort = { 0 };

	if (num == 0)
		return (0);

	/*
	 * Prior to tsorting any .init sections, ensure that the `environ'
	 * symbol is initialized for this link-map list.
	 */
	if ((flag & RT_SORT_REV) && ((lml->lm_flags &
	    (LML_FLG_TRC_ENABLE | LML_FLG_ENVIRON)) == 0))
		set_environ(lml);

	/*
	 * Allocate memory for link-map list array.  Calloc the array to ensure
	 * all elements are zero, we might find that no objects need processing.
	 *
	 * NOTE(review): sort.s_lmpa (and later s_stack) are not freed on the
	 * S_ERROR return paths below - confirm callers treat S_ERROR as fatal
	 * before relying on this.
	 */
	sort.s_lmp = lmp;
	sort.s_num = num + 1;
	if ((sort.s_lmpa = calloc(sort.s_num, sizeof (Rt_map *))) == NULL)
		return ((Rt_map **)S_ERROR);

#ifndef	LD_BREADTH_DISABLED
	/*
	 * A breadth first search is easy, simply add each object to the
	 * link-map array.
	 */
	if (rtld_flags & RT_FL_BREADTH) {
		if (flag & RT_SORT_REV)
			rb_visit(lmp, &sort);
		else
			fb_visit(lmp, &sort, flag);

		/*
		 * If tracing .init sections (only meaningful for RT_SORT_REV)
		 * print out the sorted dependencies.
		 */
		if (init)
			trace_sort(&sort);

		return (sort.s_lmpa);
	}
#endif
	/*
	 * We need to topologically sort the dependencies.
	 */
	if ((sort.s_stack = malloc(sort.s_num * sizeof (Rt_map *))) == NULL)
		return ((Rt_map **)S_ERROR);

	/*
	 * Determine where to start searching for tsort() candidates.  Any call
	 * to tsort() for .init processing is passed the link-map from which to
	 * start searching.  However, if new objects have dependencies on
	 * existing objects, or existing objects have been promoted (RTLD_LAZY
	 * to RTLD_NOW), then start searching at the head of the link-map list.
	 * These previously loaded objects will have been tagged for inclusion
	 * in this tsort() pass.  They still remain on an existing tsort() list,
	 * which must have been preempted for control to have arrived here.
	 * However, they will be ignored when encountered on any previous
	 * tsort() list if their .init has already been called.
	 */
	if (lml->lm_flags & LML_FLG_OBJREEVAL)
		_lmp = lml->lm_head;
	else
		_lmp = lmp;

	DBG_CALL(Dbg_file_bindings(_lmp, flag));
	lml->lm_flags &=
	    ~(LML_FLG_OBJREEVAL | LML_FLG_OBJADDED | LML_FLG_OBJDELETED);

	/*
	 * If interposers exist, inspect these objects first.
	 *
	 * Interposers can provide implicit dependencies - for example, an
	 * application that has a dependency on libumem will cause any other
	 * dependencies of the application that use the malloc family, to
	 * have an implicit dependency on libumem.  However, under the default
	 * condition of lazy binding, these dependency relationships on libumem
	 * are unknown to the tsorting process (ie. a call to one of the malloc
	 * family has not occurred to establish the dependency).  This lack of
	 * dependency information makes the interposer look "standalone",
	 * whereas the interposers .init/.fini should be analyzed with respect
	 * to the dependency relationship libumem will eventually create.
	 *
	 * By inspecting interposing objects first, we are able to trigger
	 * their .init sections to be accounted for before any others.
	 * Selecting these .init sections first is important for the malloc
	 * libraries, as these libraries need to prepare for pthread_atfork().
	 * However, handling interposer libraries in this generic fashion
	 * should help provide for other dependency relationships that may
	 * exist.
	 */
	if ((lml->lm_flags & (LML_FLG_INTRPOSE | LML_FLG_INTRPOSETSORT)) ==
	    LML_FLG_INTRPOSE) {
		Rt_map	*ilmp = _lmp;

		/*
		 * Unless the executable is tagged as an interposer, skip to
		 * the next object.
		 */
		if ((FLAGS(ilmp) & MSK_RT_INTPOSE) == 0)
			ilmp = NEXT_RT_MAP(ilmp);

		/*
		 * Interposers are contiguous at the head of the list; stop at
		 * the first non-interposer.
		 */
		for (; ilmp; ilmp = NEXT_RT_MAP(ilmp)) {
			if ((FLAGS(ilmp) & MSK_RT_INTPOSE) == 0)
				break;

			if (initorfini(lml, ilmp, (flag | RT_SORT_INTPOSE),
			    &sort) != 0)
				return ((Rt_map **)S_ERROR);
		}

		/*
		 * Once all interposers are processed, there is no need to
		 * look for interposers again.  An interposer can only
		 * be introduced before any relocation takes place, thus
		 * interposer .init's will be grabbed during the first tsort
		 * starting at the head of the link-map list.
		 *
		 * Interposers can't be unloaded.  Thus interposer .fini's can
		 * only be called during atexit() processing.  The interposer
		 * tsort flag is removed from each link-map list during
		 * atexit_fini() so that the interposers .fini sections are
		 * processed appropriately.
		 */
		lml->lm_flags |= LML_FLG_INTRPOSETSORT;
	}

	/*
	 * Inspect any standard objects.
	 */
	for (; _lmp; _lmp = NEXT_RT_MAP(_lmp)) {
		if (FLAGS(_lmp) & MSK_RT_INTPOSE)
			continue;

		if (initorfini(lml, _lmp, flag, &sort) != 0)
			return ((Rt_map **)S_ERROR);
	}

	/*
	 * The dependencies have been collected such that they are appropriate
	 * for an .init order, for .fini order reverse them.
	 */
	if (flag & RT_SORT_FWD) {
		int	bgn = 0, end = sort.s_lndx - 1;

		while (bgn < end) {
			Rt_map *	tlmp = sort.s_lmpa[end];

			sort.s_lmpa[end] = sort.s_lmpa[bgn];
			sort.s_lmpa[bgn] = tlmp;

			bgn++, end--;
		}
	}

	/*
	 * If INITFIRST objects have been collected then move them to the front
	 * or end of the list as appropriate.
	 */
	if (sort.s_initfirst) {
		if (flag & RT_SORT_REV)
			r_initfirst(&sort, sort.s_lndx - 1);
		else
			f_initfirst(&sort, sort.s_lndx - 1);
	}

	/*
	 * If tracing .init sections (only meaningful for RT_SORT_REV), print
	 * out the sorted dependencies.
	 */
	if (init)
		trace_sort(&sort);

	/*
	 * Clean any temporary structures prior to return.
	 */
	if (sort.s_stack)
		free(sort.s_stack);

	if (sort.s_queue) {
		Aliste idx;
		Rt_map	*lmp2;

		/*
		 * Traverse the link-maps collected on the sort queue and
		 * delete the depth index.  These link-maps may be traversed
		 * again to sort other components either for inits, and almost
		 * certainly for .finis.
		 */
		for (APLIST_TRAVERSE(sort.s_queue, idx, lmp2))
			IDX(lmp2) = 0;

		free(sort.s_queue);
	}

	if (sort.s_scc) {
		Aliste	idx;
		APlist	*alp;

		for (APLIST_TRAVERSE(sort.s_scc, idx, alp))
			free(alp);
		free(sort.s_scc);
	}

	/*
	 * The caller is responsible for freeing the sorted link-map list once
	 * the associated .init/.fini's have been fired.
	 */
	DBG_CALL(Dbg_util_nl(lml, DBG_NL_STD));
	return (sort.s_lmpa);
}
939