xref: /illumos-gate/usr/src/cmd/sgs/rtld/common/tsort.c (revision 6a634c9d)
1 /*
2  * CDDL HEADER START
3  *
4  * The contents of this file are subject to the terms of the
5  * Common Development and Distribution License (the "License").
6  * You may not use this file except in compliance with the License.
7  *
8  * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
9  * or http://www.opensolaris.org/os/licensing.
10  * See the License for the specific language governing permissions
11  * and limitations under the License.
12  *
13  * When distributing Covered Code, include this CDDL HEADER in each
14  * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
15  * If applicable, add the following below this CDDL HEADER, with the
16  * fields enclosed by brackets "[]" replaced with your own identifying
17  * information: Portions Copyright [yyyy] [name of copyright owner]
18  *
19  * CDDL HEADER END
20  */
21 
22 /*
23  * Copyright (c) 1996, 2010, Oracle and/or its affiliates. All rights reserved.
24  */
25 
26 /*
27  * Utilities to handle shared object dependency graph.
28  *
29  * The algorithms used in this file are taken from the following book:
30  *	Algorithms in C
31  *		Robert Sedgewick
32  *		Addison-Wesley Publishing Company
33  *		ISBN 0-201-51425-7
34  * 	From the following chapters:
35  *		Chapter 29 Elementary Graph Algorithms
36  *		Chapter 32 Directed Graph
37  */
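/*
 * An illustrative sketch (using a made-up dependency graph, not anything in
 * the code below):  the sort performed here is a depth-first topological
 * sort that detects strongly connected components with a Tarjan-style stack.
 * Given main -> libA -> libB -> libC, with a back edge libC -> libB forming
 * a cycle, a reverse (.init) sort collapses {libB, libC} into a single
 * strongly connected component and returns an array in which dependencies
 * precede their callers, for example:  libC, libB, libA, main.  .fini
 * processing collects the same information and then reverses the resulting
 * array (see the RT_SORT_FWD handling in tsort()).
 */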
38 
39 #include	<sys/types.h>
40 #include	<stdarg.h>
41 #include	<stdio.h>
42 #include	<dlfcn.h>
43 #include	<signal.h>
44 #include	<locale.h>
45 #include	<string.h>
46 #include	<libintl.h>
47 #include	<debug.h>
48 #include	"_rtld.h"
49 #include	"msg.h"
50 
51 /*
52  * Structure for maintaining sorting state.
53  */
54 typedef struct {
55 	Rt_map		**s_lmpa;	/* link-map[] (returned to caller) */
56 	Rt_map		*s_lmp;		/* originating link-map */
57 	Rt_map		**s_stack;	/* strongly connected component stack */
58 	APlist 		*s_scc;		/* cyclic list */
59 	APlist		*s_queue;	/* depth queue for cyclic components */
60 	int		s_sndx;		/* present stack index */
61 	int 		s_lndx;		/* present link-map index */
62 	int		s_num;		/* number of objects to sort */
63 	int		s_initfirst;	/* no. of INITFIRST entries */
64 } Sort;
65 
66 #define	AL_CNT_SCC	10
67 
68 /*
69  * qsort(3c) comparison function.
70  */
71 static int
72 compare(const void *lmpp1, const void *lmpp2)
73 {
74 	Rt_map	*lmp1 = *((Rt_map **)lmpp1);
75 	Rt_map	*lmp2 = *((Rt_map **)lmpp2);
76 
77 	if (IDX(lmp1) > IDX(lmp2))
78 		return (-1);
79 	if (IDX(lmp1) < IDX(lmp2))
80 		return (1);
81 	return (0);
82 }
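/*
 * An example of the ordering compare() produces (the IDX values are
 * hypothetical):  if the breadth-first walk in sort_scc() assigned
 * IDX(a) = 1, IDX(b) = 2 and IDX(c) = 3 to the members of a cycle, then
 * qsort() with this comparison function orders them c, b, a, i.e. by
 * descending depth index.  This is the "reverse breadth-first" order
 * described below, which heuristically runs the deeper dependencies of the
 * cycle before their callers.
 */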
83 
84 /*
85  * This routine is called when a cyclic dependency is detected between strongly
86  * connected components.  The nodes within the cycle are reverse breadth-first
87  * sorted.
88  */
89 static int
90 sort_scc(Sort *sort, int fndx, int flag)
91 {
92 	static const char	*tfmt = 0, *ffmt;
93 	static int		cnt = 1;
94 	int			ndx;
95 	Rt_map			*lmp;
96 	Lm_list			*lml = LIST(sort->s_lmp);
97 	Word			lmflags = lml->lm_flags;
98 	Word			init, unref;
99 
100 	/*
101 	 * If this is the first cyclic dependency, traverse the new objects that
102 	 * have been added to the link-map list and for each object establish
103 	 * a unique depth index.  We build this dynamically as we have no idea
104 	 * of the number of objects that will be inspected (logic matches that
105 	 * used by dlsym() to traverse lazy dependencies).
106 	 */
107 	if (sort->s_queue == NULL) {
108 		Aliste	idx1;
109 		Rt_map	*lmp, *lmp2;
110 
111 		lmp = sort->s_lmp;
112 		ndx = 1;
113 
114 		if (aplist_append(&sort->s_queue, lmp, sort->s_num) == NULL)
115 			return (0);
116 
117 		IDX(lmp) = ndx++;
118 
119 		for (APLIST_TRAVERSE(sort->s_queue, idx1, lmp2)) {
120 			Bnd_desc	*bdp;
121 			Aliste		idx2;
122 
123 			for (APLIST_TRAVERSE(DEPENDS(lmp2), idx2, bdp)) {
124 				Rt_map	*lmp3 = bdp->b_depend;
125 
126 				if (IDX(lmp3) || (LIST(lmp3) != lml))
127 					continue;
128 
129 				/*
130 				 * If we're .init processing and this
131 				 * dependency's .init has been called, skip it.
132 				 */
133 				if ((flag & RT_SORT_REV) &&
134 				    (FLAGS(lmp3) & FLG_RT_INITCALL))
135 					continue;
136 
137 				if (aplist_append(&sort->s_queue, lmp3,
138 				    sort->s_num) == NULL)
139 					return (0);
140 
141 				IDX(lmp3) = ndx++;
142 			}
143 		}
144 	}
145 
146 	/*
147 	 * Sort the cyclics.
148 	 */
149 	qsort(&(sort->s_lmpa[fndx]), sort->s_lndx - fndx, sizeof (Rt_map *),
150 	    compare);
151 
152 	/*
153 	 * Under ldd -i, or debugging, print this cycle.  Under ldd -i/-U assign
154 	 * each object a group identifier so that cyclic dependent callers can
155 	 * be better traced (see trace_sort()), or analyzed for non-use.
156 	 */
157 	init = lmflags & LML_FLG_TRC_INIT;
158 	unref = lmflags & LML_FLG_TRC_UNREF;
159 	if ((init == 0) && (unref == 0) && (DBG_ENABLED == 0))
160 		return (1);
161 
162 	if (init) {
163 		if (tfmt == 0) {
164 			tfmt = MSG_INTL(MSG_LDD_INIT_FMT_01);
165 			ffmt = MSG_ORIG(MSG_LDD_INIT_FMT_FILE);
166 		}
167 		(void) printf(tfmt, cnt);
168 	}
169 	DBG_CALL(Dbg_util_scc_title(lml, (flag & RT_SORT_REV)));
170 
171 	/*
172 	 * Identify this cyclic group, and under ldd -i print the cycle in the
173 	 * order its components will be run.
174 	 */
175 	if (flag & RT_SORT_REV) {
176 		for (ndx = fndx; ndx < sort->s_lndx; ndx++) {
177 			lmp = sort->s_lmpa[ndx];
178 			CYCGROUP(lmp) = cnt;
179 
180 			if (init)
181 				(void) printf(ffmt, NAME(lmp));
182 			DBG_CALL(Dbg_util_scc_entry(lmp, ndx));
183 		}
184 		cnt++;
185 
186 	} else if (DBG_ENABLED) {
187 		for (ndx = sort->s_lndx - 1; ndx >= fndx; ndx--) {
188 			lmp = sort->s_lmpa[ndx];
189 			DBG_CALL(Dbg_util_scc_entry(lmp, ndx));
190 		}
191 	}
192 
193 	/*
194 	 * If we're looking for unused dependencies, determine if any of these
195 	 * cyclic components are referenced from outside of the cycle.
196 	 */
197 	if (unref || DBG_ENABLED) {
198 		for (ndx = fndx; ndx < sort->s_lndx; ndx++) {
199 			Bnd_desc	*bdp;
200 			Aliste		idx;
201 
202 			lmp = sort->s_lmpa[ndx];
203 
204 			/*
205 			 * If this object has a handle then it can be in use by
206 			 * anyone.
207 			 */
208 			if (HANDLES(lmp))
209 				return (1);
210 
211 			/*
212 			 * Traverse this object's callers looking for outside
213 			 * references.
214 			 */
215 			for (APLIST_TRAVERSE(CALLERS(lmp), idx, bdp)) {
216 				Rt_map		*clmp = bdp->b_caller;
217 
218 				if ((bdp->b_flags & BND_REFER) == 0)
219 					continue;
220 
221 				if (CYCGROUP(lmp) != CYCGROUP(clmp))
222 					return (1);
223 			}
224 		}
225 
226 		/*
227 		 * If we're here then none of the cyclic dependents have been
228 		 * referenced from outside of the cycle; mark them as unused.
229 		 */
230 		for (ndx = fndx; ndx < sort->s_lndx; ndx++) {
231 			lmp = sort->s_lmpa[ndx];
232 			FLAGS1(lmp) &= ~FL1_RT_USED;
233 		}
234 	}
235 	return (1);
236 }
237 
238 /*
239  * Take elements off the stack and move them to the link-map array.  Typically
240  * this routine pops just one strongly connected component (an individual
241  * link-map) at a time.  When a cyclic dependency has been detected, the stack
242  * contains more than just the present object, and the additional elements
243  * trigger the later call to sort_scc() to sort them.
244  */
245 static int
246 visit(Lm_list *lml, Rt_map *lmp, Sort *sort, int flag)
247 {
248 	APlist		*alp = NULL;
249 	int		num = sort->s_lndx;
250 	Word		tracing = lml->lm_flags & LML_FLG_TRC_ENABLE;
251 	Rt_map		*tlmp;
252 
253 	do {
254 		tlmp = sort->s_stack[--(sort->s_sndx)];
255 		SORTVAL(tlmp) = sort->s_num;
256 		DBG_CALL(Dbg_util_collect(tlmp, sort->s_lndx, flag));
257 		sort->s_lmpa[(sort->s_lndx)++] = tlmp;
258 
259 		if (flag & RT_SORT_REV) {
260 			/*
261 			 * Indicate the object has had its .init collected.
262 			 * Note that, regardless of the object having a .init,
263 			 * the object is added to the tsort list, as it's from
264 			 * this list that any post-init flags are established.
265 			 */
266 			FLAGS(tlmp) |= FLG_RT_INITCLCT;
267 			lml->lm_init--;
268 		} else {
269 			/*
270 			 * Indicate the object has had its .fini collected.
271 			 * Note that, regardless of the object having a .fini,
272 			 * the object is added to the tsort list, as it's from
273 			 * this list that any audit_objclose() activity is
274 			 * triggered.
275 			 */
276 			FLAGS(tlmp) |= FLG_RT_FINICLCT;
277 		}
278 
279 		/*
280 		 * If tracing, save the strongly connected component.
281 		 */
282 		if (tracing && (aplist_append(&alp, tlmp,
283 		    AL_CNT_SCC) == NULL))
284 			return (0);
285 	} while (tlmp != lmp);
286 
287 	/*
288 	 * Determine if there are cyclic dependencies to process.  If so, sort
289 	 * the components, and retain them for tracing output.
290 	 */
291 	if (sort->s_lndx > (num + 1)) {
292 		if (sort_scc(sort, num, flag) == 0)
293 			return (0);
294 
295 		if (tracing && (aplist_append(&sort->s_scc, alp,
296 		    AL_CNT_SCC) == NULL))
297 			return (0);
298 	} else if (alp)
299 		free(alp);
300 
301 	return (1);
302 }
303 
304 static int
305 dep_visit(Lm_list *, Rt_map *, uint_t, Rt_map *, Sort *, int);
306 
307 static int
308 _dep_visit(Lm_list *lml, int min, Rt_map *clmp, Rt_map *dlmp, uint_t bflags,
309     Sort *sort, int flag)
310 {
311 	int	_min;
312 
313 	/*
314 	 * Only collect objects that belong to the caller's link-map list.  This
315 	 * catches cross dependencies (filtering) to ld.so.1.
316 	 */
317 	if (LIST(dlmp) != lml)
318 		return (min);
319 
320 	/*
321 	 * Determine if this object hasn't been inspected.
322 	 */
323 	if ((_min = SORTVAL(dlmp)) == -1) {
324 		if (flag & RT_SORT_REV) {
325 			/*
326 			 * For .init processing, only collect objects that have
327 			 * been relocated and haven't already been collected.
328 			 */
329 			if ((FLAGS(dlmp) & (FLG_RT_RELOCED |
330 			    FLG_RT_INITCLCT)) != FLG_RT_RELOCED)
331 				return (min);
332 
333 			/*
334 			 * If this object contains no .init, there's no need to
335 			 * establish a dependency.
336 			 */
337 			if ((INIT(dlmp) == 0) && (INITARRAY(dlmp) == 0))
338 				return (min);
339 		} else {
340 			/*
341 			 * For .fini processing only collect objects that have
342 			 * had their .init collected, and haven't already been
343 			 * .fini collected.
344 			 */
345 			if ((FLAGS(dlmp) & (FLG_RT_INITCLCT |
346 			    FLG_RT_FINICLCT)) != FLG_RT_INITCLCT)
347 				return (min);
348 
349 			/*
350 			 * If we're deleting a subset of objects, only collect
351 			 * those marked for deletion.
352 			 */
353 			if ((flag & RT_SORT_DELETE) &&
354 			    ((FLAGS(dlmp) & FLG_RT_DELETE) == 0))
355 				return (min);
356 
357 			/*
358 			 * If this object contains no .fini, there's no need to
359 			 * establish a dependency.
360 			 */
361 			if ((FINI(dlmp) == 0) && (FINIARRAY(dlmp) == 0))
362 				return (min);
363 		}
364 
365 		/*
366 		 * Inspect this new dependency.
367 		 */
368 		if ((_min = dep_visit(lml, clmp, bflags, dlmp,
369 		    sort, flag)) == -1)
370 			return (-1);
371 	}
372 
373 	/*
374 	 * Keep track of the smallest SORTVAL that has been encountered.  If
375 	 * this value is smaller than that of the present object, then the
376 	 * dependency edge has cycled back to objects that were processed
377 	 * earlier along this dependency chain.
378 	 */
379 	if (_min < min) {
380 		DBG_CALL(Dbg_util_edge_out(clmp, sort->s_stack[_min]));
381 		return (_min);
382 	} else
383 		return (min);
384 }
385 
386 /*
387  * Visit the dependencies of each object.
388  */
389 static int
390 dep_visit(Lm_list *lml, Rt_map *clmp, uint_t cbflags, Rt_map *lmp, Sort *sort,
391     int flag)
392 {
393 	int 		min;
394 	Aliste		idx1;
395 	Bnd_desc	*bdp;
396 	Dyninfo		*dip;
397 
398 	min = SORTVAL(lmp) = sort->s_sndx;
399 	sort->s_stack[(sort->s_sndx)++] = lmp;
400 
401 	if (FLAGS(lmp) & FLG_RT_INITFRST)
402 		sort->s_initfirst++;
403 
404 	DBG_CALL(Dbg_util_edge_in(lml, clmp, cbflags, lmp, min, flag));
405 
406 	/*
407 	 * Traverse both explicit and implicit dependencies.
408 	 */
409 	for (APLIST_TRAVERSE(DEPENDS(lmp), idx1, bdp)) {
410 		if ((min = _dep_visit(lml, min, lmp, bdp->b_depend,
411 		    bdp->b_flags, sort, flag)) == -1)
412 			return (-1);
413 	}
414 
415 	/*
416 	 * Traverse any filtee dependencies.
417 	 */
418 	if (((dip = DYNINFO(lmp)) != NULL) && (FLAGS1(lmp) & MSK_RT_FILTER)) {
419 		uint_t	cnt, max = DYNINFOCNT(lmp);
420 
421 		for (cnt = 0; cnt < max; cnt++, dip++) {
422 			Alist	*falp;
423 			Pdesc	*pdp;
424 
425 			if (((falp = (Alist *)dip->di_info) == NULL) ||
426 			    ((dip->di_flags & MSK_DI_FILTER) == 0))
427 				continue;
428 
429 			for (ALIST_TRAVERSE(falp, idx1, pdp)) {
430 				Aliste		idx2;
431 				Grp_hdl		*ghp;
432 				Grp_desc	*gdp;
433 
434 				if ((pdp->pd_plen == 0) ||
435 				    ((ghp = (Grp_hdl *)pdp->pd_info) == NULL))
436 					continue;
437 
438 				for (ALIST_TRAVERSE(ghp->gh_depends, idx2,
439 				    gdp)) {
440 
441 					if (gdp->gd_depend == lmp)
442 						continue;
443 					if ((min = _dep_visit(lml, min, lmp,
444 					    gdp->gd_depend, BND_FILTER,
445 					    sort, flag)) == -1)
446 						return (-1);
447 				}
448 			}
449 		}
450 	}
451 
452 	/*
453 	 * If the minimum SORTVAL encountered equals this object's own SORTVAL,
454 	 * this object is the root of a strongly connected component; collect
455 	 * it, and any dependencies above it, from the sorting stack.
456 	 */
457 	if (min == SORTVAL(lmp)) {
458 		if (visit(lml, lmp, sort, flag) == 0)
459 			return (-1);
460 	}
461 	return (min);
462 }
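/*
 * A small worked example of the SORTVAL/min bookkeeping used by dep_visit()
 * and _dep_visit() above (objects a, b and c are hypothetical):  for
 * a -> b -> c with a back edge c -> a, dep_visit(a) pushes a with
 * SORTVAL(a) = 0, then recurses to b (SORTVAL(b) = 1) and c (SORTVAL(c) = 2).
 * The back edge from c to a finds SORTVAL(a) already set, so _dep_visit()
 * propagates 0 back up as the minimum; neither b nor c satisfies
 * min == SORTVAL(), so nothing is collected on the way out.  Only when
 * control returns to a does the minimum again equal the object's own
 * SORTVAL, at which point visit() pops c, b and a off the stack as one
 * strongly connected component and sort_scc() orders them.
 */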
463 
464 /*
465  * Find corresponding strongly connected component structure.
466  */
467 static APlist *
468 trace_find_scc(Sort *sort, Rt_map *lmp)
469 {
470 	APlist		*alp;
471 	Aliste		idx1;
472 
473 	for (APLIST_TRAVERSE(sort->s_scc, idx1, alp)) {
474 		Rt_map	*lmp2;
475 		Aliste	idx2;
476 
477 		for (APLIST_TRAVERSE(alp, idx2, lmp2)) {
478 			if (lmp == lmp2)
479 				return (alp);
480 		}
481 	}
482 	return (NULL);
483 }
484 
485 /*
486  * Print out the .init dependency information (ldd).
487  */
488 static void
489 trace_sort(Sort *sort)
490 {
491 	int 		ndx = 0;
492 	APlist		*alp;
493 	Rt_map		*lmp1;
494 
495 	(void) printf(MSG_ORIG(MSG_STR_NL));
496 
497 	while ((lmp1 = sort->s_lmpa[ndx++]) != NULL) {
498 		static const char	*ffmt, *cfmt = 0, *sfmt = 0;
499 		Bnd_desc		*bdp;
500 		Aliste			idx1;
501 
502 		if ((INIT(lmp1) == 0) || (FLAGS(lmp1) & FLG_RT_INITCALL))
503 			continue;
504 
505 		if (sfmt == 0)
506 			sfmt = MSG_INTL(MSG_LDD_INIT_FMT_02);
507 
508 		/*
509 		 * If the only component on the strongly connected list is
510 		 * this link-map, then there are no dependencies.
511 		 */
512 		if ((alp = trace_find_scc(sort, lmp1)) == NULL) {
513 			(void) printf(sfmt, NAME(lmp1));
514 			continue;
515 		}
516 
517 		/*
518 		 * Establish message formats for cyclic dependencies.
519 		 */
520 		if (cfmt == 0) {
521 			cfmt = MSG_INTL(MSG_LDD_INIT_FMT_03);
522 			ffmt = MSG_ORIG(MSG_LDD_INIT_FMT_FILE);
523 		}
524 
525 		(void) printf(cfmt, NAME(lmp1), CYCGROUP(lmp1));
526 
527 		for (APLIST_TRAVERSE(CALLERS(lmp1), idx1, bdp)) {
528 			Rt_map	*lmp3, *lmp2 = bdp->b_caller;
529 			Aliste	idx2;
530 
531 			for (APLIST_TRAVERSE(alp, idx2, lmp3)) {
532 				if (lmp2 != lmp3)
533 					continue;
534 
535 				(void) printf(ffmt, NAME(lmp3));
536 			}
537 		}
538 	}
539 }
540 
541 /*
542  * A reverse ordered list (for .init's) contains INITFIRST elements.  Move each
543  * of these elements to the front of the list.
544  */
545 static void
546 r_initfirst(Sort *sort, int end)
547 {
548 	Rt_map *tlmp;
549 	int	bgn, ifst, lifst = 0;
550 
551 	for (bgn = 0; bgn < sort->s_initfirst; bgn++) {
552 		for (ifst = lifst; ifst <= end; ifst++) {
553 			tlmp = sort->s_lmpa[ifst];
554 
555 			if (!(FLAGS(tlmp) & FLG_RT_INITFRST))
556 				continue;
557 
558 			/*
559 			 * If the INITFIRST element is already at the front of
560 			 * the list, leave it there.
561 			 */
562 			if (ifst == bgn) {
563 				lifst = ifst + 1;
564 				break;
565 			}
566 
567 			/*
568 			 * Move the elements from the front of the list up to
569 			 * the INITFIRST element, back one position.
570 			 */
571 			(void) memmove(&sort->s_lmpa[bgn + 1],
572 			    &sort->s_lmpa[bgn],
573 			    ((ifst - bgn) * sizeof (Rt_map *)));
574 
575 			/*
576 			 * Insert INITFIRST element at the front of the list.
577 			 */
578 			sort->s_lmpa[bgn] = tlmp;
579 			lifst = ifst + 1;
580 			break;
581 		}
582 	}
583 }
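/*
 * For example (a hypothetical list), given a reverse ordered list a, b, I, c
 * where only I is marked INITFIRST, r_initfirst() shifts a and b back one
 * slot and places I at the front, producing I, a, b, c.  With several
 * INITFIRST objects, their original relative order is preserved at the front
 * of the list.
 */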
584 
585 /*
586  * A forward ordered list (for .fini's) contains INITFIRST elements.  Move
587  * each of these elements to the end of the list.
588  */
589 static void
590 f_initfirst(Sort *sort, int end)
591 {
592 	Rt_map *tlmp;
593 	int	bgn, ifst, lifst = 0;
594 
595 	for (bgn = 0; bgn < sort->s_initfirst; bgn++) {
596 		for (ifst = lifst; ifst <= end; ifst++) {
597 			tlmp = sort->s_lmpa[ifst];
598 
599 			if (!(FLAGS(tlmp) & FLG_RT_INITFRST))
600 				continue;
601 
602 			/*
603 			 * If the INITFIRST element is already at the end of
604 			 * the list, leave it there.
605 			 */
606 			if (ifst == end)
607 				break;
608 
609 			/*
610 			 * Move the elements that follow the INITFIRST element,
611 			 * through to the end of the list, forward one position.
612 			 */
613 			(void) memmove(&sort->s_lmpa[ifst],
614 			    &sort->s_lmpa[ifst + 1],
615 			    ((end - ifst) * sizeof (Rt_map *)));
616 
617 			/*
618 			 * Insert INITFIRST element at the back of the list.
619 			 */
620 			sort->s_lmpa[end--] = tlmp;
621 			lifst = ifst;
622 			break;
623 		}
624 	}
625 }
626 
627 /*
628  * Determine whether .init or .fini processing is required.
629  */
630 static int
631 initorfini(Lm_list *lml, Rt_map *lmp, int flag, Sort *sort)
632 {
633 	if (flag & RT_SORT_REV) {
634 		/*
635 		 * For .init processing, only collect objects that have been
636 		 * relocated and haven't already been collected.
637 		 */
638 		if ((FLAGS(lmp) & (FLG_RT_RELOCED | FLG_RT_INITCLCT)) !=
639 		    FLG_RT_RELOCED)
640 			return (0);
641 
642 		if (dep_visit(lml, 0, 0, lmp, sort, flag) == -1)
643 			return (1);
644 
645 	} else if (!(flag & RT_SORT_DELETE) || (FLAGS(lmp) & FLG_RT_DELETE)) {
646 		/*
647 		 * Only collect objects that have had their .init collected,
648 		 * and haven't already been .fini collected.
649 		 */
650 		if (!((FLAGS(lmp) & (FLG_RT_INITCLCT | FLG_RT_FINICLCT)) ==
651 		    (FLG_RT_INITCLCT)))
652 			return (0);
653 
654 		if (dep_visit(lml, 0, 0, lmp, sort, flag) == -1)
655 			return (1);
656 	}
657 	return (0);
658 }
659 
660 /*
661  * Sort the dependency graph.
662  */
663 Rt_map **
664 tsort(Rt_map *lmp, int num, int flag)
665 {
666 	Rt_map	*_lmp;
667 	Lm_list	*lml = LIST(lmp);
668 	Word	init = lml->lm_flags & LML_FLG_TRC_INIT;
669 	Sort	sort = { 0 };
670 	size_t	size;
671 
672 	if (num == 0)
673 		return (0);
674 
675 	/*
676 	 * Prior to tsorting any .init sections, ensure that the `environ'
677 	 * symbol is initialized for this link-map list.
678 	 */
679 	if ((flag & RT_SORT_REV) && ((lml->lm_flags &
680 	    (LML_FLG_TRC_ENABLE | LML_FLG_ENVIRON)) == 0))
681 		set_environ(lml);
682 
683 	/*
684 	 * Allocate memory for the link-map list array.  Calloc the array to
685 	 * ensure all elements are zero, as we might find that no objects need
686 	 * processing.  At the same time, allocate a stack for any topological
687 	 * sorting that might be necessary.
688 	 */
689 	sort.s_lmp = lmp;
690 	sort.s_num = num + 1;
691 	size = sort.s_num * sizeof (Rt_map *);
692 	if ((sort.s_lmpa = calloc(2, size)) == NULL)
693 		return ((Rt_map **)S_ERROR);
694 	sort.s_stack = (Rt_map **)((uintptr_t)sort.s_lmpa + size);
695 
696 	/*
697 	 * Determine where to start searching for tsort() candidates.  Any call
698 	 * to tsort() for .init processing is passed the link-map from which to
699 	 * start searching.  However, if new objects have dependencies on
700 	 * existing objects, or existing objects have been promoted (RTLD_LAZY
701 	 * to RTLD_NOW), then start searching at the head of the link-map list.
702 	 * These previously loaded objects will have been tagged for inclusion
703 	 * in this tsort() pass.  They still remain on an existing tsort() list,
704 	 * which must have been prempted for control to have arrived here.
705 	 * which must have been preempted for control to have arrived here.
706 	 * tsort() list if their .init has already been called.
707 	 */
708 	if (lml->lm_flags & LML_FLG_OBJREEVAL)
709 		_lmp = lml->lm_head;
710 	else
711 		_lmp = lmp;
712 
713 	DBG_CALL(Dbg_file_bindings(_lmp, flag));
714 	lml->lm_flags &=
715 	    ~(LML_FLG_OBJREEVAL | LML_FLG_OBJADDED | LML_FLG_OBJDELETED);
716 
717 	/*
718 	 * If interposers exist, inspect these objects first.
719 	 *
720 	 * Interposers can provide implicit dependencies - for example, an
721 	 * application that has a dependency on libumem will cause any other
722 	 * dependencies of the application that use the malloc family to
723 	 * have an implicit dependency on libumem.  However, under the default
724 	 * condition of lazy binding, these dependency relationships on libumem
725 	 * are unknown to the tsorting process (ie. a call to one of the malloc
726 	 * are unknown to the tsorting process (i.e., a call to one of the malloc
727 	 * dependency information makes the interposer look "standalone",
728 	 * whereas the interposers .init/.fini should be analyzed with respect
729 	 * whereas the interposer's .init/.fini should be analyzed with respect
730 	 *
731 	 * By inspecting interposing objects first, we are able to trigger
732 	 * their .init sections to be accounted for before any others.
733 	 * Selecting these .init sections first is important for the malloc
734 	 * libraries, as these libraries need to prepare for pthread_atfork().
735 	 * However, handling interposer libraries in this generic fashion
736 	 * should help provide for other dependency relationships that may
737 	 * exist.
738 	 */
739 	if ((lml->lm_flags & (LML_FLG_INTRPOSE | LML_FLG_INTRPOSETSORT)) ==
740 	    LML_FLG_INTRPOSE) {
741 		Rt_map	*ilmp = _lmp;
742 
743 		/*
744 		 * Unless the executable is tagged as an interposer, skip to
745 		 * the next object.
746 		 */
747 		if ((FLAGS(ilmp) & MSK_RT_INTPOSE) == 0)
748 			ilmp = NEXT_RT_MAP(ilmp);
749 
750 		for (; ilmp; ilmp = NEXT_RT_MAP(ilmp)) {
751 			if ((FLAGS(ilmp) & MSK_RT_INTPOSE) == 0)
752 				break;
753 
754 			if (initorfini(lml, ilmp, (flag | RT_SORT_INTPOSE),
755 			    &sort) != 0)
756 				return ((Rt_map **)S_ERROR);
757 		}
758 
759 		/*
760 		 * Once all interposers are processed, there is no need to
761 		 * look for interposers again.  An interposer can only
762 		 * be introduced before any relocation takes place, thus
763 		 * interposer .init's will be grabbed during the first tsort
764 		 * starting at the head of the link-map list.
765 		 *
766 		 * Interposers can't be unloaded.  Thus interposer .fini's can
767 		 * only be called during atexit() processing.  The interposer
768 		 * tsort flag is removed from each link-map list during
769 	 * atexit_fini() so that the interposers' .fini sections are
770 		 * processed appropriately.
771 		 */
772 		lml->lm_flags |= LML_FLG_INTRPOSETSORT;
773 	}
774 
775 	/*
776 	 * Inspect any standard objects.
777 	 */
778 	for (; _lmp; _lmp = NEXT_RT_MAP(_lmp)) {
779 		if (FLAGS(_lmp) & MSK_RT_INTPOSE)
780 			continue;
781 
782 		if (initorfini(lml, _lmp, flag, &sort) != 0)
783 			return ((Rt_map **)S_ERROR);
784 	}
785 
786 	/*
787 	 * The dependencies have been collected such that they are appropriate
788 	 * for an .init order, for .fini order reverse them.
789 	 */
790 	if (flag & RT_SORT_FWD) {
791 		int	bgn = 0, end = sort.s_lndx - 1;
792 
793 		while (bgn < end) {
794 			Rt_map	*tlmp = sort.s_lmpa[end];
795 
796 			sort.s_lmpa[end] = sort.s_lmpa[bgn];
797 			sort.s_lmpa[bgn] = tlmp;
798 
799 			bgn++, end--;
800 		}
801 	}
802 
803 	/*
804 	 * If INITFIRST objects have been collected then move them to the front
805 	 * or end of the list as appropriate.
806 	 */
807 	if (sort.s_initfirst) {
808 		if (flag & RT_SORT_REV)
809 			r_initfirst(&sort, sort.s_lndx - 1);
810 		else
811 			f_initfirst(&sort, sort.s_lndx - 1);
812 	}
813 
814 	/*
815 	 * If tracing .init sections (only meaningful for RT_SORT_REV), print
816 	 * out the sorted dependencies.
817 	 */
818 	if (init)
819 		trace_sort(&sort);
820 
821 	/*
822 	 * Clean any temporary structures prior to return.
823 	 */
824 	if (sort.s_queue) {
825 		Aliste idx;
826 		Rt_map	*lmp2;
827 
828 		/*
829 		 * Traverse the link-maps collected on the sort queue and
830 		 * delete the depth index.  These link-maps may be traversed
831 		 * again to sort other components, whether for .inits or, almost
832 		 * certainly, for .finis.
833 		 */
834 		for (APLIST_TRAVERSE(sort.s_queue, idx, lmp2))
835 			IDX(lmp2) = 0;
836 
837 		free(sort.s_queue);
838 	}
839 
840 	if (sort.s_scc) {
841 		Aliste	idx;
842 		APlist	*alp;
843 
844 		for (APLIST_TRAVERSE(sort.s_scc, idx, alp))
845 			free(alp);
846 		free(sort.s_scc);
847 	}
848 
849 	/*
850 	 * The caller is responsible for freeing the sorted link-map list once
851 	 * the associated .init/.fini's have been fired.
852 	 */
853 	DBG_CALL(Dbg_file_bindings_done(lml));
854 	return (sort.s_lmpa);
855 }
856