arch/sparc64/lib/mcount.S
/*
 * Copyright (C) 2000 Anton Blanchard (anton@linuxcare.com)
 *
 * This file implements mcount(), which is used to collect profiling data.
 * This can also be tweaked for kernel stack overflow detection.
 */

#include <linux/linkage.h>

#include <asm/ptrace.h>
#include <asm/thread_info.h>

/*
 * This is the main variant and is called by C code.  GCC's -pg option
 * automatically instruments every C function with a call to this.
 */
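
/*
 * For illustration only: with -pg, GCC emits something roughly like
 *
 *	foo:
 *		save	%sp, -framesize, %sp
 *		call	_mcount
 *		 nop
 *		...
 *
 * in every function prologue, so on entry here %o7 holds the call site
 * inside the instrumented function and %i7 holds the return address
 * into its caller.  The ftrace paths below pass these along as the
 * "self" and "parent" addresses.
 */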

#ifdef CONFIG_STACK_DEBUG

#define OVSTACKSIZE	4096		/* let's hope this is enough */

	.data
	.align		8
panicstring:
	.asciz		"Stack overflow\n"
	.align		8
ovstack:
	.skip		OVSTACKSIZE
#endif
	.text
	.align		32
	.globl		_mcount
	.type		_mcount,#function
	.globl		mcount
	.type		mcount,#function
_mcount:
mcount:
#ifdef CONFIG_STACK_DEBUG
	/*
	 * Check whether %sp is dangerously low: compute roughly the
	 * lowest safe stack pointer for this task (thread_info plus the
	 * 256-byte FPU save frames in use plus a minimal 192-byte frame,
	 * adjusted by STACK_BIAS) and compare %sp against it.
	 */
	ldub		[%g6 + TI_FPDEPTH], %g1
	srl		%g1, 1, %g3
	add		%g3, 1, %g3
	sllx		%g3, 8, %g3			! each fpregs frame is 256b
	add		%g3, 192, %g3
	add		%g6, %g3, %g3			! where does thread_info+frame end?
	sub		%g3, STACK_BIAS, %g3
	cmp		%sp, %g3
	bg,pt		%xcc, 1f
	 sethi		%hi(panicstring), %g3
	sethi		%hi(ovstack), %g7		! can't move to panic stack fast enough
	or		%g7, %lo(ovstack), %g7
	add		%g7, OVSTACKSIZE, %g7
	sub		%g7, STACK_BIAS, %g7
	mov		%g7, %sp
	call		prom_printf
	 or		%g3, %lo(panicstring), %o0
	call		prom_halt
	 nop
1:
#endif
#ifdef CONFIG_FTRACE
#ifdef CONFIG_DYNAMIC_FTRACE
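	/*
	 * With dynamic ftrace the call below is a patch site: it is
	 * assembled as a call to ftrace_stub and is rewritten at run
	 * time by the ftrace code once tracing is set up.
	 */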
	mov		%o7, %o0
	.globl		mcount_call
mcount_call:
	call		ftrace_stub
	 mov		%o0, %o7
#else
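	/*
	 * Static ftrace: if a tracer has been registered, i.e.
	 * ftrace_trace_function no longer points at ftrace_stub,
	 * tail-call it with the call site in %o0 and the parent's
	 * return address in %o1.
	 */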
	sethi		%hi(ftrace_trace_function), %g1
	sethi		%hi(ftrace_stub), %g2
	ldx		[%g1 + %lo(ftrace_trace_function)], %g1
	or		%g2, %lo(ftrace_stub), %g2
	cmp		%g1, %g2
	be,pn		%icc, 1f
	 mov		%i7, %o1
	jmpl		%g1, %g0
	 mov		%o7, %o0
	/* not reached */
1:
#endif
#endif
	retl
	 nop
	.size		_mcount,.-_mcount
	.size		mcount,.-mcount

#ifdef CONFIG_FTRACE
	.globl		ftrace_stub
	.type		ftrace_stub,#function
ftrace_stub:
	retl
	 nop
	.size		ftrace_stub,.-ftrace_stub
#ifdef CONFIG_DYNAMIC_FTRACE
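/*
 * ftrace_caller is the trampoline that patched mcount call sites
 * branch to once tracing is enabled; the call at ftrace_call below
 * is itself rewritten at run time to point at the current tracer.
 */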
	.globl		ftrace_caller
	.type		ftrace_caller,#function
ftrace_caller:
	mov		%i7, %o1
	mov		%o7, %o0
	.globl		ftrace_call
ftrace_call:
	call		ftrace_stub
	 mov		%o0, %o7
	retl
	 nop
	.size		ftrace_caller,.-ftrace_caller
#endif
#endif
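
/*
 * A minimal sketch, for illustration only, of the kind of tracer the
 * code above ends up calling.  It assumes the ftrace interface of this
 * kernel generation (a callback taking the call site and its parent);
 * my_tracer and my_ops are made-up names, not part of this file:
 *
 *	#include <linux/ftrace.h>
 *
 *	static void my_tracer(unsigned long ip, unsigned long parent_ip)
 *	{
 *		// ip:        call site in the traced function (from %o7)
 *		// parent_ip: return address into its caller   (from %i7)
 *	}
 *
 *	static struct ftrace_ops my_ops = { .func = my_tracer };
 *
 * Registering my_ops with register_ftrace_function() is what makes
 * ftrace_trace_function point somewhere other than ftrace_stub, so the
 * static path in mcount above tail-calls my_tracer.
 */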