# This patch changes all cases where the ARM assembler mov pc,rx
# instructions are used to ensure that the thumb/arm interwork change of
# processor mode works - in essence mov pc,rx needs to become bx rx.
#
# The ldr pc or ldm rx, {pc} instructions are not changed - this is
# fine on ARM >=v5 but will fail to restore thumb mode on ARM v4T,
# i.e. this code will not provide support for thumb on ARM v4T.
#
# One mov pc is left in resolve.S; it is fixed in a separate patch -
# thumb-resolve.patch
#
# The changes are guarded by __THUMB_INTERWORK__ - the original
# mov instruction still works on newer architectures and is required on
# arch v4 (not v4T) and earlier, which did not support thumb, so the
# guard is safe. See gcc lib1asmfuncs for a more exact test.
#
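# For illustration only (not part of the patch itself), a minimal sketch
# of the guarded return sequence this patch introduces. The symbol name
# example_return is hypothetical; the pattern assumes a .S file, which is
# run through the C preprocessor so the #if guard works:
#
#     .text
#     .globl example_return
#     .type  example_return, %function
# example_return:
# #if defined(__THUMB_INTERWORK__)
#     bx   lr            @ interworking return: bit 0 of lr selects thumb/arm state
# #else
#     mov  pc, lr        @ pre-thumb architectures (no bx available); no state change
# #endif
#     .size example_return, .-example_return
#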
--- uClibc-0.9.28/.pc/thumb-mov-pc-bx.patch/ldso/ldso/arm/dl-startup.h 2005-08-17 15:49:41.000000000 -0700
+++ uClibc-0.9.28/ldso/ldso/arm/dl-startup.h 2005-09-16 23:38:34.266546180 -0700
@@ -4,6 +4,7 @@
* Copyright (C) 2000-2004 by Erik Andersen <andersen@codepoet.org>
*/
+#if ! (defined __thumb__)
asm(
" .text\n"
" .globl _start\n"
@@ -40,7 +41,11 @@
" ldr r0, .L_FINI_PROC\n"
" ldr r0, [sl, r0]\n"
" @ jump to the user_s entry point\n"
+#if defined(__THUMB_INTERWORK__)
+ " bx r6\n"
+#else
" mov pc, r6\n"
+#endif
".L_GET_GOT:\n"
" .word _GLOBAL_OFFSET_TABLE_ - .L_GOT_GOT - 4\n"
".L_SKIP_ARGS:\n"
@@ -51,6 +56,70 @@
" .size _start,.-_start\n"
".previous\n"
);
+#else
+asm(
+ " .text\n"
+ " .arm\n"
+ " .globl _start\n"
+ " .type _start,%function\n"
+ "_start:\n"
+ " @ dumb: can't persuade the linker to make the start address\n"
+ " @ odd, so use an arm function and change to thumb (_dl_start\n"
+ " @ is thumb)\n"
+ " adr r0, __dl_thumb_start+1\n"
+ " bx r0\n"
+ "\n\n"
+ " .thumb\n"
+ " .globl __dl_thumb_start\n"
+ " .thumb_func\n"
+ " .type __dl_thumb_start,%function\n"
+ "__dl_thumb_start:\n"
+ " @ at start time, all the args are on the stack\n"
+ " mov r0, sp\n"
+ " bl _dl_start\n"
+ " @ returns user entry point in r0\n"
+ " mov r6, r0\n"
+ " @ we are PIC code, so get global offset table\n"
+ " ldr r7, .L_GET_GOT\n"
+ ".L_GOT_GOT:\n"
+ " add r7, pc\n"
+ " @ See if we were run as a command with the executable file\n"
+ " @ name as an extra leading argument.\n"
+ " ldr r4, .L_SKIP_ARGS\n"
+ " ldr r4, [r7, r4]\n"
+ " @ get the original arg count\n"
+ " ldr r1, [sp]\n"
+ " @ subtract _dl_skip_args from it\n"
+ " sub r1, r1, r4\n"
+ " @ adjust the stack pointer to skip them\n"
+ " lsl r4, r4, #2\n"
+ " add sp, r4\n"
+ " @ get the argv address\n"
+ " add r2, sp, #4\n"
+ " @ store the new argc in the new stack location\n"
+ " str r1, [sp]\n"
+ " @ compute envp\n"
+ " lsl r3, r1, #2\n"
+ " add r3, r3, r2\n"
+ " add r3, #4\n"
+ "\n\n"
+ " @ load the finalizer function\n"
+ " ldr r0, .L_FINI_PROC\n"
+ " ldr r0, [r7, r0]\n"
+ " @ jump to the user_s entry point\n"
+ " bx r6\n"
+ "\n\n"
+ ".L_GET_GOT:\n"
+ " .word _GLOBAL_OFFSET_TABLE_ - .L_GOT_GOT - 4\n"
+ ".L_SKIP_ARGS:\n"
+ " .word _dl_skip_args(GOTOFF)\n"
+ ".L_FINI_PROC:\n"
+ " .word _dl_fini(GOT)\n"
+ "\n\n"
+ " .size _start,.-_start\n"
+ ".previous\n"
+);
+#endif
/* Get a pointer to the argv array. On many platforms this can be just
--- uClibc/.pc/thumb-mov-pc-bx.patch/ldso/ldso/arm/dl-sysdep.h 2006-01-19 19:03:40.000000000 -0800
+++ uClibc/ldso/ldso/arm/dl-sysdep.h 2006-01-20 01:25:26.026727661 -0800
@@ -85,7 +85,25 @@ elf_machine_load_address (void)
extern void __dl_start asm ("_dl_start");
Elf32_Addr got_addr = (Elf32_Addr) &__dl_start;
Elf32_Addr pcrel_addr;
+#if !defined __thumb__
asm ("adr %0, _dl_start" : "=r" (pcrel_addr));
+#else
+ int tmp;
+ /* The above adr will not work on thumb because it
+ * is negative. The only safe way is to temporarily
+ * swap to arm.
+ */
+ asm( ".align 2\n"
+ " bx pc\n"
+ " nop \n"
+ " .arm \n"
+ " adr %0, _dl_start\n"
+ " .align 2\n"
+ " orr %1, pc, #1\n"
+ " bx %1\n"
+ " .force_thumb\n"
+ : "=r" (pcrel_addr), "=&r" (tmp));
+#endif
return pcrel_addr - got_addr;
}
--- uClibc/libc/sysdeps/linux/arm/clone.S.orig 2006-01-19 19:03:55.000000000 -0800
+++ uClibc/libc/sysdeps/linux/arm/clone.S 2006-01-19 19:30:30.969758100 -0800
@@ -52,7 +52,11 @@ clone:
swi __NR_clone
movs a1, a1
blt __error
- movne pc, lr
+#if defined(__THUMB_INTERWORK__)
+ bxne lr
+#else
+ movne pc, lr
+#endif
@ pick the function arg and call address off the stack and execute
ldr r0, [sp, #4]
--- uClibc/libc/sysdeps/linux/arm/vfork.S.orig 2006-01-19 19:03:55.000000000 -0800
+++ uClibc/libc/sysdeps/linux/arm/vfork.S 2006-01-19 19:33:09.533757874 -0800
@@ -25,7 +25,11 @@ vfork:
#ifdef __NR_vfork
swi __NR_vfork
cmn r0, #4096
+#if defined(__THUMB_INTERWORK__)
+ bxcc lr
+#else
movcc pc, lr
+#endif
/* Check if vfork even exists. */
ldr r1, =-ENOSYS
@@ -38,7 +42,11 @@ vfork:
cmn r0, #4096
/* Syscall worked. Return to child/parent */
+#if defined(__THUMB_INTERWORK__)
+ bxcc lr
+#else
movcc pc, lr
+#endif
__error:
b __syscall_error