// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// Implementation of PreamblePatcher

#include "sandbox/win/src/sidestep/preamble_patcher.h"

#include "sandbox/win/src/sandbox_nt_util.h"
#include "sandbox/win/src/sidestep/mini_disassembler.h"

// Definitions of assembly statements we need
#define ASM_JMP32REL 0xE9
#define ASM_INT3 0xCC
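// ASM_JMP32REL (0xE9) starts a 5-byte "JMP rel32" instruction: the opcode
// followed by a 32-bit little-endian displacement measured from the address
// of the next instruction. ASM_INT3 (0xCC) is the single-byte x86 breakpoint
// instruction.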

namespace {

// Very basic memcpy. We are copying 4 to 12 bytes most of the time, so there
// is no attempt to optimize this code or have a general purpose function.
// We don't want to call the crt from this code.
inline void* RawMemcpy(void* destination, const void* source, size_t bytes) {
  const char* from = reinterpret_cast<const char*>(source);
  char* to = reinterpret_cast<char*>(destination);

  for (size_t i = 0; i < bytes; i++)
    to[i] = from[i];

  return destination;
}

// Very basic memset. We are filling 1 to 7 bytes most of the time, so there
// is no attempt to optimize this code or have a general purpose function.
// We don't want to call the crt from this code.
inline void* RawMemset(void* destination, int value, size_t bytes) {
  char* to = reinterpret_cast<char*>(destination);

  for (size_t i = 0; i < bytes; i++)
    to[i] = static_cast<char>(value);

  return destination;
}

}  // namespace

#define ASSERT(a, b) DCHECK_NT(a)

namespace sidestep {

SideStepError PreamblePatcher::RawPatchWithStub(
    void* target_function,
    void* replacement_function,
    unsigned char* preamble_stub,
    size_t stub_size,
    size_t* bytes_needed) {
  if ((NULL == target_function) ||
      (NULL == replacement_function) ||
      (NULL == preamble_stub)) {
    ASSERT(false, (L"Invalid parameters - either target_function, "
                   L"replacement_function or preamble_stub was NULL."));
    return SIDESTEP_INVALID_PARAMETER;
  }

  // TODO(V7:joi) Siggi and I just had a discussion and decided that both
  // patching and unpatching are actually unsafe. We also discussed a
  // method of making it safe, which is to freeze all other threads in the
  // process, check their thread context to see if their eip is currently
  // inside the block of instructions we need to copy to the stub, and if so
  // wait a bit and try again, then unfreeze all threads once we've patched.
  // Not implementing this for now since we're only using SideStep for unit
  // testing, but if we ever use it for production code this is what we
  // should do.
  //
  // NOTE: Stoyan suggests we can write 8 or even 10 bytes atomically using
  // FPU instructions, and on newer processors we could use cmpxchg8b or
  // cmpxchg16b. So it might be possible to do the patching/unpatching
  // atomically and avoid having to freeze other threads. Note though, that
  // doing it atomically does not help if one of the other threads happens
  // to have its eip in the middle of the bytes you change while you change
  // them.
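  //
  // A minimal sketch of that freeze-and-check approach (illustrative only;
  // it assumes a hypothetical OtherThreadIds() helper that enumerates every
  // thread in this process except the current one, and omits error handling
  // and handle cleanup):
  //
  //   for (DWORD tid : OtherThreadIds()) {
  //     HANDLE thread = ::OpenThread(
  //         THREAD_SUSPEND_RESUME | THREAD_GET_CONTEXT, FALSE, tid);
  //     ::SuspendThread(thread);
  //     CONTEXT context = {0};
  //     context.ContextFlags = CONTEXT_CONTROL;
  //     ::GetThreadContext(thread, &context);
  //     // If context.Eip lies inside the bytes about to be overwritten,
  //     // resume the thread, wait briefly and try again before patching.
  //   }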
  unsigned char* target = reinterpret_cast<unsigned char*>(target_function);

  // Let's disassemble the preamble of the target function to see if we can
  // patch, and to see how much of the preamble we need to take. We need 5
  // bytes for our jmp instruction, so let's find the minimum number of
  // instructions to get 5 bytes.
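  // Note: Disassemble() is expected to add the length of each decoded
  // instruction to preamble_bytes, which is what lets this loop stop at the
  // first whole-instruction boundary at or beyond 5 bytes.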
  MiniDisassembler disassembler;
  unsigned int preamble_bytes = 0;
  while (preamble_bytes < 5) {
    InstructionType instruction_type =
        disassembler.Disassemble(target + preamble_bytes, &preamble_bytes);
    if (IT_JUMP == instruction_type) {
      ASSERT(false, (L"Unable to patch because there is a jump instruction "
                     L"in the first 5 bytes."));
      return SIDESTEP_JUMP_INSTRUCTION;
    } else if (IT_RETURN == instruction_type) {
      ASSERT(false, (L"Unable to patch because function is too short"));
      return SIDESTEP_FUNCTION_TOO_SMALL;
    } else if (IT_GENERIC != instruction_type) {
      ASSERT(false, (L"Disassembler encountered unsupported instruction "
                     L"(either unused or unknown)."));
      return SIDESTEP_UNSUPPORTED_INSTRUCTION;
    }
  }

  if (NULL != bytes_needed)
    *bytes_needed = preamble_bytes + 5;

  // Inv: preamble_bytes is the number of bytes (at least 5) that we need to
  // take from the preamble to have whole instructions that are 5 bytes or
  // more in size total. The size of the stub required is preamble_bytes plus
  // the size of the jmp (5).
  if (preamble_bytes + 5 > stub_size) {
    NOTREACHED_NT();
    return SIDESTEP_INSUFFICIENT_BUFFER;
  }

  // First, copy the preamble that we will overwrite.
  RawMemcpy(reinterpret_cast<void*>(preamble_stub),
            reinterpret_cast<void*>(target), preamble_bytes);

  // Now, make a jmp instruction to the rest of the target function (minus the
  // preamble bytes we moved into the stub) and copy it into our preamble-stub.
  // Find the address to jump to, relative to the next address after the jmp
  // instruction.
#pragma warning(push)
#pragma warning(disable:4244)
  // This assignment generates a warning because it is 32 bit specific.
  int relative_offset_to_target_rest
      = ((reinterpret_cast<unsigned char*>(target) + preamble_bytes) -
         (preamble_stub + preamble_bytes + 5));
#pragma warning(pop)
  // jmp (Jump near, relative, displacement relative to next instruction)
  preamble_stub[preamble_bytes] = ASM_JMP32REL;
  // Copy the address.
  RawMemcpy(reinterpret_cast<void*>(preamble_stub + preamble_bytes + 1),
            reinterpret_cast<void*>(&relative_offset_to_target_rest), 4);

  // Inv: preamble_stub points to assembly code that will execute the
  // original function by first executing the first preamble_bytes bytes of
  // the preamble, then jumping to the rest of the function.
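  //
  // Concretely, the stub buffer now holds:
  //   preamble_stub[0 .. preamble_bytes-1]     the copied preamble bytes
  //   preamble_stub[preamble_bytes]            ASM_JMP32REL (0xE9)
  //   preamble_stub[preamble_bytes+1 .. +4]    relative_offset_to_target_rest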

  // Overwrite the first 5 bytes of the target function with a jump to our
  // replacement function.
  // (Jump near, relative, displacement relative to next instruction)
  target[0] = ASM_JMP32REL;

  // Find offset from instruction after jmp, to the replacement function.
#pragma warning(push)
#pragma warning(disable:4244)
  int offset_to_replacement_function =
      reinterpret_cast<unsigned char*>(replacement_function) -
      reinterpret_cast<unsigned char*>(target) - 5;
#pragma warning(pop)
  // Complete the jmp instruction.
  RawMemcpy(reinterpret_cast<void*>(target + 1),
            reinterpret_cast<void*>(&offset_to_replacement_function), 4);
  // Set any remaining target bytes whose instructions were moved into the
  // preamble-stub to INT3 so as not to cause confusion (otherwise you might
  // see some strange instructions if you look at the disassembly, or even
  // invalid instructions). Also, by doing this, we will break into the
  // debugger if some code calls into this portion of the code. If this
  // happens, it means that this function cannot be patched using this
  // patcher without further thought.
  if (preamble_bytes > 5) {
    RawMemset(reinterpret_cast<void*>(target + 5), ASM_INT3,
              preamble_bytes - 5);
  }

  // Inv: The memory pointed to by target_function now points to a relative
  // jump instruction that jumps over to the preamble_stub. The preamble
  // stub contains the first preamble_bytes bytes of the original target
  // function's preamble code, followed by a relative jump back to the next
  // instruction after those first preamble_bytes bytes.
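  //
  // Concretely, the first preamble_bytes bytes of the target now hold:
  //   target[0]                      ASM_JMP32REL (0xE9)
  //   target[1 .. 4]                 offset_to_replacement_function
  //   target[5 .. preamble_bytes-1]  ASM_INT3 padding (only present when the
  //                                  copied preamble was longer than 5 bytes)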

  return SIDESTEP_SUCCESS;
}

}  // namespace sidestep

#undef ASSERT
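
// A minimal usage sketch (illustrative only; it assumes hypothetical Target()
// and Replacement() functions with identical signatures, that
// RawPatchWithStub() is reachable from the caller, and that `stub` lives in
// writable, executable memory so the copied preamble can run; the first bytes
// of Target() must also be writable when the patch is applied):
//
//   unsigned char stub[32] = {0};
//   size_t bytes_needed = 0;
//   sidestep::SideStepError result =
//       sidestep::PreamblePatcher::RawPatchWithStub(
//           reinterpret_cast<void*>(&Target),
//           reinterpret_cast<void*>(&Replacement),
//           stub, sizeof(stub), &bytes_needed);
//   // result is SIDESTEP_SUCCESS if the patch was applied.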