Comparing BloombergLP::bslh::SpookyHashAlgorithmImp::update(void const*, unsigned long) in the assumed vs. ignored variants
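
For orientation, below is a minimal sketch of the structure both listings compile down to, reconstructed from the assembly itself rather than from the bslh source: update() first checks whether the bytes already staged plus the incoming bytes still fit in the 192-byte internal buffer (the cmpq $191, %rbx near the top of each listing); if they do, it memcpy's them in, bumps the two counters kept at offsets 288 and 296, and returns. Only otherwise does it fall into the long add/xor/rotate mixing sequence that makes up the bulk of the diff. All names here (SpookyLikeState, d_buffer, d_bufferedBytes, d_totalLength, k_BUFFER_SIZE) are illustrative assumptions, not the real bslh members.

    #include <cstddef>
    #include <cstring>

    // Hypothetical layout standing in for SpookyHashAlgorithmImp; member and
    // constant names are illustrative, not the actual bslh identifiers.
    struct SpookyLikeState {
        enum { k_BUFFER_SIZE = 192 };               // 2 * 12 state words * 8 bytes

        unsigned char      d_buffer[k_BUFFER_SIZE]; // staging area        (offset 0)
        unsigned long long d_state[12];             // mixing state        (offsets 192..287)
        std::size_t        d_totalLength;           // bytes hashed so far (offset 288: addq %rbp, 288(...))
        unsigned char      d_bufferedBytes;         // bytes staged        (offset 296: movzbl 296(...))

        void update(const void *data, std::size_t length)
        {
            // Fast path: staged + incoming still fits in the buffer, so just
            // append and return; this is the memcpy@plt call right after the
            // cmpq $191 at the top of both listings.
            if (d_bufferedBytes + length < k_BUFFER_SIZE) {
                std::memcpy(d_buffer + d_bufferedBytes, data, length);
                d_totalLength   += length;
                d_bufferedBytes  = static_cast<unsigned char>(d_bufferedBytes + length);
                return;
            }
            // Slow path (the bulk of both listings): flush the staged bytes,
            // then fold each full 96-byte block into d_state with add/xor/
            // rotate rounds, staging any tail for the next call. Omitted here.
        }
    };

The rest of the diff is that slow path: the frame shrinks from 104 to 88 bytes of spill space, and the twelve 64-bit mixing values land in different registers and stack slots, which is what produces the long run of one-for-one instruction changes below.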

Files changed (1)
  1. balb_controlmanager.t/{0.assume.s → 0.none.s} +318 -323
balb_controlmanager.t/{0.assume.s → 0.none.s} RENAMED
@@ -1,333 +1,328 @@
1
- 00000000004350a0 <BloombergLP::bslh::SpookyHashAlgorithmImp::update(void const*, unsigned long)>:
2
  0000000000000000: 01 pushq %rbp
3
  0000000000000001: 02 pushq %r15
4
  0000000000000003: 02 pushq %r14
5
  0000000000000005: 02 pushq %r13
6
  0000000000000007: 02 pushq %r12
7
  0000000000000009: 01 pushq %rbx
8
- 000000000000000a: 04 subq $104, %rsp
9
  000000000000000e: 03 movq %rdx, %rbp
10
  0000000000000011: 05 movq %rdi, 8(%rsp)
11
- 0000000000000016: 07 movzbl 296(%rdi), %ecx
12
- 000000000000001d: 04 leaq (%rcx,%rdx), %rbx
13
  0000000000000021: 07 cmpq $191, %rbx
14
- 0000000000000028: 02 ja 0x4350f0 <BloombergLP::bslh::SpookyHashAlgorithmImp::update(void const*, unsigned long)+0x50>
15
  000000000000002a: 05 movq 8(%rsp), %r14
16
- 000000000000002f: 03 addq %r14, %rcx
17
- 0000000000000032: 03 movq %rcx, %rdi
18
- 0000000000000035: 03 movq %rbp, %rdx
19
- 0000000000000038: 05 callq 0x404af0 <memcpy@plt>
20
- 000000000000003d: 07 addq %rbp, 288(%r14)
21
- 0000000000000044: 07 movb %bl, 296(%r14)
22
- 000000000000004b: 05 jmp 0x4355cf <BloombergLP::bslh::SpookyHashAlgorithmImp::update(void const*, unsigned long)+0x52f>
23
- 0000000000000050: 05 movq 8(%rsp), %rdx
24
- 0000000000000055: 07 movq 288(%rdx), %rax
25
- 000000000000005c: 07 movq 192(%rdx), %r8
26
- 0000000000000063: 07 movq 200(%rdx), %r9
27
- 000000000000006a: 06 cmpq $192, %rax
28
- 0000000000000070: 05 movq %rbp, 24(%rsp)
29
- 0000000000000075: 05 movq %rcx, 88(%rsp)
30
- 000000000000007a: 02 jae 0x435143 <BloombergLP::bslh::SpookyHashAlgorithmImp::update(void const*, unsigned long)+0xa3>
31
- 000000000000007c: 10 movabsq $-2401053088876216593, %r11
32
- 0000000000000086: 03 movq %r8, %r10
33
- 0000000000000089: 03 movq %r9, %rbx
34
- 000000000000008c: 03 movq %r11, %rbp
35
- 000000000000008f: 03 movq %r8, %rcx
36
- 0000000000000092: 03 movq %r9, %rdx
37
- 0000000000000095: 03 movq %r11, %r12
38
- 0000000000000098: 03 movq %r8, %r13
39
- 000000000000009b: 03 movq %r9, %r14
40
- 000000000000009e: 03 movq %r11, %r15
41
- 00000000000000a1: 02 jmp 0x43518e <BloombergLP::bslh::SpookyHashAlgorithmImp::update(void const*, unsigned long)+0xee>
42
- 00000000000000a3: 05 movq 8(%rsp), %rdi
43
- 00000000000000a8: 07 movq 208(%rdi), %r11
44
- 00000000000000af: 07 movq 216(%rdi), %r10
45
- 00000000000000b6: 07 movq 224(%rdi), %rbx
46
- 00000000000000bd: 07 movq 232(%rdi), %rbp
47
- 00000000000000c4: 07 movq 240(%rdi), %rcx
48
- 00000000000000cb: 07 movq 248(%rdi), %rdx
49
- 00000000000000d2: 07 movq 256(%rdi), %r12
50
- 00000000000000d9: 07 movq 264(%rdi), %r13
51
- 00000000000000e0: 07 movq 272(%rdi), %r14
52
- 00000000000000e7: 07 movq 280(%rdi), %r15
53
- 00000000000000ee: 05 movq %rdx, 16(%rsp)
54
- 00000000000000f3: 05 addq 24(%rsp), %rax
55
- 00000000000000f8: 05 movq 8(%rsp), %rdx
56
- 00000000000000fd: 07 movq %rax, 288(%rdx)
57
- 0000000000000104: 05 movq 88(%rsp), %rdx
58
- 0000000000000109: 02 testb %dl, %dl
59
- 000000000000010b: 05 movq %rsi, 40(%rsp)
60
- 0000000000000110: 06 je 0x4353f9 <BloombergLP::bslh::SpookyHashAlgorithmImp::update(void const*, unsigned long)+0x359>
61
- 0000000000000116: 05 movl $192, %eax
62
- 000000000000011b: 03 subq %rdx, %rax
63
- 000000000000011e: 05 movq %rax, 80(%rsp)
64
- 0000000000000123: 05 movq 8(%rsp), %rax
65
- 0000000000000128: 04 leaq (%rax,%rdx), %rdi
66
- 000000000000012c: 05 movq 80(%rsp), %rdx
67
- 0000000000000131: 05 movq %r9, 72(%rsp)
68
- 0000000000000136: 05 movq %r8, 64(%rsp)
69
- 000000000000013b: 05 movq %r11, 96(%rsp)
70
- 0000000000000140: 05 movq %r10, 32(%rsp)
71
- 0000000000000145: 05 movq %rbx, 56(%rsp)
72
- 000000000000014a: 05 movq %rcx, 48(%rsp)
73
- 000000000000014f: 05 callq 0x404af0 <memcpy@plt>
74
- 0000000000000154: 05 movq 48(%rsp), %rcx
75
- 0000000000000159: 05 movq 56(%rsp), %rbx
76
- 000000000000015e: 05 movq 32(%rsp), %r10
77
- 0000000000000163: 05 movq 96(%rsp), %r11
78
- 0000000000000168: 05 movq 64(%rsp), %r8
79
- 000000000000016d: 05 movq 72(%rsp), %r9
80
- 0000000000000172: 05 movq 8(%rsp), %rdi
81
- 0000000000000177: 03 addq (%rdi), %r8
82
- 000000000000017a: 03 xorq %r8, %r15
83
- 000000000000017d: 04 rolq $11, %r8
84
- 0000000000000181: 03 addq %r9, %r15
85
- 0000000000000184: 04 addq 8(%rdi), %r9
86
- 0000000000000188: 03 xorq %r9, %r8
87
- 000000000000018b: 04 rolq $32, %r9
88
- 000000000000018f: 03 xorq %r14, %r11
89
- 0000000000000192: 03 addq %r11, %r8
90
- 0000000000000195: 04 addq 16(%rdi), %r11
91
- 0000000000000199: 03 xorq %r11, %r9
92
- 000000000000019c: 04 rolq $43, %r11
93
- 00000000000001a0: 03 xorq %r15, %r10
94
- 00000000000001a3: 03 addq %r10, %r9
95
- 00000000000001a6: 04 addq 24(%rdi), %r10
96
- 00000000000001aa: 03 xorq %r10, %r11
97
- 00000000000001ad: 04 rolq $31, %r10
98
- 00000000000001b1: 03 xorq %r8, %rbx
99
- 00000000000001b4: 03 addq %rbx, %r11
100
- 00000000000001b7: 04 addq 32(%rdi), %rbx
101
- 00000000000001bb: 03 xorq %rbx, %r10
102
- 00000000000001be: 04 rolq $17, %rbx
103
- 00000000000001c2: 03 xorq %r9, %rbp
104
- 00000000000001c5: 03 addq %rbp, %r10
105
- 00000000000001c8: 04 addq 40(%rdi), %rbp
106
- 00000000000001cc: 03 xorq %rbp, %rbx
107
- 00000000000001cf: 04 rolq $28, %rbp
108
- 00000000000001d3: 03 xorq %r11, %rcx
109
- 00000000000001d6: 03 addq %rcx, %rbx
110
- 00000000000001d9: 04 addq 48(%rdi), %rcx
111
- 00000000000001dd: 03 xorq %rcx, %rbp
112
- 00000000000001e0: 04 rolq $39, %rcx
113
- 00000000000001e4: 05 movq 16(%rsp), %rax
114
- 00000000000001e9: 03 xorq %r10, %rax
115
- 00000000000001ec: 03 addq %rax, %rbp
116
- 00000000000001ef: 04 addq 56(%rdi), %rax
117
- 00000000000001f3: 03 xorq %rax, %rcx
118
- 00000000000001f6: 04 rolq $57, %rax
119
- 00000000000001fa: 03 xorq %rbx, %r12
120
- 00000000000001fd: 03 addq %r12, %rcx
121
- 0000000000000200: 04 addq 64(%rdi), %r12
122
- 0000000000000204: 03 xorq %r12, %rax
123
- 0000000000000207: 04 rolq $55, %r12
124
- 000000000000020b: 03 xorq %rbp, %r13
125
- 000000000000020e: 03 addq %r13, %rax
126
- 0000000000000211: 04 addq 72(%rdi), %r13
127
- 0000000000000215: 03 xorq %r13, %r12
128
- 0000000000000218: 04 rolq $54, %r13
129
- 000000000000021c: 03 xorq %rcx, %r14
130
- 000000000000021f: 03 addq %r14, %r12
131
- 0000000000000222: 04 addq 80(%rdi), %r14
132
- 0000000000000226: 03 xorq %r14, %r13
133
- 0000000000000229: 04 rolq $22, %r14
134
- 000000000000022d: 03 xorq %rax, %r15
135
- 0000000000000230: 03 addq %r15, %r13
136
- 0000000000000233: 04 addq 88(%rdi), %r15
137
- 0000000000000237: 03 xorq %r15, %r14
138
- 000000000000023a: 04 rolq $46, %r15
139
- 000000000000023e: 03 xorq %r12, %r8
140
- 0000000000000241: 03 addq %r8, %r14
141
- 0000000000000244: 04 addq 96(%rdi), %r8
142
- 0000000000000248: 03 xorq %r8, %r15
143
- 000000000000024b: 04 rolq $11, %r8
144
- 000000000000024f: 03 xorq %r13, %r9
145
- 0000000000000252: 03 addq %r9, %r15
146
- 0000000000000255: 04 addq 104(%rdi), %r9
147
- 0000000000000259: 03 xorq %r9, %r8
148
- 000000000000025c: 04 rolq $32, %r9
149
- 0000000000000260: 03 xorq %r14, %r11
150
- 0000000000000263: 03 addq %r11, %r8
151
- 0000000000000266: 04 addq 112(%rdi), %r11
152
- 000000000000026a: 03 xorq %r11, %r9
153
- 000000000000026d: 04 rolq $43, %r11
154
- 0000000000000271: 03 xorq %r15, %r10
155
- 0000000000000274: 03 addq %r10, %r9
156
- 0000000000000277: 04 addq 120(%rdi), %r10
157
- 000000000000027b: 03 xorq %r10, %r11
158
- 000000000000027e: 04 rolq $31, %r10
159
- 0000000000000282: 03 xorq %r8, %rbx
160
- 0000000000000285: 03 addq %rbx, %r11
161
- 0000000000000288: 07 addq 128(%rdi), %rbx
162
- 000000000000028f: 03 xorq %rbx, %r10
163
- 0000000000000292: 04 rolq $17, %rbx
164
- 0000000000000296: 03 xorq %r9, %rbp
165
- 0000000000000299: 03 addq %rbp, %r10
166
- 000000000000029c: 07 addq 136(%rdi), %rbp
167
- 00000000000002a3: 03 xorq %rbp, %rbx
168
- 00000000000002a6: 04 rolq $28, %rbp
169
- 00000000000002aa: 03 xorq %r11, %rcx
170
- 00000000000002ad: 03 addq %rcx, %rbx
171
- 00000000000002b0: 07 addq 144(%rdi), %rcx
172
- 00000000000002b7: 03 xorq %r10, %rax
173
- 00000000000002ba: 03 xorq %rcx, %rbp
174
- 00000000000002bd: 04 rolq $39, %rcx
175
- 00000000000002c1: 03 addq %rax, %rbp
176
- 00000000000002c4: 07 addq 152(%rdi), %rax
177
- 00000000000002cb: 05 movq 88(%rsp), %rsi
178
- 00000000000002d0: 03 negq %rsi
179
- 00000000000002d3: 05 movq 40(%rsp), %rdx
180
- 00000000000002d8: 08 leaq 192(%rdx,%rsi), %rdx
181
- 00000000000002e0: 05 movq %rdx, 40(%rsp)
182
- 00000000000002e5: 03 xorq %rax, %rcx
183
- 00000000000002e8: 04 rolq $57, %rax
184
- 00000000000002ec: 03 xorq %rbx, %r12
185
- 00000000000002ef: 03 addq %r12, %rcx
186
- 00000000000002f2: 07 addq 160(%rdi), %r12
187
- 00000000000002f9: 05 movq 24(%rsp), %rsi
188
- 00000000000002fe: 05 subq 80(%rsp), %rsi
189
- 0000000000000303: 03 xorq %r12, %rax
190
- 0000000000000306: 04 rolq $55, %r12
191
- 000000000000030a: 03 xorq %rbp, %r13
192
- 000000000000030d: 03 addq %r13, %rax
193
- 0000000000000310: 07 addq 168(%rdi), %r13
194
- 0000000000000317: 03 xorq %rcx, %r14
195
- 000000000000031a: 03 xorq %r13, %r12
196
- 000000000000031d: 04 rolq $54, %r13
197
- 0000000000000321: 05 movq %rax, 16(%rsp)
198
- 0000000000000326: 03 xorq %rax, %r15
199
- 0000000000000329: 03 addq %r14, %r12
200
- 000000000000032c: 07 addq 176(%rdi), %r14
201
- 0000000000000333: 03 xorq %r12, %r8
202
- 0000000000000336: 03 xorq %r14, %r13
203
- 0000000000000339: 04 rolq $22, %r14
204
- 000000000000033d: 03 addq %r15, %r13
205
- 0000000000000340: 07 addq 184(%rdi), %r15
206
- 0000000000000347: 03 xorq %r13, %r9
207
- 000000000000034a: 03 xorq %r15, %r14
208
- 000000000000034d: 04 rolq $46, %r15
209
- 0000000000000351: 03 addq %r8, %r14
210
- 0000000000000354: 03 movq %rsi, %rdi
211
- 0000000000000357: 02 jmp 0x4353fe <BloombergLP::bslh::SpookyHashAlgorithmImp::update(void const*, unsigned long)+0x35e>
212
- 0000000000000359: 05 movq 24(%rsp), %rdi
213
- 000000000000035e: 10 movabsq $-6148914691236517205, %rdx
214
- 0000000000000368: 03 movq %rdi, %rax
215
- 000000000000036b: 03 mulq %rdx
216
- 000000000000036e: 04 shrq $6, %rdx
217
- 0000000000000372: 04 leaq (%rdx,%rdx,2), %rsi
218
- 0000000000000376: 04 shlq $5, %rsi
219
- 000000000000037a: 05 movq 40(%rsp), %rax
220
- 000000000000037f: 03 addq %rax, %rsi
221
- 0000000000000382: 03 movq %rax, %rdx
222
- 0000000000000385: 03 subq %rsi, %rdx
223
- 0000000000000388: 03 addq %rdi, %rdx
224
- 000000000000038b: 05 movq %rdx, 24(%rsp)
225
- 0000000000000390: 03 cmpq %rsi, %rax
226
- 0000000000000393: 06 jae 0x43551a <BloombergLP::bslh::SpookyHashAlgorithmImp::update(void const*, unsigned long)+0x47a>
227
- 0000000000000399: 05 movq 16(%rsp), %rdx
228
- 000000000000039e: 02 nop
229
- 00000000000003a0: 03 addq (%rax), %r8
230
- 00000000000003a3: 03 xorq %r8, %r15
231
- 00000000000003a6: 04 rolq $11, %r8
232
- 00000000000003aa: 03 addq %r9, %r15
233
- 00000000000003ad: 04 addq 8(%rax), %r9
234
- 00000000000003b1: 03 xorq %r9, %r8
235
- 00000000000003b4: 04 rolq $32, %r9
236
- 00000000000003b8: 03 xorq %r14, %r11
237
- 00000000000003bb: 03 addq %r11, %r8
238
- 00000000000003be: 04 addq 16(%rax), %r11
239
- 00000000000003c2: 03 xorq %r11, %r9
240
- 00000000000003c5: 04 rolq $43, %r11
241
- 00000000000003c9: 03 xorq %r15, %r10
242
- 00000000000003cc: 03 addq %r10, %r9
243
- 00000000000003cf: 04 addq 24(%rax), %r10
244
- 00000000000003d3: 03 xorq %r10, %r11
245
- 00000000000003d6: 04 rolq $31, %r10
246
- 00000000000003da: 03 xorq %r8, %rbx
247
- 00000000000003dd: 03 addq %rbx, %r11
248
- 00000000000003e0: 04 addq 32(%rax), %rbx
249
- 00000000000003e4: 03 xorq %rbx, %r10
250
- 00000000000003e7: 04 rolq $17, %rbx
251
- 00000000000003eb: 03 xorq %r9, %rbp
252
- 00000000000003ee: 03 addq %rbp, %r10
253
- 00000000000003f1: 04 addq 40(%rax), %rbp
254
- 00000000000003f5: 03 xorq %rbp, %rbx
255
- 00000000000003f8: 04 rolq $28, %rbp
256
- 00000000000003fc: 03 xorq %r11, %rcx
257
- 00000000000003ff: 03 addq %rcx, %rbx
258
- 0000000000000402: 04 addq 48(%rax), %rcx
259
- 0000000000000406: 03 xorq %rcx, %rbp
260
- 0000000000000409: 04 rolq $39, %rcx
261
- 000000000000040d: 03 xorq %r10, %rdx
262
- 0000000000000410: 03 addq %rdx, %rbp
263
- 0000000000000413: 04 addq 56(%rax), %rdx
264
- 0000000000000417: 03 xorq %rdx, %rcx
265
- 000000000000041a: 04 rolq $57, %rdx
266
- 000000000000041e: 03 xorq %rbx, %r12
267
- 0000000000000421: 03 addq %r12, %rcx
268
- 0000000000000424: 04 addq 64(%rax), %r12
269
- 0000000000000428: 03 xorq %r12, %rdx
270
- 000000000000042b: 04 rolq $55, %r12
271
- 000000000000042f: 03 xorq %rbp, %r13
272
- 0000000000000432: 03 addq %r13, %rdx
273
- 0000000000000435: 04 addq 72(%rax), %r13
274
- 0000000000000439: 03 xorq %r13, %r12
275
- 000000000000043c: 04 rolq $54, %r13
276
- 0000000000000440: 03 xorq %rcx, %r14
277
- 0000000000000443: 03 addq %r14, %r12
278
- 0000000000000446: 04 addq 80(%rax), %r14
279
- 000000000000044a: 03 xorq %r14, %r13
280
- 000000000000044d: 04 rolq $22, %r14
281
- 0000000000000451: 03 xorq %rdx, %r15
282
- 0000000000000454: 03 addq %r15, %r13
283
- 0000000000000457: 04 addq 88(%rax), %r15
284
- 000000000000045b: 03 xorq %r12, %r8
285
- 000000000000045e: 03 xorq %r15, %r14
286
- 0000000000000461: 04 rolq $46, %r15
287
- 0000000000000465: 03 xorq %r13, %r9
288
- 0000000000000468: 03 addq %r8, %r14
289
- 000000000000046b: 04 addq $96, %rax
290
- 000000000000046f: 03 cmpq %rax, %rsi
291
- 0000000000000472: 06 ja 0x435440 <BloombergLP::bslh::SpookyHashAlgorithmImp::update(void const*, unsigned long)+0x3a0>
292
- 0000000000000478: 02 jmp 0x43551f <BloombergLP::bslh::SpookyHashAlgorithmImp::update(void const*, unsigned long)+0x47f>
293
- 000000000000047a: 05 movq 16(%rsp), %rdx
294
- 000000000000047f: 05 movq %r8, 64(%rsp)
295
- 0000000000000484: 05 movq %r9, 72(%rsp)
296
- 0000000000000489: 05 movq %r10, 32(%rsp)
297
- 000000000000048e: 05 movq %rbx, 56(%rsp)
298
- 0000000000000493: 05 movq %rcx, 48(%rsp)
299
- 0000000000000498: 05 movq %rdx, 16(%rsp)
300
- 000000000000049d: 05 movq 24(%rsp), %rax
301
- 00000000000004a2: 05 movq 8(%rsp), %rdi
302
- 00000000000004a7: 06 movb %al, 296(%rdi)
303
- 00000000000004ad: 03 movzbl %al, %edx
304
- 00000000000004b0: 03 movq %r11, %rbx
305
- 00000000000004b3: 05 callq 0x404af0 <memcpy@plt>
306
- 00000000000004b8: 05 movq 8(%rsp), %rax
307
- 00000000000004bd: 05 movq 64(%rsp), %rcx
308
- 00000000000004c2: 07 movq %rcx, 192(%rax)
309
- 00000000000004c9: 05 movq 72(%rsp), %rcx
310
- 00000000000004ce: 07 movq %rcx, 200(%rax)
311
- 00000000000004d5: 07 movq %rbx, 208(%rax)
312
- 00000000000004dc: 05 movq 32(%rsp), %rcx
313
- 00000000000004e1: 07 movq %rcx, 216(%rax)
314
- 00000000000004e8: 05 movq 56(%rsp), %rcx
315
- 00000000000004ed: 07 movq %rcx, 224(%rax)
316
- 00000000000004f4: 07 movq %rbp, 232(%rax)
317
- 00000000000004fb: 05 movq 48(%rsp), %rcx
318
- 0000000000000500: 07 movq %rcx, 240(%rax)
319
- 0000000000000507: 05 movq 16(%rsp), %rcx
320
- 000000000000050c: 07 movq %rcx, 248(%rax)
321
- 0000000000000513: 07 movq %r12, 256(%rax)
322
- 000000000000051a: 07 movq %r13, 264(%rax)
323
- 0000000000000521: 07 movq %r14, 272(%rax)
324
- 0000000000000528: 07 movq %r15, 280(%rax)
325
- 000000000000052f: 04 addq $104, %rsp
326
- 0000000000000533: 01 popq %rbx
327
- 0000000000000534: 02 popq %r12
328
- 0000000000000536: 02 popq %r13
329
- 0000000000000538: 02 popq %r14
330
- 000000000000053a: 02 popq %r15
331
- 000000000000053c: 01 popq %rbp
332
- 000000000000053d: 01 retq
333
- 000000000000053e: 02 nop
1
+ 0000000000435070 <BloombergLP::bslh::SpookyHashAlgorithmImp::update(void const*, unsigned long)>:
2
  0000000000000000: 01 pushq %rbp
3
  0000000000000001: 02 pushq %r15
4
  0000000000000003: 02 pushq %r14
5
  0000000000000005: 02 pushq %r13
6
  0000000000000007: 02 pushq %r12
7
  0000000000000009: 01 pushq %rbx
8
+ 000000000000000a: 04 subq $88, %rsp
9
  000000000000000e: 03 movq %rdx, %rbp
10
  0000000000000011: 05 movq %rdi, 8(%rsp)
11
+ 0000000000000016: 07 movzbl 296(%rdi), %edi
12
+ 000000000000001d: 04 leaq (%rdi,%rdx), %rbx
13
  0000000000000021: 07 cmpq $191, %rbx
14
+ 0000000000000028: 02 ja 0x4350bd <BloombergLP::bslh::SpookyHashAlgorithmImp::update(void const*, unsigned long)+0x4d>
15
  000000000000002a: 05 movq 8(%rsp), %r14
16
+ 000000000000002f: 03 addq %r14, %rdi
17
+ 0000000000000032: 03 movq %rbp, %rdx
18
+ 0000000000000035: 05 callq 0x404af0 <memcpy@plt>
19
+ 000000000000003a: 07 addq %rbp, 288(%r14)
20
+ 0000000000000041: 07 movb %bl, 296(%r14)
21
+ 0000000000000048: 05 jmp 0x43557f <BloombergLP::bslh::SpookyHashAlgorithmImp::update(void const*, unsigned long)+0x50f>
22
+ 000000000000004d: 05 movq 8(%rsp), %rcx
23
+ 0000000000000052: 07 movq 288(%rcx), %rax
24
+ 0000000000000059: 07 movq 192(%rcx), %r9
25
+ 0000000000000060: 07 movq 200(%rcx), %r10
26
+ 0000000000000067: 06 cmpq $192, %rax
27
+ 000000000000006d: 05 movq %rbp, 24(%rsp)
28
+ 0000000000000072: 05 movq %rsi, 40(%rsp)
29
+ 0000000000000077: 02 jae 0x435112 <BloombergLP::bslh::SpookyHashAlgorithmImp::update(void const*, unsigned long)+0xa2>
30
+ 0000000000000079: 10 movabsq $-2401053088876216593, %r11
31
+ 0000000000000083: 03 movq %r9, %r8
32
+ 0000000000000086: 05 movq %r10, 32(%rsp)
33
+ 000000000000008b: 03 movq %r11, %rdx
34
+ 000000000000008e: 03 movq %r9, %rcx
35
+ 0000000000000091: 03 movq %r10, %r14
36
+ 0000000000000094: 03 movq %r11, %r13
37
+ 0000000000000097: 03 movq %r9, %r15
38
+ 000000000000009a: 03 movq %r10, %rbx
39
+ 000000000000009d: 03 movq %r11, %r12
40
+ 00000000000000a0: 02 jmp 0x435162 <BloombergLP::bslh::SpookyHashAlgorithmImp::update(void const*, unsigned long)+0xf2>
41
+ 00000000000000a2: 05 movq 8(%rsp), %rsi
42
+ 00000000000000a7: 07 movq 208(%rsi), %r11
43
+ 00000000000000ae: 07 movq 216(%rsi), %r8
44
+ 00000000000000b5: 07 movq 224(%rsi), %rdx
45
+ 00000000000000bc: 05 movq %rdx, 32(%rsp)
46
+ 00000000000000c1: 07 movq 232(%rsi), %rdx
47
+ 00000000000000c8: 07 movq 240(%rsi), %rcx
48
+ 00000000000000cf: 07 movq 248(%rsi), %r14
49
+ 00000000000000d6: 07 movq 256(%rsi), %r13
50
+ 00000000000000dd: 07 movq 264(%rsi), %r15
51
+ 00000000000000e4: 07 movq 272(%rsi), %rbx
52
+ 00000000000000eb: 07 movq 280(%rsi), %r12
53
+ 00000000000000f2: 05 movq %rdx, 16(%rsp)
54
+ 00000000000000f7: 05 addq 24(%rsp), %rax
55
+ 00000000000000fc: 05 movq 8(%rsp), %rdx
56
+ 0000000000000101: 07 movq %rax, 288(%rdx)
57
+ 0000000000000108: 03 testb %dil, %dil
58
+ 000000000000010b: 06 je 0x4353a5 <BloombergLP::bslh::SpookyHashAlgorithmImp::update(void const*, unsigned long)+0x335>
59
+ 0000000000000111: 02 movb $-64, %al
60
+ 0000000000000113: 03 subb %dil, %al
61
+ 0000000000000116: 05 addq 8(%rsp), %rdi
62
+ 000000000000011b: 03 movzbl %al, %edx
63
+ 000000000000011e: 05 movq %rdx, 80(%rsp)
64
+ 0000000000000123: 05 movq 40(%rsp), %rsi
65
+ 0000000000000128: 05 movq %r10, 72(%rsp)
66
+ 000000000000012d: 05 movq %r9, 64(%rsp)
67
+ 0000000000000132: 05 movq %r11, 56(%rsp)
68
+ 0000000000000137: 05 movq %r8, 48(%rsp)
69
+ 000000000000013c: 03 movq %rcx, %rbp
70
+ 000000000000013f: 05 callq 0x404af0 <memcpy@plt>
71
+ 0000000000000144: 03 movq %rbp, %rcx
72
+ 0000000000000147: 05 movq 48(%rsp), %r8
73
+ 000000000000014c: 05 movq 56(%rsp), %r11
74
+ 0000000000000151: 05 movq 64(%rsp), %r9
75
+ 0000000000000156: 05 movq 72(%rsp), %r10
76
+ 000000000000015b: 05 movq 8(%rsp), %rdi
77
+ 0000000000000160: 03 addq (%rdi), %r9
78
+ 0000000000000163: 03 xorq %r9, %r12
79
+ 0000000000000166: 04 rolq $11, %r9
80
+ 000000000000016a: 03 addq %r10, %r12
81
+ 000000000000016d: 04 addq 8(%rdi), %r10
82
+ 0000000000000171: 03 xorq %r10, %r9
83
+ 0000000000000174: 04 rolq $32, %r10
84
+ 0000000000000178: 03 xorq %rbx, %r11
85
+ 000000000000017b: 03 addq %r11, %r9
86
+ 000000000000017e: 04 addq 16(%rdi), %r11
87
+ 0000000000000182: 03 xorq %r11, %r10
88
+ 0000000000000185: 04 rolq $43, %r11
89
+ 0000000000000189: 03 xorq %r12, %r8
90
+ 000000000000018c: 03 addq %r8, %r10
91
+ 000000000000018f: 04 addq 24(%rdi), %r8
92
+ 0000000000000193: 03 xorq %r8, %r11
93
+ 0000000000000196: 04 rolq $31, %r8
94
+ 000000000000019a: 05 movq 32(%rsp), %rbp
95
+ 000000000000019f: 03 xorq %r9, %rbp
96
+ 00000000000001a2: 03 addq %rbp, %r11
97
+ 00000000000001a5: 04 addq 32(%rdi), %rbp
98
+ 00000000000001a9: 03 xorq %rbp, %r8
99
+ 00000000000001ac: 04 rolq $17, %rbp
100
+ 00000000000001b0: 05 movq 16(%rsp), %rax
101
+ 00000000000001b5: 03 xorq %r10, %rax
102
+ 00000000000001b8: 03 addq %rax, %r8
103
+ 00000000000001bb: 04 addq 40(%rdi), %rax
104
+ 00000000000001bf: 03 xorq %rax, %rbp
105
+ 00000000000001c2: 04 rolq $28, %rax
106
+ 00000000000001c6: 03 xorq %r11, %rcx
107
+ 00000000000001c9: 03 addq %rcx, %rbp
108
+ 00000000000001cc: 04 addq 48(%rdi), %rcx
109
+ 00000000000001d0: 03 xorq %rcx, %rax
110
+ 00000000000001d3: 04 rolq $39, %rcx
111
+ 00000000000001d7: 03 xorq %r8, %r14
112
+ 00000000000001da: 03 addq %r14, %rax
113
+ 00000000000001dd: 04 addq 56(%rdi), %r14
114
+ 00000000000001e1: 03 xorq %r14, %rcx
115
+ 00000000000001e4: 04 rolq $57, %r14
116
+ 00000000000001e8: 03 xorq %rbp, %r13
117
+ 00000000000001eb: 03 addq %r13, %rcx
118
+ 00000000000001ee: 04 addq 64(%rdi), %r13
119
+ 00000000000001f2: 03 xorq %r13, %r14
120
+ 00000000000001f5: 04 rolq $55, %r13
121
+ 00000000000001f9: 03 xorq %rax, %r15
122
+ 00000000000001fc: 03 addq %r15, %r14
123
+ 00000000000001ff: 04 addq 72(%rdi), %r15
124
+ 0000000000000203: 03 xorq %r15, %r13
125
+ 0000000000000206: 04 rolq $54, %r15
126
+ 000000000000020a: 03 xorq %rcx, %rbx
127
+ 000000000000020d: 03 addq %rbx, %r13
128
+ 0000000000000210: 04 addq 80(%rdi), %rbx
129
+ 0000000000000214: 03 xorq %rbx, %r15
130
+ 0000000000000217: 04 rolq $22, %rbx
131
+ 000000000000021b: 03 xorq %r14, %r12
132
+ 000000000000021e: 03 addq %r12, %r15
133
+ 0000000000000221: 04 addq 88(%rdi), %r12
134
+ 0000000000000225: 03 xorq %r12, %rbx
135
+ 0000000000000228: 04 rolq $46, %r12
136
+ 000000000000022c: 03 xorq %r13, %r9
137
+ 000000000000022f: 03 addq %r9, %rbx
138
+ 0000000000000232: 04 addq 96(%rdi), %r9
139
+ 0000000000000236: 03 xorq %r9, %r12
140
+ 0000000000000239: 04 rolq $11, %r9
141
+ 000000000000023d: 03 xorq %r15, %r10
142
+ 0000000000000240: 03 addq %r10, %r12
143
+ 0000000000000243: 04 addq 104(%rdi), %r10
144
+ 0000000000000247: 03 xorq %r10, %r9
145
+ 000000000000024a: 04 rolq $32, %r10
146
+ 000000000000024e: 03 xorq %rbx, %r11
147
+ 0000000000000251: 03 addq %r11, %r9
148
+ 0000000000000254: 04 addq 112(%rdi), %r11
149
+ 0000000000000258: 03 xorq %r11, %r10
150
+ 000000000000025b: 04 rolq $43, %r11
151
+ 000000000000025f: 03 xorq %r12, %r8
152
+ 0000000000000262: 03 addq %r8, %r10
153
+ 0000000000000265: 04 addq 120(%rdi), %r8
154
+ 0000000000000269: 03 xorq %r8, %r11
155
+ 000000000000026c: 04 rolq $31, %r8
156
+ 0000000000000270: 03 xorq %r9, %rbp
157
+ 0000000000000273: 03 addq %rbp, %r11
158
+ 0000000000000276: 07 addq 128(%rdi), %rbp
159
+ 000000000000027d: 03 xorq %rbp, %r8
160
+ 0000000000000280: 04 rolq $17, %rbp
161
+ 0000000000000284: 03 xorq %r10, %rax
162
+ 0000000000000287: 03 addq %rax, %r8
163
+ 000000000000028a: 07 addq 136(%rdi), %rax
164
+ 0000000000000291: 03 xorq %rax, %rbp
165
+ 0000000000000294: 04 rolq $28, %rax
166
+ 0000000000000298: 03 xorq %r11, %rcx
167
+ 000000000000029b: 03 addq %rcx, %rbp
168
+ 000000000000029e: 07 addq 144(%rdi), %rcx
169
+ 00000000000002a5: 03 xorq %rcx, %rax
170
+ 00000000000002a8: 04 rolq $39, %rcx
171
+ 00000000000002ac: 03 xorq %r8, %r14
172
+ 00000000000002af: 03 addq %r14, %rax
173
+ 00000000000002b2: 07 addq 152(%rdi), %r14
174
+ 00000000000002b9: 03 xorq %rbp, %r13
175
+ 00000000000002bc: 03 xorq %r14, %rcx
176
+ 00000000000002bf: 04 rolq $57, %r14
177
+ 00000000000002c3: 03 addq %r13, %rcx
178
+ 00000000000002c6: 07 addq 160(%rdi), %r13
179
+ 00000000000002cd: 05 movq 80(%rsp), %rdx
180
+ 00000000000002d2: 05 addq %rdx, 40(%rsp)
181
+ 00000000000002d7: 05 movq 24(%rsp), %rsi
182
+ 00000000000002dc: 03 subq %rdx, %rsi
183
+ 00000000000002df: 03 xorq %r13, %r14
184
+ 00000000000002e2: 04 rolq $55, %r13
185
+ 00000000000002e6: 05 movq %rax, 16(%rsp)
186
+ 00000000000002eb: 03 xorq %rax, %r15
187
+ 00000000000002ee: 03 addq %r15, %r14
188
+ 00000000000002f1: 07 addq 168(%rdi), %r15
189
+ 00000000000002f8: 03 xorq %rcx, %rbx
190
+ 00000000000002fb: 03 xorq %r15, %r13
191
+ 00000000000002fe: 04 rolq $54, %r15
192
+ 0000000000000302: 03 xorq %r14, %r12
193
+ 0000000000000305: 03 addq %rbx, %r13
194
+ 0000000000000308: 07 addq 176(%rdi), %rbx
195
+ 000000000000030f: 03 xorq %r13, %r9
196
+ 0000000000000312: 03 xorq %rbx, %r15
197
+ 0000000000000315: 04 rolq $22, %rbx
198
+ 0000000000000319: 03 addq %r12, %r15
199
+ 000000000000031c: 07 addq 184(%rdi), %r12
200
+ 0000000000000323: 03 xorq %r15, %r10
201
+ 0000000000000326: 03 xorq %r12, %rbx
202
+ 0000000000000329: 04 rolq $46, %r12
203
+ 000000000000032d: 03 addq %r9, %rbx
204
+ 0000000000000330: 03 movq %rsi, %rdi
205
+ 0000000000000333: 02 jmp 0x4353af <BloombergLP::bslh::SpookyHashAlgorithmImp::update(void const*, unsigned long)+0x33f>
206
+ 0000000000000335: 05 movq 32(%rsp), %rbp
207
+ 000000000000033a: 05 movq 24(%rsp), %rdi
208
+ 000000000000033f: 10 movabsq $-6148914691236517205, %rdx
209
+ 0000000000000349: 03 movq %rdi, %rax
210
+ 000000000000034c: 03 mulq %rdx
211
+ 000000000000034f: 04 shrq $6, %rdx
212
+ 0000000000000353: 04 leaq (%rdx,%rdx,2), %rsi
213
+ 0000000000000357: 04 shlq $5, %rsi
214
+ 000000000000035b: 05 movq 40(%rsp), %rax
215
+ 0000000000000360: 03 addq %rax, %rsi
216
+ 0000000000000363: 03 movq %rax, %rdx
217
+ 0000000000000366: 03 subq %rsi, %rdx
218
+ 0000000000000369: 03 addq %rdi, %rdx
219
+ 000000000000036c: 05 movq %rdx, 24(%rsp)
220
+ 0000000000000371: 03 cmpq %rsi, %rax
221
+ 0000000000000374: 06 jae 0x4354ca <BloombergLP::bslh::SpookyHashAlgorithmImp::update(void const*, unsigned long)+0x45a>
222
+ 000000000000037a: 05 movq 16(%rsp), %rdx
223
+ 000000000000037f: 01 nop
224
+ 0000000000000380: 03 addq (%rax), %r9
225
+ 0000000000000383: 03 xorq %r9, %r12
226
+ 0000000000000386: 04 rolq $11, %r9
227
+ 000000000000038a: 03 addq %r10, %r12
228
+ 000000000000038d: 04 addq 8(%rax), %r10
229
+ 0000000000000391: 03 xorq %r10, %r9
230
+ 0000000000000394: 04 rolq $32, %r10
231
+ 0000000000000398: 03 xorq %rbx, %r11
232
+ 000000000000039b: 03 addq %r11, %r9
233
+ 000000000000039e: 04 addq 16(%rax), %r11
234
+ 00000000000003a2: 03 xorq %r11, %r10
235
+ 00000000000003a5: 04 rolq $43, %r11
236
+ 00000000000003a9: 03 xorq %r12, %r8
237
+ 00000000000003ac: 03 addq %r8, %r10
238
+ 00000000000003af: 04 addq 24(%rax), %r8
239
+ 00000000000003b3: 03 xorq %r8, %r11
240
+ 00000000000003b6: 04 rolq $31, %r8
241
+ 00000000000003ba: 03 xorq %r9, %rbp
242
+ 00000000000003bd: 03 addq %rbp, %r11
243
+ 00000000000003c0: 04 addq 32(%rax), %rbp
244
+ 00000000000003c4: 03 xorq %rbp, %r8
245
+ 00000000000003c7: 04 rolq $17, %rbp
246
+ 00000000000003cb: 03 xorq %r10, %rdx
247
+ 00000000000003ce: 03 addq %rdx, %r8
248
+ 00000000000003d1: 04 addq 40(%rax), %rdx
249
+ 00000000000003d5: 03 xorq %rdx, %rbp
250
+ 00000000000003d8: 04 rolq $28, %rdx
251
+ 00000000000003dc: 03 xorq %r11, %rcx
252
+ 00000000000003df: 03 addq %rcx, %rbp
253
+ 00000000000003e2: 04 addq 48(%rax), %rcx
254
+ 00000000000003e6: 03 xorq %rcx, %rdx
255
+ 00000000000003e9: 04 rolq $39, %rcx
256
+ 00000000000003ed: 03 xorq %r8, %r14
257
+ 00000000000003f0: 03 addq %r14, %rdx
258
+ 00000000000003f3: 04 addq 56(%rax), %r14
259
+ 00000000000003f7: 03 xorq %r14, %rcx
260
+ 00000000000003fa: 04 rolq $57, %r14
261
+ 00000000000003fe: 03 xorq %rbp, %r13
262
+ 0000000000000401: 03 addq %r13, %rcx
263
+ 0000000000000404: 04 addq 64(%rax), %r13
264
+ 0000000000000408: 03 xorq %r13, %r14
265
+ 000000000000040b: 04 rolq $55, %r13
266
+ 000000000000040f: 03 xorq %rdx, %r15
267
+ 0000000000000412: 03 addq %r15, %r14
268
+ 0000000000000415: 04 addq 72(%rax), %r15
269
+ 0000000000000419: 03 xorq %r15, %r13
270
+ 000000000000041c: 04 rolq $54, %r15
271
+ 0000000000000420: 03 xorq %rcx, %rbx
272
+ 0000000000000423: 03 addq %rbx, %r13
273
+ 0000000000000426: 04 addq 80(%rax), %rbx
274
+ 000000000000042a: 03 xorq %rbx, %r15
275
+ 000000000000042d: 04 rolq $22, %rbx
276
+ 0000000000000431: 03 xorq %r14, %r12
277
+ 0000000000000434: 03 addq %r12, %r15
278
+ 0000000000000437: 04 addq 88(%rax), %r12
279
+ 000000000000043b: 03 xorq %r13, %r9
280
+ 000000000000043e: 03 xorq %r12, %rbx
281
+ 0000000000000441: 04 rolq $46, %r12
282
+ 0000000000000445: 03 xorq %r15, %r10
283
+ 0000000000000448: 03 addq %r9, %rbx
284
+ 000000000000044b: 04 addq $96, %rax
285
+ 000000000000044f: 03 cmpq %rax, %rsi
286
+ 0000000000000452: 06 ja 0x4353f0 <BloombergLP::bslh::SpookyHashAlgorithmImp::update(void const*, unsigned long)+0x380>
287
+ 0000000000000458: 02 jmp 0x4354cf <BloombergLP::bslh::SpookyHashAlgorithmImp::update(void const*, unsigned long)+0x45f>
288
+ 000000000000045a: 05 movq 16(%rsp), %rdx
289
+ 000000000000045f: 05 movq %r9, 64(%rsp)
290
+ 0000000000000464: 05 movq %r10, 72(%rsp)
291
+ 0000000000000469: 05 movq %r11, 56(%rsp)
292
+ 000000000000046e: 05 movq %r8, 48(%rsp)
293
+ 0000000000000473: 05 movq %rbp, 32(%rsp)
294
+ 0000000000000478: 05 movq %rdx, 16(%rsp)
295
+ 000000000000047d: 05 movq 24(%rsp), %rax
296
+ 0000000000000482: 05 movq 8(%rsp), %rdi
297
+ 0000000000000487: 06 movb %al, 296(%rdi)
298
+ 000000000000048d: 03 movzbl %al, %edx
299
+ 0000000000000490: 03 movq %rcx, %rbp
300
+ 0000000000000493: 05 callq 0x404af0 <memcpy@plt>
301
+ 0000000000000498: 05 movq 8(%rsp), %rax
302
+ 000000000000049d: 05 movq 64(%rsp), %rcx
303
+ 00000000000004a2: 07 movq %rcx, 192(%rax)
304
+ 00000000000004a9: 05 movq 72(%rsp), %rcx
305
+ 00000000000004ae: 07 movq %rcx, 200(%rax)
306
+ 00000000000004b5: 05 movq 56(%rsp), %rcx
307
+ 00000000000004ba: 07 movq %rcx, 208(%rax)
308
+ 00000000000004c1: 05 movq 48(%rsp), %rcx
309
+ 00000000000004c6: 07 movq %rcx, 216(%rax)
310
+ 00000000000004cd: 05 movq 32(%rsp), %rcx
311
+ 00000000000004d2: 07 movq %rcx, 224(%rax)
312
+ 00000000000004d9: 05 movq 16(%rsp), %rcx
313
+ 00000000000004de: 07 movq %rcx, 232(%rax)
314
+ 00000000000004e5: 07 movq %rbp, 240(%rax)
315
+ 00000000000004ec: 07 movq %r14, 248(%rax)
316
+ 00000000000004f3: 07 movq %r13, 256(%rax)
317
+ 00000000000004fa: 07 movq %r15, 264(%rax)
318
+ 0000000000000501: 07 movq %rbx, 272(%rax)
319
+ 0000000000000508: 07 movq %r12, 280(%rax)
320
+ 000000000000050f: 04 addq $88, %rsp
321
+ 0000000000000513: 01 popq %rbx
322
+ 0000000000000514: 02 popq %r12
323
+ 0000000000000516: 02 popq %r13
324
+ 0000000000000518: 02 popq %r14
325
+ 000000000000051a: 02 popq %r15
326
+ 000000000000051c: 01 popq %rbp
327
+ 000000000000051d: 01 retq
328
+ 000000000000051e: 02 nop