# crypto/bn/asm/x86-mont.s
#
# Montgomery multiplication for x86 (i386), AT&T/GAS syntax, SysV cdecl ABI.
# Apparently generated by the CRYPTOGAMS perl script (see trailing banner).
#
# C prototype (per OpenSSL):
#   int bn_mul_mont(BN_ULONG *rp, const BN_ULONG *ap, const BN_ULONG *bp,
#                   const BN_ULONG *np, const BN_ULONG *n0, int num);
#
# Computes rp[] = ap[] * bp[] / 2^(32*num) mod np[] (Montgomery product).
# Returns 1 on success; returns 0 (and does nothing) when num < 4, in which
# case the caller is expected to fall back to a generic code path.
#
# All six arguments are on the stack (cdecl).  %eax, %ecx, %edx and all
# callee-saved registers used (%ebp, %ebx, %esi, %edi) are preserved per ABI
# via push/pop; flags are clobbered.
.file	"crypto/bn/asm/x86-mont.s"
.text
.globl	bn_mul_mont
.type	bn_mul_mont,@function
.align	16
bn_mul_mont:
.L_bn_mul_mont_begin:
	pushl	%ebp
	pushl	%ebx
	pushl	%esi
	pushl	%edi
	xorl	%eax,%eax		# prepare "failure" return value 0
	movl	40(%esp),%edi		# edi = num (6th arg; 16 bytes of pushes + retaddr)
	cmpl	$4,%edi
	jl	.L000just_leave		# num < 4: bail out, return 0

	# Carve a scratch frame below the current stack:
	#   32 bytes of bookkeeping slots + tp[num+2] temporary vector,
	# then de-bias the frame against the caller's argument block
	# (the $2047/$2048 dance keeps the two regions from aliasing in
	# the low 11/12 address bits -- presumably a cache/page-aliasing
	# avoidance; TODO confirm against the generator's notes) and
	# finally align the frame to 64 bytes.
	leal	20(%esp),%esi		# esi = &arg[0] (rp)
	leal	24(%esp),%edx		# edx = &arg[1] (ap)
	movl	%esp,%ebp		# ebp = original %esp, restored at exit
	addl	$2,%edi
	negl	%edi
	leal	-32(%esp,%edi,4),%esp	# esp -= 32 + 4*(num+2)
	negl	%edi
	movl	%esp,%eax
	subl	%edx,%eax
	andl	$2047,%eax
	subl	%eax,%esp		# cancel low 11 bits of (esp - &arg[1])
	xorl	%esp,%edx
	andl	$2048,%edx
	xorl	$2048,%edx
	subl	%edx,%esp		# force bit 11 of esp to differ from &arg[1]
	andl	$-64,%esp		# 64-byte align the scratch frame

	# Stash the arguments in the frame.  Note n0 is passed as a
	# pointer and dereferenced here to its 32-bit value.
	#
	# Frame layout from here on:
	#    0(%esp) : num-1        (squaring-path loop bound)
	#    4(%esp) : rp
	#    8(%esp) : ap
	#   12(%esp) : bp cursor (mul path) / outer index i (sqr path)
	#   16(%esp) : np
	#   20(%esp) : n0 value
	#   24(%esp) : caller's %esp
	#   28(%esp) : &bp[num]     (mul-path outer-loop sentinel)
	#   32(%esp)…: tp[0 .. num+1], the temporary vector
	movl	(%esi),%eax
	movl	4(%esi),%ebx
	movl	8(%esi),%ecx
	movl	12(%esi),%edx
	movl	16(%esi),%esi
	movl	(%esi),%esi		# esi = *n0
	movl	%eax,4(%esp)		# rp
	movl	%ebx,8(%esp)		# ap
	movl	%ecx,12(%esp)		# bp
	movl	%edx,16(%esp)		# np
	movl	%esi,20(%esp)		# n0
	leal	-3(%edi),%ebx		# ebx = num-1 (edi was num+2)
	movl	%ebp,24(%esp)		# saved %esp

	# Dispatch: take the dedicated squaring path iff ap == bp and
	# num is even; otherwise fall through to the general multiply.
	movl	8(%esp),%esi		# esi = ap
	leal	1(%ebx),%ebp		# ebp = num
	movl	12(%esp),%edi		# edi = bp
	xorl	%ecx,%ecx
	movl	%esi,%edx
	andl	$1,%ebp			# ebp = num & 1
	subl	%edi,%edx		# edx = ap - bp
	leal	4(%edi,%ebx,4),%eax	# eax = &bp[num]
	orl	%edx,%ebp		# zero iff ap==bp and num even
	movl	(%edi),%edi		# edi = bp[0]
	jz	.L001bn_sqr_mont
	movl	%eax,28(%esp)		# sentinel for the outer loop
	movl	(%esi),%eax		# eax = ap[0]
	xorl	%edx,%edx

# ---- multiply path, first outer iteration: tp[] = ap[] * bp[0] ----
.align	16
.L002mull:
	movl	%edx,%ebp		# ebp = carry from previous limb
	mull	%edi			# edx:eax = ap[j] * bp[0]
	addl	%eax,%ebp
	leal	1(%ecx),%ecx
	adcl	$0,%edx
	movl	(%esi,%ecx,4),%eax	# prefetch ap[j+1]
	cmpl	%ebx,%ecx
	movl	%ebp,28(%esp,%ecx,4)	# tp[j] = low limb (28 = 32 - 4, index pre-incremented)
	jl	.L002mull
	# last limb j = num-1, then set up the first reduction:
	movl	%edx,%ebp
	mull	%edi
	movl	20(%esp),%edi		# edi = n0
	addl	%ebp,%eax
	movl	16(%esp),%esi		# esi = np
	adcl	$0,%edx
	imull	32(%esp),%edi		# m = tp[0] * n0 mod 2^32
	movl	%eax,32(%esp,%ebx,4)	# tp[num-1]
	xorl	%ecx,%ecx
	movl	%edx,36(%esp,%ebx,4)	# tp[num] = top carry
	movl	%ecx,40(%esp,%ebx,4)	# tp[num+1] = 0
	movl	(%esi),%eax		# eax = np[0]
	mull	%edi			# np[0] * m
	addl	32(%esp),%eax		# + tp[0]: low limb drops out by choice of m
	movl	4(%esi),%eax		# prefetch np[1]
	adcl	$0,%edx
	incl	%ecx
	jmp	.L0032ndmadd

# ---- multiply path, outer iterations i >= 1: tp[] += ap[] * bp[i] ----
.align	16
.L0041stmadd:
	movl	%edx,%ebp		# carry in
	mull	%edi			# ap[j] * bp[i]
	addl	32(%esp,%ecx,4),%ebp	# + tp[j]
	leal	1(%ecx),%ecx
	adcl	$0,%edx
	addl	%eax,%ebp
	movl	(%esi,%ecx,4),%eax	# prefetch ap[j+1]
	adcl	$0,%edx
	cmpl	%ebx,%ecx
	movl	%ebp,28(%esp,%ecx,4)	# tp[j]
	jl	.L0041stmadd
	movl	%edx,%ebp
	mull	%edi
	addl	32(%esp,%ebx,4),%eax	# last limb + tp[num-1]
	movl	20(%esp),%edi		# edi = n0
	adcl	$0,%edx
	movl	16(%esp),%esi		# esi = np
	addl	%eax,%ebp
	adcl	$0,%edx
	imull	32(%esp),%edi		# m = tp[0] * n0 mod 2^32
	xorl	%ecx,%ecx
	addl	36(%esp,%ebx,4),%edx	# fold previous top carry
	movl	%ebp,32(%esp,%ebx,4)
	adcl	$0,%ecx
	movl	(%esi),%eax		# np[0]
	movl	%edx,36(%esp,%ebx,4)	# tp[num]
	movl	%ecx,40(%esp,%ebx,4)	# tp[num+1]
	mull	%edi			# begin reduction: np[0] * m + tp[0]
	addl	32(%esp),%eax
	movl	4(%esi),%eax		# prefetch np[1]
	adcl	$0,%edx
	movl	$1,%ecx

# ---- Montgomery reduction: tp[] = (tp[] + np[]*m) >> 32 ----
# Note the result is written one slot down (24 = 28 - 4), which
# implements the one-limb right shift of the reduction for free.
.align	16
.L0032ndmadd:
	movl	%edx,%ebp		# carry in
	mull	%edi			# np[j] * m
	addl	32(%esp,%ecx,4),%ebp	# + tp[j]
	leal	1(%ecx),%ecx
	adcl	$0,%edx
	addl	%eax,%ebp
	movl	(%esi,%ecx,4),%eax	# prefetch np[j+1]
	adcl	$0,%edx
	cmpl	%ebx,%ecx
	movl	%ebp,24(%esp,%ecx,4)	# tp[j-1] = result (shifted down one limb)
	jl	.L0032ndmadd
	movl	%edx,%ebp
	mull	%edi
	addl	32(%esp,%ebx,4),%ebp	# last limb
	adcl	$0,%edx
	addl	%eax,%ebp
	adcl	$0,%edx
	movl	%ebp,28(%esp,%ebx,4)	# tp[num-2]
	xorl	%eax,%eax
	movl	12(%esp),%ecx		# ecx = current bp position
	addl	36(%esp,%ebx,4),%edx	# + saved top carry
	adcl	40(%esp,%ebx,4),%eax	# + carry of the carry
	leal	4(%ecx),%ecx		# advance to bp[i+1]
	movl	%edx,32(%esp,%ebx,4)	# tp[num-1]
	cmpl	28(%esp),%ecx		# reached &bp[num]?
	movl	%eax,36(%esp,%ebx,4)	# new top carry
	je	.L005common_tail
	movl	(%ecx),%edi		# edi = bp[i+1]
	movl	8(%esp),%esi		# esi = ap
	movl	%ecx,12(%esp)
	xorl	%ecx,%ecx
	xorl	%edx,%edx
	movl	(%esi),%eax		# eax = ap[0]
	jmp	.L0041stmadd

# ---- squaring path (ap == bp, num even): exploits the symmetry of
# a*a by computing each cross product once and doubling it ----
.align	16
.L001bn_sqr_mont:
	movl	%ebx,(%esp)		# save num-1 as inner loop bound
	movl	%ecx,12(%esp)		# outer index i = 0
	movl	%edi,%eax		# edi = eax = ap[0]
	mull	%edi			# ap[0]^2
	movl	%eax,32(%esp)		# tp[0]
	movl	%edx,%ebx
	shrl	$1,%edx			# high limb pre-halved; its low bit ...
	andl	$1,%ebx			# ... is kept in ebx to re-double below
	incl	%ecx
.align	16
.L006sqr:
	movl	(%esi,%ecx,4),%eax	# ap[j]
	movl	%edx,%ebp		# carry in
	mull	%edi			# ap[j] * ap[0]
	addl	%ebp,%eax
	leal	1(%ecx),%ecx
	adcl	$0,%edx
	leal	(%ebx,%eax,2),%ebp	# double the cross product (+ saved bit)
	shrl	$31,%eax		# bit shifted out of the doubling
	cmpl	(%esp),%ecx
	movl	%eax,%ebx		# carry the shifted-out bit
	movl	%ebp,28(%esp,%ecx,4)	# tp[j]
	jl	.L006sqr
	# last limb of row 0, then kick off the reduction:
	movl	(%esi,%ecx,4),%eax
	movl	%edx,%ebp
	mull	%edi
	addl	%ebp,%eax
	movl	20(%esp),%edi		# edi = n0
	adcl	$0,%edx
	movl	16(%esp),%esi		# esi = np
	leal	(%ebx,%eax,2),%ebp
	imull	32(%esp),%edi		# m = tp[0] * n0 mod 2^32
	shrl	$31,%eax
	movl	%ebp,32(%esp,%ecx,4)	# tp[num-1]
	leal	(%eax,%edx,2),%ebp
	movl	(%esi),%eax		# np[0]
	shrl	$31,%edx
	movl	%ebp,36(%esp,%ecx,4)	# tp[num]
	movl	%edx,40(%esp,%ecx,4)	# tp[num+1]
	mull	%edi
	addl	32(%esp),%eax		# np[0]*m + tp[0]
	movl	%ecx,%ebx		# ebx = num-1, reduction loop bound
	adcl	$0,%edx
	movl	4(%esi),%eax		# prefetch np[1]
	movl	$1,%ecx

# ---- reduction for the squaring path; like .L0032ndmadd but
# unrolled two limbs per iteration (num is known to be even) ----
.align	16
.L0073rdmadd:
	movl	%edx,%ebp
	mull	%edi			# np[j] * m
	addl	32(%esp,%ecx,4),%ebp
	adcl	$0,%edx
	addl	%eax,%ebp
	movl	4(%esi,%ecx,4),%eax	# prefetch np[j+1]
	adcl	$0,%edx
	movl	%ebp,28(%esp,%ecx,4)	# tp[j-1] (shift down one limb)
	movl	%edx,%ebp
	mull	%edi			# np[j+1] * m
	addl	36(%esp,%ecx,4),%ebp
	leal	2(%ecx),%ecx
	adcl	$0,%edx
	addl	%eax,%ebp
	movl	(%esi,%ecx,4),%eax	# prefetch np[j+2]
	adcl	$0,%edx
	cmpl	%ebx,%ecx
	movl	%ebp,24(%esp,%ecx,4)	# tp[j]
	jl	.L0073rdmadd
	movl	%edx,%ebp
	mull	%edi
	addl	32(%esp,%ebx,4),%ebp	# last limb
	adcl	$0,%edx
	addl	%eax,%ebp
	adcl	$0,%edx
	movl	%ebp,28(%esp,%ebx,4)
	movl	12(%esp),%ecx		# ecx = outer index i
	xorl	%eax,%eax
	movl	8(%esp),%esi		# esi = ap
	addl	36(%esp,%ebx,4),%edx	# fold saved top carry
	adcl	40(%esp,%ebx,4),%eax
	movl	%edx,32(%esp,%ebx,4)	# tp[num-1]
	cmpl	%ebx,%ecx		# done all num rows?
	movl	%eax,36(%esp,%ebx,4)	# new top carry
	je	.L005common_tail

	# next squaring row i+1: diagonal term ap[i+1]^2 plus doubled
	# cross products ap[j]*ap[i+1] for j > i+1
	movl	4(%esi,%ecx,4),%edi	# edi = ap[i+1]
	leal	1(%ecx),%ecx
	movl	%edi,%eax
	movl	%ecx,12(%esp)		# store new outer index
	mull	%edi			# ap[i+1]^2 (diagonal, NOT doubled)
	addl	32(%esp,%ecx,4),%eax
	adcl	$0,%edx
	movl	%eax,32(%esp,%ecx,4)	# tp[i+1]
	xorl	%ebp,%ebp
	cmpl	%ebx,%ecx
	leal	1(%ecx),%ecx
	je	.L008sqrlast		# no cross products left on the final row
	movl	%edx,%ebx		# as before: keep low bit of the high limb,
	shrl	$1,%edx			# pre-halve it for later doubling
	andl	$1,%ebx
.align	16
.L009sqradd:
	movl	(%esi,%ecx,4),%eax	# ap[j]
	movl	%edx,%ebp
	mull	%edi			# ap[j] * ap[i+1]
	addl	%ebp,%eax
	leal	(%eax,%eax,1),%ebp	# doubled cross product
	adcl	$0,%edx
	shrl	$31,%eax		# bit shifted out by the doubling
	addl	32(%esp,%ecx,4),%ebp	# + tp[j]
	leal	1(%ecx),%ecx
	adcl	$0,%eax
	addl	%ebx,%ebp		# + bit carried from previous limb
	adcl	$0,%eax
	cmpl	(%esp),%ecx
	movl	%ebp,28(%esp,%ecx,4)	# tp[j]
	movl	%eax,%ebx
	jle	.L009sqradd
	# re-double the (pre-halved) final high limb and fold carries:
	movl	%edx,%ebp
	addl	%edx,%edx
	shrl	$31,%ebp
	addl	%ebx,%edx
	adcl	$0,%ebp
.L008sqrlast:
	movl	20(%esp),%edi		# edi = n0
	movl	16(%esp),%esi		# esi = np
	imull	32(%esp),%edi		# m = tp[0] * n0 mod 2^32
	addl	32(%esp,%ecx,4),%edx	# fold row carry into tp top
	movl	(%esi),%eax		# np[0]
	adcl	$0,%ebp
	movl	%edx,32(%esp,%ecx,4)	# tp[num]
	movl	%ebp,36(%esp,%ecx,4)	# tp[num+1]
	mull	%edi			# np[0]*m + tp[0]
	addl	32(%esp),%eax
	leal	-1(%ecx),%ebx		# reduction loop bound
	adcl	$0,%edx
	movl	$1,%ecx
	movl	4(%esi),%eax		# prefetch np[1]
	jmp	.L0073rdmadd

# ---- common tail: conditionally subtract the modulus, store result ----
# The reduced value tp satisfies tp < 2*np (Montgomery invariant), so a
# single conditional subtraction suffices.
.align	16
.L005common_tail:
	movl	16(%esp),%ebp		# ebp = np
	movl	4(%esp),%edi		# edi = rp
	leal	32(%esp),%esi		# esi = tp
	movl	(%esi),%eax		# eax = tp[0]
	movl	%ebx,%ecx		# ecx = num-1 (loop counter)
	xorl	%edx,%edx		# edx = 0 index; xor also clears CF for sbb
.align	16
.L010sub:
	sbbl	(%ebp,%edx,4),%eax	# tp[j] - np[j] - borrow
	movl	%eax,(%edi,%edx,4)	# rp[j] = difference (speculative)
	decl	%ecx			# movl/leal below preserve flags,
	movl	4(%esi,%edx,4),%eax	# so jge still tests this decl
	leal	1(%edx),%edx
	jge	.L010sub
	# eax now holds tp[num] (the top carry word, 0 or 1);
	# subtracting the final borrow yields 0 (tp >= np: keep the
	# subtracted copy in rp) or -1 (tp < np: copy tp instead).
	sbbl	$0,%eax
	andl	%eax,%esi		# esi = borrow ? tp : 0
	notl	%eax
	movl	%edi,%ebp
	andl	%eax,%ebp		# ebp = borrow ? 0 : rp
	orl	%ebp,%esi		# esi = source to copy from
.align	16
.L011copy:
	movl	(%esi,%ebx,4),%eax
	movl	%eax,(%edi,%ebx,4)	# rp[j] = selected result
	movl	%ecx,32(%esp,%ebx,4)	# clobber tp[j] (wipe the temporary vector)
	decl	%ebx
	jge	.L011copy
	movl	24(%esp),%esp		# unwind the scratch frame
	movl	$1,%eax			# success
.L000just_leave:
	popl	%edi
	popl	%esi
	popl	%ebx
	popl	%ebp
	ret
.size	bn_mul_mont,.-.L_bn_mul_mont_begin
# "Montgomery Multiplication for x86, CRYPTOGAMS by <appro@openssl.org>"
.byte	77,111,110,116,103,111,109,101,114,121,32,77,117,108,116,105
.byte	112,108,105,99,97,116,105,111,110,32,102,111,114,32,120,56
.byte	54,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121
.byte	32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46
.byte	111,114,103,62,0