diff --git a/COPYING b/COPYING
new file mode 100644
index 0000000..94a9ed0
--- /dev/null
+++ b/COPYING
@@ -0,0 +1,674 @@
+ GNU GENERAL PUBLIC LICENSE
+ Version 3, 29 June 2007
+
+ Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+ Preamble
+
+ The GNU General Public License is a free, copyleft license for
+software and other kinds of works.
+
+ The licenses for most software and other practical works are designed
+to take away your freedom to share and change the works. By contrast,
+the GNU General Public License is intended to guarantee your freedom to
+share and change all versions of a program--to make sure it remains free
+software for all its users. We, the Free Software Foundation, use the
+GNU General Public License for most of our software; it applies also to
+any other work released this way by its authors. You can apply it to
+your programs, too.
+
+ When we speak of free software, we are referring to freedom, not
+price. Our General Public Licenses are designed to make sure that you
+have the freedom to distribute copies of free software (and charge for
+them if you wish), that you receive source code or can get it if you
+want it, that you can change the software or use pieces of it in new
+free programs, and that you know you can do these things.
+
+ To protect your rights, we need to prevent others from denying you
+these rights or asking you to surrender the rights. Therefore, you have
+certain responsibilities if you distribute copies of the software, or if
+you modify it: responsibilities to respect the freedom of others.
+
+ For example, if you distribute copies of such a program, whether
+gratis or for a fee, you must pass on to the recipients the same
+freedoms that you received. You must make sure that they, too, receive
+or can get the source code. And you must show them these terms so they
+know their rights.
+
+ Developers that use the GNU GPL protect your rights with two steps:
+(1) assert copyright on the software, and (2) offer you this License
+giving you legal permission to copy, distribute and/or modify it.
+
+ For the developers' and authors' protection, the GPL clearly explains
+that there is no warranty for this free software. For both users' and
+authors' sake, the GPL requires that modified versions be marked as
+changed, so that their problems will not be attributed erroneously to
+authors of previous versions.
+
+ Some devices are designed to deny users access to install or run
+modified versions of the software inside them, although the manufacturer
+can do so. This is fundamentally incompatible with the aim of
+protecting users' freedom to change the software. The systematic
+pattern of such abuse occurs in the area of products for individuals to
+use, which is precisely where it is most unacceptable. Therefore, we
+have designed this version of the GPL to prohibit the practice for those
+products. If such problems arise substantially in other domains, we
+stand ready to extend this provision to those domains in future versions
+of the GPL, as needed to protect the freedom of users.
+
+ Finally, every program is threatened constantly by software patents.
+States should not allow patents to restrict development and use of
+software on general-purpose computers, but in those that do, we wish to
+avoid the special danger that patents applied to a free program could
+make it effectively proprietary. To prevent this, the GPL assures that
+patents cannot be used to render the program non-free.
+
+ The precise terms and conditions for copying, distribution and
+modification follow.
+
+ TERMS AND CONDITIONS
+
+ 0. Definitions.
+
+ "This License" refers to version 3 of the GNU General Public License.
+
+ "Copyright" also means copyright-like laws that apply to other kinds of
+works, such as semiconductor masks.
+
+ "The Program" refers to any copyrightable work licensed under this
+License. Each licensee is addressed as "you". "Licensees" and
+"recipients" may be individuals or organizations.
+
+ To "modify" a work means to copy from or adapt all or part of the work
+in a fashion requiring copyright permission, other than the making of an
+exact copy. The resulting work is called a "modified version" of the
+earlier work or a work "based on" the earlier work.
+
+ A "covered work" means either the unmodified Program or a work based
+on the Program.
+
+ To "propagate" a work means to do anything with it that, without
+permission, would make you directly or secondarily liable for
+infringement under applicable copyright law, except executing it on a
+computer or modifying a private copy. Propagation includes copying,
+distribution (with or without modification), making available to the
+public, and in some countries other activities as well.
+
+ To "convey" a work means any kind of propagation that enables other
+parties to make or receive copies. Mere interaction with a user through
+a computer network, with no transfer of a copy, is not conveying.
+
+ An interactive user interface displays "Appropriate Legal Notices"
+to the extent that it includes a convenient and prominently visible
+feature that (1) displays an appropriate copyright notice, and (2)
+tells the user that there is no warranty for the work (except to the
+extent that warranties are provided), that licensees may convey the
+work under this License, and how to view a copy of this License. If
+the interface presents a list of user commands or options, such as a
+menu, a prominent item in the list meets this criterion.
+
+ 1. Source Code.
+
+ The "source code" for a work means the preferred form of the work
+for making modifications to it. "Object code" means any non-source
+form of a work.
+
+ A "Standard Interface" means an interface that either is an official
+standard defined by a recognized standards body, or, in the case of
+interfaces specified for a particular programming language, one that
+is widely used among developers working in that language.
+
+ The "System Libraries" of an executable work include anything, other
+than the work as a whole, that (a) is included in the normal form of
+packaging a Major Component, but which is not part of that Major
+Component, and (b) serves only to enable use of the work with that
+Major Component, or to implement a Standard Interface for which an
+implementation is available to the public in source code form. A
+"Major Component", in this context, means a major essential component
+(kernel, window system, and so on) of the specific operating system
+(if any) on which the executable work runs, or a compiler used to
+produce the work, or an object code interpreter used to run it.
+
+ The "Corresponding Source" for a work in object code form means all
+the source code needed to generate, install, and (for an executable
+work) run the object code and to modify the work, including scripts to
+control those activities. However, it does not include the work's
+System Libraries, or general-purpose tools or generally available free
+programs which are used unmodified in performing those activities but
+which are not part of the work. For example, Corresponding Source
+includes interface definition files associated with source files for
+the work, and the source code for shared libraries and dynamically
+linked subprograms that the work is specifically designed to require,
+such as by intimate data communication or control flow between those
+subprograms and other parts of the work.
+
+ The Corresponding Source need not include anything that users
+can regenerate automatically from other parts of the Corresponding
+Source.
+
+ The Corresponding Source for a work in source code form is that
+same work.
+
+ 2. Basic Permissions.
+
+ All rights granted under this License are granted for the term of
+copyright on the Program, and are irrevocable provided the stated
+conditions are met. This License explicitly affirms your unlimited
+permission to run the unmodified Program. The output from running a
+covered work is covered by this License only if the output, given its
+content, constitutes a covered work. This License acknowledges your
+rights of fair use or other equivalent, as provided by copyright law.
+
+ You may make, run and propagate covered works that you do not
+convey, without conditions so long as your license otherwise remains
+in force. You may convey covered works to others for the sole purpose
+of having them make modifications exclusively for you, or provide you
+with facilities for running those works, provided that you comply with
+the terms of this License in conveying all material for which you do
+not control copyright. Those thus making or running the covered works
+for you must do so exclusively on your behalf, under your direction
+and control, on terms that prohibit them from making any copies of
+your copyrighted material outside their relationship with you.
+
+ Conveying under any other circumstances is permitted solely under
+the conditions stated below. Sublicensing is not allowed; section 10
+makes it unnecessary.
+
+ 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
+
+ No covered work shall be deemed part of an effective technological
+measure under any applicable law fulfilling obligations under article
+11 of the WIPO copyright treaty adopted on 20 December 1996, or
+similar laws prohibiting or restricting circumvention of such
+measures.
+
+ When you convey a covered work, you waive any legal power to forbid
+circumvention of technological measures to the extent such circumvention
+is effected by exercising rights under this License with respect to
+the covered work, and you disclaim any intention to limit operation or
+modification of the work as a means of enforcing, against the work's
+users, your or third parties' legal rights to forbid circumvention of
+technological measures.
+
+ 4. Conveying Verbatim Copies.
+
+ You may convey verbatim copies of the Program's source code as you
+receive it, in any medium, provided that you conspicuously and
+appropriately publish on each copy an appropriate copyright notice;
+keep intact all notices stating that this License and any
+non-permissive terms added in accord with section 7 apply to the code;
+keep intact all notices of the absence of any warranty; and give all
+recipients a copy of this License along with the Program.
+
+ You may charge any price or no price for each copy that you convey,
+and you may offer support or warranty protection for a fee.
+
+ 5. Conveying Modified Source Versions.
+
+ You may convey a work based on the Program, or the modifications to
+produce it from the Program, in the form of source code under the
+terms of section 4, provided that you also meet all of these conditions:
+
+ a) The work must carry prominent notices stating that you modified
+ it, and giving a relevant date.
+
+ b) The work must carry prominent notices stating that it is
+ released under this License and any conditions added under section
+ 7. This requirement modifies the requirement in section 4 to
+ "keep intact all notices".
+
+ c) You must license the entire work, as a whole, under this
+ License to anyone who comes into possession of a copy. This
+ License will therefore apply, along with any applicable section 7
+ additional terms, to the whole of the work, and all its parts,
+ regardless of how they are packaged. This License gives no
+ permission to license the work in any other way, but it does not
+ invalidate such permission if you have separately received it.
+
+ d) If the work has interactive user interfaces, each must display
+ Appropriate Legal Notices; however, if the Program has interactive
+ interfaces that do not display Appropriate Legal Notices, your
+ work need not make them do so.
+
+ A compilation of a covered work with other separate and independent
+works, which are not by their nature extensions of the covered work,
+and which are not combined with it such as to form a larger program,
+in or on a volume of a storage or distribution medium, is called an
+"aggregate" if the compilation and its resulting copyright are not
+used to limit the access or legal rights of the compilation's users
+beyond what the individual works permit. Inclusion of a covered work
+in an aggregate does not cause this License to apply to the other
+parts of the aggregate.
+
+ 6. Conveying Non-Source Forms.
+
+ You may convey a covered work in object code form under the terms
+of sections 4 and 5, provided that you also convey the
+machine-readable Corresponding Source under the terms of this License,
+in one of these ways:
+
+ a) Convey the object code in, or embodied in, a physical product
+ (including a physical distribution medium), accompanied by the
+ Corresponding Source fixed on a durable physical medium
+ customarily used for software interchange.
+
+ b) Convey the object code in, or embodied in, a physical product
+ (including a physical distribution medium), accompanied by a
+ written offer, valid for at least three years and valid for as
+ long as you offer spare parts or customer support for that product
+ model, to give anyone who possesses the object code either (1) a
+ copy of the Corresponding Source for all the software in the
+ product that is covered by this License, on a durable physical
+ medium customarily used for software interchange, for a price no
+ more than your reasonable cost of physically performing this
+ conveying of source, or (2) access to copy the
+ Corresponding Source from a network server at no charge.
+
+ c) Convey individual copies of the object code with a copy of the
+ written offer to provide the Corresponding Source. This
+ alternative is allowed only occasionally and noncommercially, and
+ only if you received the object code with such an offer, in accord
+ with subsection 6b.
+
+ d) Convey the object code by offering access from a designated
+ place (gratis or for a charge), and offer equivalent access to the
+ Corresponding Source in the same way through the same place at no
+ further charge. You need not require recipients to copy the
+ Corresponding Source along with the object code. If the place to
+ copy the object code is a network server, the Corresponding Source
+ may be on a different server (operated by you or a third party)
+ that supports equivalent copying facilities, provided you maintain
+ clear directions next to the object code saying where to find the
+ Corresponding Source. Regardless of what server hosts the
+ Corresponding Source, you remain obligated to ensure that it is
+ available for as long as needed to satisfy these requirements.
+
+ e) Convey the object code using peer-to-peer transmission, provided
+ you inform other peers where the object code and Corresponding
+ Source of the work are being offered to the general public at no
+ charge under subsection 6d.
+
+ A separable portion of the object code, whose source code is excluded
+from the Corresponding Source as a System Library, need not be
+included in conveying the object code work.
+
+ A "User Product" is either (1) a "consumer product", which means any
+tangible personal property which is normally used for personal, family,
+or household purposes, or (2) anything designed or sold for incorporation
+into a dwelling. In determining whether a product is a consumer product,
+doubtful cases shall be resolved in favor of coverage. For a particular
+product received by a particular user, "normally used" refers to a
+typical or common use of that class of product, regardless of the status
+of the particular user or of the way in which the particular user
+actually uses, or expects or is expected to use, the product. A product
+is a consumer product regardless of whether the product has substantial
+commercial, industrial or non-consumer uses, unless such uses represent
+the only significant mode of use of the product.
+
+ "Installation Information" for a User Product means any methods,
+procedures, authorization keys, or other information required to install
+and execute modified versions of a covered work in that User Product from
+a modified version of its Corresponding Source. The information must
+suffice to ensure that the continued functioning of the modified object
+code is in no case prevented or interfered with solely because
+modification has been made.
+
+ If you convey an object code work under this section in, or with, or
+specifically for use in, a User Product, and the conveying occurs as
+part of a transaction in which the right of possession and use of the
+User Product is transferred to the recipient in perpetuity or for a
+fixed term (regardless of how the transaction is characterized), the
+Corresponding Source conveyed under this section must be accompanied
+by the Installation Information. But this requirement does not apply
+if neither you nor any third party retains the ability to install
+modified object code on the User Product (for example, the work has
+been installed in ROM).
+
+ The requirement to provide Installation Information does not include a
+requirement to continue to provide support service, warranty, or updates
+for a work that has been modified or installed by the recipient, or for
+the User Product in which it has been modified or installed. Access to a
+network may be denied when the modification itself materially and
+adversely affects the operation of the network or violates the rules and
+protocols for communication across the network.
+
+ Corresponding Source conveyed, and Installation Information provided,
+in accord with this section must be in a format that is publicly
+documented (and with an implementation available to the public in
+source code form), and must require no special password or key for
+unpacking, reading or copying.
+
+ 7. Additional Terms.
+
+ "Additional permissions" are terms that supplement the terms of this
+License by making exceptions from one or more of its conditions.
+Additional permissions that are applicable to the entire Program shall
+be treated as though they were included in this License, to the extent
+that they are valid under applicable law. If additional permissions
+apply only to part of the Program, that part may be used separately
+under those permissions, but the entire Program remains governed by
+this License without regard to the additional permissions.
+
+ When you convey a copy of a covered work, you may at your option
+remove any additional permissions from that copy, or from any part of
+it. (Additional permissions may be written to require their own
+removal in certain cases when you modify the work.) You may place
+additional permissions on material, added by you to a covered work,
+for which you have or can give appropriate copyright permission.
+
+ Notwithstanding any other provision of this License, for material you
+add to a covered work, you may (if authorized by the copyright holders of
+that material) supplement the terms of this License with terms:
+
+ a) Disclaiming warranty or limiting liability differently from the
+ terms of sections 15 and 16 of this License; or
+
+ b) Requiring preservation of specified reasonable legal notices or
+ author attributions in that material or in the Appropriate Legal
+ Notices displayed by works containing it; or
+
+ c) Prohibiting misrepresentation of the origin of that material, or
+ requiring that modified versions of such material be marked in
+ reasonable ways as different from the original version; or
+
+ d) Limiting the use for publicity purposes of names of licensors or
+ authors of the material; or
+
+ e) Declining to grant rights under trademark law for use of some
+ trade names, trademarks, or service marks; or
+
+ f) Requiring indemnification of licensors and authors of that
+ material by anyone who conveys the material (or modified versions of
+ it) with contractual assumptions of liability to the recipient, for
+ any liability that these contractual assumptions directly impose on
+ those licensors and authors.
+
+ All other non-permissive additional terms are considered "further
+restrictions" within the meaning of section 10. If the Program as you
+received it, or any part of it, contains a notice stating that it is
+governed by this License along with a term that is a further
+restriction, you may remove that term. If a license document contains
+a further restriction but permits relicensing or conveying under this
+License, you may add to a covered work material governed by the terms
+of that license document, provided that the further restriction does
+not survive such relicensing or conveying.
+
+ If you add terms to a covered work in accord with this section, you
+must place, in the relevant source files, a statement of the
+additional terms that apply to those files, or a notice indicating
+where to find the applicable terms.
+
+ Additional terms, permissive or non-permissive, may be stated in the
+form of a separately written license, or stated as exceptions;
+the above requirements apply either way.
+
+ 8. Termination.
+
+ You may not propagate or modify a covered work except as expressly
+provided under this License. Any attempt otherwise to propagate or
+modify it is void, and will automatically terminate your rights under
+this License (including any patent licenses granted under the third
+paragraph of section 11).
+
+ However, if you cease all violation of this License, then your
+license from a particular copyright holder is reinstated (a)
+provisionally, unless and until the copyright holder explicitly and
+finally terminates your license, and (b) permanently, if the copyright
+holder fails to notify you of the violation by some reasonable means
+prior to 60 days after the cessation.
+
+ Moreover, your license from a particular copyright holder is
+reinstated permanently if the copyright holder notifies you of the
+violation by some reasonable means, this is the first time you have
+received notice of violation of this License (for any work) from that
+copyright holder, and you cure the violation prior to 30 days after
+your receipt of the notice.
+
+ Termination of your rights under this section does not terminate the
+licenses of parties who have received copies or rights from you under
+this License. If your rights have been terminated and not permanently
+reinstated, you do not qualify to receive new licenses for the same
+material under section 10.
+
+ 9. Acceptance Not Required for Having Copies.
+
+ You are not required to accept this License in order to receive or
+run a copy of the Program. Ancillary propagation of a covered work
+occurring solely as a consequence of using peer-to-peer transmission
+to receive a copy likewise does not require acceptance. However,
+nothing other than this License grants you permission to propagate or
+modify any covered work. These actions infringe copyright if you do
+not accept this License. Therefore, by modifying or propagating a
+covered work, you indicate your acceptance of this License to do so.
+
+ 10. Automatic Licensing of Downstream Recipients.
+
+ Each time you convey a covered work, the recipient automatically
+receives a license from the original licensors, to run, modify and
+propagate that work, subject to this License. You are not responsible
+for enforcing compliance by third parties with this License.
+
+ An "entity transaction" is a transaction transferring control of an
+organization, or substantially all assets of one, or subdividing an
+organization, or merging organizations. If propagation of a covered
+work results from an entity transaction, each party to that
+transaction who receives a copy of the work also receives whatever
+licenses to the work the party's predecessor in interest had or could
+give under the previous paragraph, plus a right to possession of the
+Corresponding Source of the work from the predecessor in interest, if
+the predecessor has it or can get it with reasonable efforts.
+
+ You may not impose any further restrictions on the exercise of the
+rights granted or affirmed under this License. For example, you may
+not impose a license fee, royalty, or other charge for exercise of
+rights granted under this License, and you may not initiate litigation
+(including a cross-claim or counterclaim in a lawsuit) alleging that
+any patent claim is infringed by making, using, selling, offering for
+sale, or importing the Program or any portion of it.
+
+ 11. Patents.
+
+ A "contributor" is a copyright holder who authorizes use under this
+License of the Program or a work on which the Program is based. The
+work thus licensed is called the contributor's "contributor version".
+
+ A contributor's "essential patent claims" are all patent claims
+owned or controlled by the contributor, whether already acquired or
+hereafter acquired, that would be infringed by some manner, permitted
+by this License, of making, using, or selling its contributor version,
+but do not include claims that would be infringed only as a
+consequence of further modification of the contributor version. For
+purposes of this definition, "control" includes the right to grant
+patent sublicenses in a manner consistent with the requirements of
+this License.
+
+ Each contributor grants you a non-exclusive, worldwide, royalty-free
+patent license under the contributor's essential patent claims, to
+make, use, sell, offer for sale, import and otherwise run, modify and
+propagate the contents of its contributor version.
+
+ In the following three paragraphs, a "patent license" is any express
+agreement or commitment, however denominated, not to enforce a patent
+(such as an express permission to practice a patent or covenant not to
+sue for patent infringement). To "grant" such a patent license to a
+party means to make such an agreement or commitment not to enforce a
+patent against the party.
+
+ If you convey a covered work, knowingly relying on a patent license,
+and the Corresponding Source of the work is not available for anyone
+to copy, free of charge and under the terms of this License, through a
+publicly available network server or other readily accessible means,
+then you must either (1) cause the Corresponding Source to be so
+available, or (2) arrange to deprive yourself of the benefit of the
+patent license for this particular work, or (3) arrange, in a manner
+consistent with the requirements of this License, to extend the patent
+license to downstream recipients. "Knowingly relying" means you have
+actual knowledge that, but for the patent license, your conveying the
+covered work in a country, or your recipient's use of the covered work
+in a country, would infringe one or more identifiable patents in that
+country that you have reason to believe are valid.
+
+ If, pursuant to or in connection with a single transaction or
+arrangement, you convey, or propagate by procuring conveyance of, a
+covered work, and grant a patent license to some of the parties
+receiving the covered work authorizing them to use, propagate, modify
+or convey a specific copy of the covered work, then the patent license
+you grant is automatically extended to all recipients of the covered
+work and works based on it.
+
+ A patent license is "discriminatory" if it does not include within
+the scope of its coverage, prohibits the exercise of, or is
+conditioned on the non-exercise of one or more of the rights that are
+specifically granted under this License. You may not convey a covered
+work if you are a party to an arrangement with a third party that is
+in the business of distributing software, under which you make payment
+to the third party based on the extent of your activity of conveying
+the work, and under which the third party grants, to any of the
+parties who would receive the covered work from you, a discriminatory
+patent license (a) in connection with copies of the covered work
+conveyed by you (or copies made from those copies), or (b) primarily
+for and in connection with specific products or compilations that
+contain the covered work, unless you entered into that arrangement,
+or that patent license was granted, prior to 28 March 2007.
+
+ Nothing in this License shall be construed as excluding or limiting
+any implied license or other defenses to infringement that may
+otherwise be available to you under applicable patent law.
+
+ 12. No Surrender of Others' Freedom.
+
+ If conditions are imposed on you (whether by court order, agreement or
+otherwise) that contradict the conditions of this License, they do not
+excuse you from the conditions of this License. If you cannot convey a
+covered work so as to satisfy simultaneously your obligations under this
+License and any other pertinent obligations, then as a consequence you may
+not convey it at all. For example, if you agree to terms that obligate you
+to collect a royalty for further conveying from those to whom you convey
+the Program, the only way you could satisfy both those terms and this
+License would be to refrain entirely from conveying the Program.
+
+ 13. Use with the GNU Affero General Public License.
+
+ Notwithstanding any other provision of this License, you have
+permission to link or combine any covered work with a work licensed
+under version 3 of the GNU Affero General Public License into a single
+combined work, and to convey the resulting work. The terms of this
+License will continue to apply to the part which is the covered work,
+but the special requirements of the GNU Affero General Public License,
+section 13, concerning interaction through a network will apply to the
+combination as such.
+
+ 14. Revised Versions of this License.
+
+ The Free Software Foundation may publish revised and/or new versions of
+the GNU General Public License from time to time. Such new versions will
+be similar in spirit to the present version, but may differ in detail to
+address new problems or concerns.
+
+ Each version is given a distinguishing version number. If the
+Program specifies that a certain numbered version of the GNU General
+Public License "or any later version" applies to it, you have the
+option of following the terms and conditions either of that numbered
+version or of any later version published by the Free Software
+Foundation. If the Program does not specify a version number of the
+GNU General Public License, you may choose any version ever published
+by the Free Software Foundation.
+
+ If the Program specifies that a proxy can decide which future
+versions of the GNU General Public License can be used, that proxy's
+public statement of acceptance of a version permanently authorizes you
+to choose that version for the Program.
+
+ Later license versions may give you additional or different
+permissions. However, no additional obligations are imposed on any
+author or copyright holder as a result of your choosing to follow a
+later version.
+
+ 15. Disclaimer of Warranty.
+
+ THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
+APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
+HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
+OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
+THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
+IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
+ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+ 16. Limitation of Liability.
+
+ IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
+WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
+THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
+GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
+USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
+DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
+PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
+EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
+SUCH DAMAGES.
+
+ 17. Interpretation of Sections 15 and 16.
+
+ If the disclaimer of warranty and limitation of liability provided
+above cannot be given local legal effect according to their terms,
+reviewing courts shall apply local law that most closely approximates
+an absolute waiver of all civil liability in connection with the
+Program, unless a warranty or assumption of liability accompanies a
+copy of the Program in return for a fee.
+
+ END OF TERMS AND CONDITIONS
+
+ How to Apply These Terms to Your New Programs
+
+ If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+ To do so, attach the following notices to the program. It is safest
+to attach them to the start of each source file to most effectively
+state the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+    <one line to give the program's name and a brief idea of what it does.>
+    Copyright (C) <year>  <name of author>
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+    along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+Also add information on how to contact you by electronic and paper mail.
+
+ If the program does terminal interaction, make it output a short
+notice like this when it starts in an interactive mode:
+
+    <program>  Copyright (C) <year>  <name of author>
+ This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+ This is free software, and you are welcome to redistribute it
+ under certain conditions; type `show c' for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate
+parts of the General Public License. Of course, your program's commands
+might be different; for a GUI interface, you would use an "about box".
+
+ You should also get your employer (if you work as a programmer) or school,
+if any, to sign a "copyright disclaimer" for the program, if necessary.
+For more information on this, and how to apply and follow the GNU GPL, see
+<http://www.gnu.org/licenses/>.
+
+ The GNU General Public License does not permit incorporating your program
+into proprietary programs. If your program is a subroutine library, you
+may consider it more useful to permit linking proprietary applications with
+the library. If this is what you want to do, use the GNU Lesser General
+Public License instead of this License. But first, please read
+<http://www.gnu.org/philosophy/why-not-lgpl.html>.
diff --git a/COPYING.LESSER b/COPYING.LESSER
new file mode 100644
index 0000000..65c5ca8
--- /dev/null
+++ b/COPYING.LESSER
@@ -0,0 +1,165 @@
+ GNU LESSER GENERAL PUBLIC LICENSE
+ Version 3, 29 June 2007
+
+ Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+
+ This version of the GNU Lesser General Public License incorporates
+the terms and conditions of version 3 of the GNU General Public
+License, supplemented by the additional permissions listed below.
+
+ 0. Additional Definitions.
+
+ As used herein, "this License" refers to version 3 of the GNU Lesser
+General Public License, and the "GNU GPL" refers to version 3 of the GNU
+General Public License.
+
+ "The Library" refers to a covered work governed by this License,
+other than an Application or a Combined Work as defined below.
+
+ An "Application" is any work that makes use of an interface provided
+by the Library, but which is not otherwise based on the Library.
+Defining a subclass of a class defined by the Library is deemed a mode
+of using an interface provided by the Library.
+
+ A "Combined Work" is a work produced by combining or linking an
+Application with the Library. The particular version of the Library
+with which the Combined Work was made is also called the "Linked
+Version".
+
+ The "Minimal Corresponding Source" for a Combined Work means the
+Corresponding Source for the Combined Work, excluding any source code
+for portions of the Combined Work that, considered in isolation, are
+based on the Application, and not on the Linked Version.
+
+ The "Corresponding Application Code" for a Combined Work means the
+object code and/or source code for the Application, including any data
+and utility programs needed for reproducing the Combined Work from the
+Application, but excluding the System Libraries of the Combined Work.
+
+ 1. Exception to Section 3 of the GNU GPL.
+
+ You may convey a covered work under sections 3 and 4 of this License
+without being bound by section 3 of the GNU GPL.
+
+ 2. Conveying Modified Versions.
+
+ If you modify a copy of the Library, and, in your modifications, a
+facility refers to a function or data to be supplied by an Application
+that uses the facility (other than as an argument passed when the
+facility is invoked), then you may convey a copy of the modified
+version:
+
+ a) under this License, provided that you make a good faith effort to
+ ensure that, in the event an Application does not supply the
+ function or data, the facility still operates, and performs
+ whatever part of its purpose remains meaningful, or
+
+ b) under the GNU GPL, with none of the additional permissions of
+ this License applicable to that copy.
+
+ 3. Object Code Incorporating Material from Library Header Files.
+
+ The object code form of an Application may incorporate material from
+a header file that is part of the Library. You may convey such object
+code under terms of your choice, provided that, if the incorporated
+material is not limited to numerical parameters, data structure
+layouts and accessors, or small macros, inline functions and templates
+(ten or fewer lines in length), you do both of the following:
+
+ a) Give prominent notice with each copy of the object code that the
+ Library is used in it and that the Library and its use are
+ covered by this License.
+
+ b) Accompany the object code with a copy of the GNU GPL and this license
+ document.
+
+ 4. Combined Works.
+
+ You may convey a Combined Work under terms of your choice that,
+taken together, effectively do not restrict modification of the
+portions of the Library contained in the Combined Work and reverse
+engineering for debugging such modifications, if you also do each of
+the following:
+
+ a) Give prominent notice with each copy of the Combined Work that
+ the Library is used in it and that the Library and its use are
+ covered by this License.
+
+ b) Accompany the Combined Work with a copy of the GNU GPL and this license
+ document.
+
+ c) For a Combined Work that displays copyright notices during
+ execution, include the copyright notice for the Library among
+ these notices, as well as a reference directing the user to the
+ copies of the GNU GPL and this license document.
+
+ d) Do one of the following:
+
+ 0) Convey the Minimal Corresponding Source under the terms of this
+ License, and the Corresponding Application Code in a form
+ suitable for, and under terms that permit, the user to
+ recombine or relink the Application with a modified version of
+ the Linked Version to produce a modified Combined Work, in the
+ manner specified by section 6 of the GNU GPL for conveying
+ Corresponding Source.
+
+ 1) Use a suitable shared library mechanism for linking with the
+ Library. A suitable mechanism is one that (a) uses at run time
+ a copy of the Library already present on the user's computer
+ system, and (b) will operate properly with a modified version
+ of the Library that is interface-compatible with the Linked
+ Version.
+
+ e) Provide Installation Information, but only if you would otherwise
+ be required to provide such information under section 6 of the
+ GNU GPL, and only to the extent that such information is
+ necessary to install and execute a modified version of the
+ Combined Work produced by recombining or relinking the
+ Application with a modified version of the Linked Version. (If
+ you use option 4d0, the Installation Information must accompany
+ the Minimal Corresponding Source and Corresponding Application
+ Code. If you use option 4d1, you must provide the Installation
+ Information in the manner specified by section 6 of the GNU GPL
+ for conveying Corresponding Source.)
+
+ 5. Combined Libraries.
+
+ You may place library facilities that are a work based on the
+Library side by side in a single library together with other library
+facilities that are not Applications and are not covered by this
+License, and convey such a combined library under terms of your
+choice, if you do both of the following:
+
+ a) Accompany the combined library with a copy of the same work based
+ on the Library, uncombined with any other library facilities,
+ conveyed under the terms of this License.
+
+ b) Give prominent notice with the combined library that part of it
+ is a work based on the Library, and explaining where to find the
+ accompanying uncombined form of the same work.
+
+ 6. Revised Versions of the GNU Lesser General Public License.
+
+ The Free Software Foundation may publish revised and/or new versions
+of the GNU Lesser General Public License from time to time. Such new
+versions will be similar in spirit to the present version, but may
+differ in detail to address new problems or concerns.
+
+ Each version is given a distinguishing version number. If the
+Library as you received it specifies that a certain numbered version
+of the GNU Lesser General Public License "or any later version"
+applies to it, you have the option of following the terms and
+conditions either of that published version or of any later version
+published by the Free Software Foundation. If the Library as you
+received it does not specify a version number of the GNU Lesser
+General Public License, you may choose any version of the GNU Lesser
+General Public License ever published by the Free Software Foundation.
+
+ If the Library as you received it specifies that a proxy can decide
+whether future versions of the GNU Lesser General Public License shall
+apply, that proxy's public statement of acceptance of any version is
+permanent authorization for you to choose that version for the
+Library.
diff --git a/README b/README
new file mode 100644
index 0000000..c63333e
--- /dev/null
+++ b/README
@@ -0,0 +1,40 @@
+Sphinx v0.8 README
+2011-03-06
+Ian Goldberg
+
+Except for the public-domain curve25519 portions by Dan Bernstein, this
+software is licensed as follows:
+
+# Copyright 2011 Ian Goldberg
+#
+# This file is part of Sphinx.
+#
+# Sphinx is free software: you can redistribute it and/or modify
+# it under the terms of version 3 of the GNU Lesser General Public
+# License as published by the Free Software Foundation.
+#
+# Sphinx is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with Sphinx. If not, see
+# <http://www.gnu.org/licenses/>.
+
+The LIONESS implementation and the xcounter CTR mode class are adapted
+from "Experimental implementation of the sphinx cryptographic mix packet
+format by George Danezis".
+
+Enabling ECC (curve25519) support:
+
+- You'll need swig, as well as a populated /usr/include/python2.6
+ directory.
+- cd curve25519 && make && cd ..
+
+Testing:
+
+- ./SphinxClient.py or ./SphinxClient.py -ecc
+
+The latter is smaller and faster, but requires curve25519 support to be
+enabled.
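+
+Using the library from Python (a rough sketch of the same steps the
+__main__ block of SphinxClient.py performs; every name used here is
+defined in this package):
+
+    from SphinxParams import SphinxParams
+    from SphinxNode import SphinxNode
+    from SphinxClient import rand_subset, create_forward_message
+
+    r = 5
+    params = SphinxParams(r)              # pass ecc=True for curve25519
+    for i in xrange(2*r):                 # set up some mix nodes
+        SphinxNode(params)
+
+    # Pick a random path of r nodes, build a forward message, and hand
+    # it to the first hop for onion processing
+    use_nodes = rand_subset(params.pki.keys(), r)
+    header, delta = create_forward_message(params, use_nodes, "dest",
+                                           "this is a test")
+    params.pki[use_nodes[0]].process(header, delta)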
diff --git a/SphinxClient.py b/SphinxClient.py
new file mode 100755
index 0000000..154dd04
--- /dev/null
+++ b/SphinxClient.py
@@ -0,0 +1,188 @@
+#!/usr/bin/env python
+
+# Copyright 2011 Ian Goldberg
+#
+# This file is part of Sphinx.
+#
+# Sphinx is free software: you can redistribute it and/or modify
+# it under the terms of version 3 of the GNU Lesser General Public
+# License as published by the Free Software Foundation.
+#
+# Sphinx is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with Sphinx. If not, see
+# <http://www.gnu.org/licenses/>.
+
+import sys
+import os
+from SphinxParams import SphinxParams
+from SphinxNode import SphinxNode, Denc, Dspec, pad_body, unpad_body
+from SphinxNymserver import Nymserver
+
+def rand_subset(lst, nu):
+ """Return a list of nu random elements of the given list (without
+ replacement)."""
+
+ # Randomize the order of the list by sorting on a random key
+ nodeids = [(os.urandom(8),x) for x in lst]
+ nodeids.sort(key=lambda x:x[0])
+
+ # Return the first nu elements of the randomized list
+ return map(lambda x:x[1], nodeids[:nu])
+
+
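+# Build a Sphinx header for the given path.  Returns the tuple
+# ((alpha, beta, gamma), [s_0, ..., s_{nu-1}]): the header handed to the
+# first hop, plus the per-hop Diffie-Hellman shared secrets the caller
+# needs in order to wrap or unwrap the message body.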
+def create_header(params, nodelist, dest, id):
+ p = params
+ pki = p.pki
+ nu = len(nodelist)
+ assert nu <= p.r
+ assert len(id) == p.k
+ assert len(dest) <= 2 * (p.r - nu + 1) * p.k
+ group = p.group
+ x = group.gensecret()
+
+ # Compute the (alpha, s, b) tuples
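+    # For each hop: alpha is the blinded group element that hop will see,
+    # s is the Diffie-Hellman shared secret with that hop, and b is the
+    # blinding factor folded into the exponent for the next hop.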
+ blinds = [x]
+ asbtuples = []
+ for node in nodelist:
+ alpha = group.multiexpon(group.g, blinds)
+ s = group.multiexpon(pki[node].y, blinds)
+ b = p.hb(alpha,s)
+ blinds.append(b)
+ asbtuples.append({ 'alpha': alpha, 's': s, 'b': b})
+
+ # Compute the filler strings
+ phi = ''
+ for i in xrange(1,nu):
+ min = (2*(p.r-i)+3)*p.k
+ phi = p.xor(phi + ("\x00" * (2*p.k)),
+ p.rho(p.hrho(asbtuples[i-1]['s']))[min:])
+ # print i,phi.encode("hex")
+
+ # Compute the (beta, gamma) tuples
+    # This padding used to be a string of 0x00 bytes, but that's wrong
+ beta = dest + id + os.urandom(((2 * (p.r - nu) + 2)*p.k - len(dest)))
+ beta = p.xor(beta,
+ p.rho(p.hrho(asbtuples[nu-1]['s']))[:(2*(p.r-nu)+3)*p.k]) + phi
+ gamma = p.mu(p.hmu(asbtuples[nu-1]['s']), beta)
+ # print "s =", group.printable(asbtuples[i]['s'])
+ # print "beta = ", beta.encode("hex")
+ # print "gamma = ", gamma.encode("hex")
+ for i in xrange(nu-2, -1, -1):
+ id = nodelist[i+1]
+ assert len(id) == p.k
+ beta = p.xor(id + gamma + beta[:(2*p.r-1)*p.k],
+ p.rho(p.hrho(asbtuples[i]['s']))[:(2*p.r+1)*p.k])
+ gamma = p.mu(p.hmu(asbtuples[i]['s']), beta)
+ # print pki[id].name
+ # print "s =", group.printable(asbtuples[i]['s'])
+ # print "beta = ", beta.encode("hex")
+ # print "gamma = ", gamma.encode("hex")
+
+ return (asbtuples[0]['alpha'], beta, gamma), \
+ [x['s'] for x in asbtuples]
+
+
+def create_forward_message(params, nodelist, dest, msg):
+ p = params
+ pki = p.pki
+ nu = len(nodelist)
+ assert len(dest) < 128 and len(dest) > 0
+ assert p.k + 1 + len(dest) + len(msg) < p.m
+
+ # Compute the header and the secrets
+ header, secrets = create_header(params, nodelist, Dspec,
+ "\x00" * p.k)
+
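+    # The body is k zero bytes (checked by the exit node), the
+    # prefix-free-encoded destination, and the message, padded out to
+    # m bytes.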
+ body = pad_body(p.m, ("\x00" * p.k) + Denc(dest) + msg)
+
+ # Compute the delta values
+ delta = p.pi(p.hpi(secrets[nu-1]), body)
+ for i in xrange(nu-2, -1, -1):
+ delta = p.pi(p.hpi(secrets[i]), delta)
+
+ return header, delta
+
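+# Build a single-use reply block (SURB).  The client keeps (id, keytuple)
+# so it can later unwrap replies; nymtuple (first node, header, ktilde)
+# is what gets handed to the nymserver.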
+def create_surb(params, nodelist, dest):
+ p = params
+ pki = p.pki
+ nu = len(nodelist)
+ id = os.urandom(p.k)
+
+ # Compute the header and the secrets
+ header, secrets = create_header(params, nodelist, Denc(dest), id)
+
+ ktilde = os.urandom(p.k)
+ keytuple = [ktilde]
+ keytuple.extend(map(p.hpi, secrets))
+ return id, keytuple, (nodelist[0], header, ktilde)
+
+
+class SphinxClient:
+ def __init__(self, params):
+ self.id = "Client " + os.urandom(4).encode("hex")
+ self.params = params
+ params.clients[self.id] = self
+ self.keytable = {}
+
+ def create_nym(self, nym, nllength):
+ """Create a SURB for the given nym (passing through nllength
+ nodes), and send it to the nymserver."""
+
+ # Pick the list of nodes to use
+ nodelist = rand_subset(self.params.pki.keys(), nllength)
+ id, keytuple, nymtuple = create_surb(self.params, nodelist, self.id)
+
+ self.keytable[id] = keytuple
+ self.params.nymserver.add_surb(nym, nymtuple)
+
+ def process(self, id, delta):
+ "Process a (still-encrypted) reply message"
+ p = self.params
+ keytuple = self.keytable.pop(id, None)
+ if keytuple == None:
+ print "Unreadable reply message received by [%s]" % self.id
+ return
+
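+        # The nymserver applied pi(ktilde, .) to the padded reply and each
+        # hop then applied pii with its own key, so unwrap by re-applying
+        # pi with the hop keys (last hop first) and finally pii with ktilde.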
+ ktilde = keytuple.pop(0)
+ nu = len(keytuple)
+ for i in xrange(nu-1, -1, -1):
+ delta = p.pi(keytuple[i], delta)
+ delta = p.pii(ktilde, delta)
+
+ if delta[:p.k] == ("\x00" * p.k):
+ msg = unpad_body(delta[p.k:])
+ print "[%s] received by [%s]" % (msg, self.id)
+ else:
+ print "Corrupted message received by [%s]" % self.id
+
+if __name__ == '__main__':
+ use_ecc = (len(sys.argv) > 1 and sys.argv[1] == "-ecc")
+ r = 5
+ params = SphinxParams(r, ecc=use_ecc)
+
+ # Create some nodes
+ for i in xrange(2*r):
+ SphinxNode(params)
+
+ # Create a client
+ client = SphinxClient(params)
+
+ # Pick a list of nodes to use
+ use_nodes = rand_subset(params.pki.keys(), r)
+
+ header, delta = create_forward_message(params, use_nodes, "dest", \
+ "this is a test")
+
+ # Send it to the first node for processing
+ params.pki[use_nodes[0]].process(header, delta)
+
+ # Create a reply block for the client
+ client.create_nym("cypherpunk", r)
+
+ # Send a message to it
+ params.nymserver.send_to_nym("cypherpunk", "this is a reply")
diff --git a/SphinxNode.py b/SphinxNode.py
new file mode 100755
index 0000000..2689e20
--- /dev/null
+++ b/SphinxNode.py
@@ -0,0 +1,146 @@
+#!/usr/bin/env python
+
+# Copyright 2011 Ian Goldberg
+#
+# This file is part of Sphinx.
+#
+# Sphinx is free software: you can redistribute it and/or modify
+# it under the terms of version 3 of the GNU Lesser General Public
+# License as published by the Free Software Foundation.
+#
+# Sphinx is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with Sphinx. If not, see
+# <http://www.gnu.org/licenses/>.
+
+import os
+import re
+
+# Padding/unpadding of message bodies: a 0 bit, followed by as many 1
+# bits as it takes to fill it up
+
+def pad_body(msgtotalsize, body):
+ body = body + "\x7f"
+ body = body + ("\xff" * (msgtotalsize - len(body)))
+ return body
+
+def unpad_body(body):
+ return re.compile("\x7f\xff*$").sub('',body)
+
+# Prefix-free encoding/decoding of node names and destinations
+
+# The special destination
+Dspec = "\x00"
+
+# Any other destination. Must be between 1 and 127 bytes in length
+def Denc(dest):
+ assert len(dest) >= 1 and len(dest) <= 127
+ return chr(len(dest)) + dest
+
+# Sphinx nodes
+
+class SphinxNode:
+ def __Nenc(self, idnum):
+ id = "\xff" + idnum + ("\x00" * (self.p.k - len(idnum) - 1))
+ assert len(id) == self.p.k
+ return id
+
+ # Decode the prefix-free encoding. Return the type, value, and the
+ # remainder of the input string
+ def __PFdecode(self, s):
+ if s == "": return None, None, None
+ if s[0] == '\x00': return 'Dspec', None, s[1:]
+ if s[0] == '\xff': return 'node', s[:self.p.k], s[self.p.k:]
+ l = ord(s[0])
+ if l < 128: return 'dest', s[1:l+1], s[l+1:]
+ return None, None, None
+
+ def __init__(self, params):
+ self.p = params
+ group = self.p.group
+ self.__x = group.gensecret()
+ self.y = group.expon(group.g, self.__x)
+ idnum = os.urandom(4)
+ self.id = self.__Nenc(idnum)
+ self.name = "Node " + idnum.encode("hex")
+ self.seen = {}
+ params.pki[self.id] = self
+
+ def process(self, header, delta):
+ print "Processing at", self.name
+ p = self.p
+ pki = p.pki
+ group = p.group
+ alpha, beta, gamma = header
+
+ # Check that alpha is in the group
+ if not group.in_group(alpha):
+ return
+
+ # Compute the shared secret
+ s = group.expon(alpha, self.__x)
+
+ # Have we seen it already?
+ tag = p.htau(s)
+
+ if tag in self.seen:
+ return
+
+ if gamma != p.mu(p.hmu(s), beta):
+ print "MAC mismatch!"
+ print "alpha =", group.printable(alpha)
+ print "s =", group.printable(s)
+ print "beta =", beta.encode("hex")
+ print "gamma =", gamma.encode("hex")
+ return
+
+ self.seen[tag] = 1
+
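+        # Pad beta with 2k zero bytes before XORing with the rho stream,
+        # so the (beta, gamma) passed to the next hop still fill a
+        # full-length header.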
+ B = p.xor(beta + ("\x00" * (2 * p.k)), p.rho(p.hrho(s)))
+
+ type, val, rest = self.__PFdecode(B)
+
+ if type == "node":
+ print "Next hop is", pki[val].name
+ b = p.hb(alpha, s)
+ alpha = group.expon(alpha, b)
+ gamma = B[p.k:p.k*2]
+ beta = B[p.k*2:]
+ delta = p.pii(p.hpi(s), delta)
+ return pki[val].process((alpha, beta, gamma), delta)
+
+ if type == "Dspec":
+ # Uncomment the following to see what the exit node sees
+ # print ' '.join(["%02x"%ord(x) for x in B])
+ delta = p.pii(p.hpi(s), delta)
+ if delta[:p.k] == ("\x00" * p.k):
+ type, val, rest = self.__PFdecode(delta[p.k:])
+ if type == "dest":
+ # We're to deliver rest (unpadded) to val
+ body = unpad_body(rest)
+ print "Deliver [%s] to [%s]" % (body, val)
+ return
+
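+        # A "dest" at this layer means we are the exit hop of a reply
+        # block: strip this hop's layer of delta and hand it, still
+        # encrypted, to the destination client along with the reply id.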
+ if type == "dest":
+ id = rest[:p.k]
+ delta = p.pii(p.hpi(s), delta)
+ print "Deliver reply message to [%s]" % val
+ if val in p.clients:
+ return p.clients[val].process(id, delta)
+ else:
+ print "No such client [%s]" % val
+ return
+
+if __name__ == '__main__':
+
+ from SphinxParams import SphinxParams
+
+ p = SphinxParams()
+ n = SphinxNode(p)
+
+ print "name =", n.name
+ print "y =", n.y
diff --git a/SphinxNymserver.py b/SphinxNymserver.py
new file mode 100755
index 0000000..c49920c
--- /dev/null
+++ b/SphinxNymserver.py
@@ -0,0 +1,44 @@
+#!/usr/bin/env python
+
+# Copyright 2011 Ian Goldberg
+#
+# This file is part of Sphinx.
+#
+# Sphinx is free software: you can redistribute it and/or modify
+# it under the terms of version 3 of the GNU Lesser General Public
+# License as published by the Free Software Foundation.
+#
+# Sphinx is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with Sphinx. If not, see
+# <http://www.gnu.org/licenses/>.
+
+from SphinxNode import pad_body
+
+class Nymserver:
+ def __init__(self, params):
+ self.params = params
+ self.database = {}
+
+ def add_surb(self, nym, nymtuple):
+ db = self.database
+ if nym in db:
+ db[nym].append(nymtuple)
+ else:
+ db[nym] = [nymtuple]
+
+ def send_to_nym(self, nym, message):
+ p = self.params
+ pki = p.pki
+ db = self.database
+ print "Nymserver received message for [%s]" % nym
+ if nym in db and len(db[nym]) > 0:
+ n0, header0, ktilde = db[nym].pop(0)
+ body = p.pi(ktilde, pad_body(p.m, ("\x00" * p.k) + message))
+ pki[n0].process(header0, body)
+ else:
+ print "No SURBs available for nym [%s]" % nym
diff --git a/SphinxParams.py b/SphinxParams.py
new file mode 100755
index 0000000..d9f767e
--- /dev/null
+++ b/SphinxParams.py
@@ -0,0 +1,242 @@
+#!/usr/bin/env python
+
+# Copyright 2011 Ian Goldberg
+#
+# This file is part of Sphinx.
+#
+# Sphinx is free software: you can redistribute it and/or modify
+# it under the terms of version 3 of the GNU Lesser General Public
+# License as published by the Free Software Foundation.
+#
+# Sphinx is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with Sphinx. If not, see
+# <http://www.gnu.org/licenses/>.
+#
+# The LIONESS implementation and the xcounter CTR mode class are adapted
+# from "Experimental implementation of the sphinx cryptographic mix
+# packet format by George Danezis".
+
+import os
+from SphinxNymserver import Nymserver
+
+try:
+ from Crypto.Cipher import AES
+ from Crypto.Hash import SHA256, HMAC
+ from Crypto.Util import number
+except:
+ print "\n\n*** You need to install the Python Cryptography Toolkit. ***\n\n"
+ raise
+
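+# The curvedh module (basepoint, makesecret, curvedh) is presumably the
+# SWIG wrapper built by "make" in the curve25519 directory (see README).
+# If it is missing, only the mod-p group below is usable.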
+try:
+ from curvedh import *
+except:
+ pass
+
+class Group_p:
+ "Group operations mod p"
+
+ def __init__(self):
+ # A 2048-bit prime
+ self.__p = 19134104382515471340121383082934308828788465164876922483018046665189568608385336521385528455852870226729419515782445769946311524543401780679763787388729547181989737060289407062479214017446428251157469940819568673215805731815521523529008837868909929585628774673216239536406270201585439559139691697966359990510412034461369768357756615060575177060679433618196595458284826534928911045879135540240765445688036648761768417624100416438042808407759355983611319236017991473072964105392335897160201662655194201702312372678481213560443558381777521284259428911914008097936688649209670009892790669991823472515537714171774700422727L
+
+ # A 256-bit prime. q | p-1, and (p-1)/(2q) is also prime
+ self.__q = 106732665057690615308701680462846682779480968671143352109289849544853387479559L
+
+ # A generator of the 256-bit subgroup of order q
+ self.g = 4841394417863494412227539373591815072221868474834407003108964621656948087607533132014406209384264001860614005413470474998618595063750798301826341774223008476018405743602814857378470614748174056572493655989586557587396511347276474665778845699406935799833636365083206218330593315513720711460353255243954204178057633122609221947354829869069875474221603457407347332029203573680170785191212685833773827500371044142146648183369300927714600114538209692069873794191715382617278768149594654315895296485533292574866819385073141870483659577707892565451842181763727355979252885729688362656338077037492411991956527093735651034592L
+
+ def gensecret(self):
+ return number.bytes_to_long(os.urandom(256)) % self.__q
+
+ def expon(self, base, exp):
+ return pow(base, exp, self.__p)
+
+ def multiexpon(self, base, exps):
+ return pow(base, reduce(lambda x,y: x*y % self.__q, exps), self.__p)
+
+ def makeexp(self, data):
+ return number.bytes_to_long(data) % self.__q
+
+ def in_group(self, alpha):
+ return alpha > 1 and alpha < (self.__p - 1) and \
+ pow(alpha, self.__q, self.__p) == 1
+
+ def printable(self, alpha):
+ return str(alpha)
+
+class Group_ECC:
+ "Group operations in ECC"
+
+ def __init__(self):
+
+ self.g = basepoint()
+
+ def gensecret(self):
+ return makesecret(os.urandom(32))
+
+ def expon(self, base, exp):
+ return curvedh(exp, base)
+
+ def multiexpon(self, base, exps):
+ baseandexps = [base]
+ baseandexps.extend(exps)
+ return reduce(self.expon, baseandexps)
+
+ def makeexp(self, data):
+ assert len(data) == 32
+ return makesecret(data)
+
+ def in_group(self, alpha):
+ # All strings of length 32 are in the group, says DJB
+ return len(alpha) == 32
+
+ def printable(self, alpha):
+ return alpha.encode("hex")
+
+class SphinxParams:
+ k = 16 # in bytes, == 128 bits
+ m = 1024 # size of message body, in bytes
+ pki = {} # mapping of node id to node
+ clients = {} # mapping of destinations to clients
+
+ def __init__(self, r=5, ecc=False):
+ self.r = r
+ if ecc:
+ self.group = Group_ECC()
+ else:
+ self.group = Group_p()
+
+ self.nymserver = Nymserver(self)
+
+ def xor(self, str1, str2):
+ # XOR two strings
+ assert len(str1) == len(str2)
+ return HMAC._strxor(str1,str2)
+
+ class xcounter:
+ # Implements a string counter to do AES-CTR mode
+ i = 0
+ def __init__(self, size):
+ self.size = size
+
+ def __call__(self):
+ ii = number.long_to_bytes(self.i)
+ ii = '\x00' * (self.size-len(ii)) + ii
+ self.i += 1
+ return ii
+
+ # The LIONESS PRP
+
+ def lioness_enc(self, key, message):
+ assert len(key) == self.k
+ assert len(message) >= self.k * 2
+ # Round 1
+ r1 = self.xor(self.hash(message[self.k:]+key+'1')[:self.k],
+ message[:self.k]) + message[self.k:]
+
+ # Round 2
+ k2 = self.xor(r1[:self.k], key)
+ c = AES.new(k2, AES.MODE_CTR, counter=self.xcounter(self.k))
+ r2 = r1[:self.k] + c.encrypt(r1[self.k:])
+
+ # Round 3
+ r3 = self.xor(self.hash(r2[self.k:]+key+'3')[:self.k], r2[:self.k]) + r2[self.k:]
+
+ # Round 4
+ k4 = self.xor(r3[:self.k], key)
+ c = AES.new(k4, AES.MODE_CTR, counter=self.xcounter(self.k))
+ r4 = r3[:self.k] + c.encrypt(r3[self.k:])
+
+ return r4
+
+ def lioness_dec(self, key, message):
+ assert len(key) == self.k
+ assert len(message) >= self.k * 2
+
+ r4 = message
+
+ # Round 4
+ k4 = self.xor(r4[:self.k], key)
+ c = AES.new(k4, AES.MODE_CTR, counter=self.xcounter(self.k))
+ r3 = r4[:self.k] + c.encrypt(r4[self.k:])
+
+ # Round 3
+ r2 = self.xor(self.hash(r3[self.k:]+key+'3')[:self.k], r3[:self.k]) + r3[self.k:]
+
+ # Round 2
+ k2 = self.xor(r2[:self.k], key)
+ c = AES.new(k2, AES.MODE_CTR, counter=self.xcounter(self.k))
+ r1 = r2[:self.k] + c.encrypt(r2[self.k:])
+
+ # Round 1
+ r0 = self.xor(self.hash(r1[self.k:]+key+'1')[:self.k], r1[:self.k]) + r1[self.k:]
+
+ return r0
+
+ # The PRG; key is of length k, output is of length (2r+3)k
+ def rho(self, key):
+ assert len(key) == self.k
+ c = AES.new(key, AES.MODE_CTR, counter=self.xcounter(self.k))
+ return c.encrypt("\x00" * ( (2 * self.r + 3) * self.k ))
+
+ # The HMAC; key is of length k, output is of length k
+ def mu(self, key, data):
+ m = HMAC.new(key, msg=data, digestmod=SHA256)
+ return m.digest()[:self.k]
+
+ # The PRP; key is of length k, data is of length m
+ def pi(self, key, data):
+ assert len(key) == self.k
+ assert len(data) == self.m
+
+ return self.lioness_enc(key, data)
+
+ # The inverse PRP; key is of length k, data is of length m
+ def pii(self, key, data):
+ assert len(key) == self.k
+ assert len(data) == self.m
+
+ return self.lioness_dec(key, data)
+
+ # The various hashes
+
+ def hash(self, data):
+ h = SHA256.new()
+ h.update(data)
+ return h.digest()
+
+ def hb(self, alpha, s):
+ "Compute a hash of alpha and s to use as a blinding factor"
+ group = self.group
+ return group.makeexp(self.hash("hb:" + group.printable(alpha)
+ + " , " + group.printable(s)))
+
+ def hrho(self, s):
+ "Compute a hash of s to use as a key for the PRG rho"
+ group = self.group
+ return (self.hash("hrho:" + group.printable(s)))[:self.k]
+
+ def hmu(self, s):
+ "Compute a hash of s to use as a key for the HMAC mu"
+ group = self.group
+ return (self.hash("hmu:" + group.printable(s)))[:self.k]
+
+ def hpi(self, s):
+ "Compute a hash of s to use as a key for the PRP pi"
+ group = self.group
+ return (self.hash("hpi:" + group.printable(s)))[:self.k]
+
+ def htau(self, s):
+ "Compute a hash of s to use to see if we've seen s before"
+ group = self.group
+ return (self.hash("htau:" + group.printable(s)))
+
+if __name__ == '__main__':
+ p = SphinxParams(5, True)
+ print p.hb(p.group.g, p.group.g).encode("hex")
+ print p.rho("1234" * 4).encode("hex")
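lioness_enc/lioness_dec above implement the LIONESS wide-block cipher as a four-round construction over the whole body (two hash-keyed XOR rounds on the first k bytes, two AES-CTR rounds on the rest); lioness_dec applies the same rounds in reverse order, so pii is the exact inverse of pi. A short sketch, assuming PyCrypto is installed, that checks the round trip and the PRG output length:

    import os
    from SphinxParams import SphinxParams

    p = SphinxParams()
    key = os.urandom(p.k)                           # 16-byte PRP key
    body = os.urandom(p.m)                          # 1024-byte message body
    assert p.pii(key, p.pi(key, body)) == body      # LIONESS round trip
    assert len(p.rho(key)) == (2 * p.r + 3) * p.k   # PRG output length
    print "LIONESS round trip OK"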
diff --git a/curve25519/Makefile b/curve25519/Makefile
new file mode 100644
index 0000000..c351139
--- /dev/null
+++ b/curve25519/Makefile
@@ -0,0 +1,95 @@
+# Copyright 2011 Ian Goldberg
+#
+# This file is part of Sphinx.
+#
+# Sphinx is free software: you can redistribute it and/or modify
+# it under the terms of version 3 of the GNU Lesser General Public
+# License as published by the Free Software Foundation.
+#
+# Sphinx is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with Sphinx. If not, see
+# <http://www.gnu.org/licenses/>.
+
+CURVE25519_OBJ = curve25519_athlon.o \
+ curve25519_athlon_const.o \
+ curve25519_athlon_fromdouble.o \
+ curve25519_athlon_init.o \
+ curve25519_athlon_mainloop.o \
+ curve25519_athlon_mult.o \
+ curve25519_athlon_square.o \
+ curve25519_athlon_todouble.o
+
+all: _curvedh.so curvedh.py
+ cp $^ ..
+
+_curvedh.so: curve25519.so curve25519.h curvedh.c curvedh_wrap.c curvedh.h
+ gcc -fno-stack-protector -fpic -c curvedh.c curvedh_wrap.c -I/usr/include/python2.6
+ ld -I/usr/include/python2.6 -shared curvedh_wrap.o curvedh.o $(CURVE25519_OBJ) -o _curvedh.so
+
+curvedh.py curvedh_wrap.c: curvedh.i curvedh.h
+ swig -python curvedh.i
+
+clean: curveclean
+ -rm -f _curvedh.so curvedh_wrap.c curvedh.py
+
+# curve25519 Makefile.lib version 20050915
+# D. J. Bernstein
+# Public domain.
+
+curve25519: curve25519.so curve25519.a curve25519.h
+
+curveclean:
+ -rm -f curve25519.so curve25519.a curve25519.h $(CURVE25519_OBJ)
+
+curve25519.h: curve25519.impl \
+curve25519.h.do
+ sh -e curve25519.h.do > curve25519.h.new
+ mv curve25519.h.new curve25519.h
+
+curve25519.so: curve25519.impl \
+curve25519.so.do \
+curve25519_athlon.h \
+curve25519_athlon.c \
+curve25519_athlon_const.s \
+curve25519_athlon_fromdouble.s \
+curve25519_athlon_init.s \
+curve25519_athlon_mainloop.s \
+curve25519_athlon_mult.s \
+curve25519_athlon_square.s \
+curve25519_athlon_todouble.s
+ sh -e curve25519.so.do $(CC)
+
+curve25519.a: curve25519.impl \
+curve25519.a.do \
+curve25519_athlon.h \
+curve25519_athlon.c \
+curve25519_athlon_const.s \
+curve25519_athlon_fromdouble.s \
+curve25519_athlon_init.s \
+curve25519_athlon_mainloop.s \
+curve25519_athlon_mult.s \
+curve25519_athlon_square.s \
+curve25519_athlon_todouble.s
+ sh -e curve25519.a.do $(CC) > curve25519.a.new
+ mv curve25519.a.new curve25519.a
+
+curve25519.impl: \
+curve25519.impl.do \
+x86cpuid.c \
+curve25519.impl.check.c \
+curve25519_athlon.h \
+curve25519_athlon.c \
+curve25519_athlon_const.s \
+curve25519_athlon_fromdouble.s \
+curve25519_athlon_init.s \
+curve25519_athlon_mainloop.s \
+curve25519_athlon_mult.s \
+curve25519_athlon_square.s \
+curve25519_athlon_todouble.s
+ sh -e curve25519.impl.do $(CC) > curve25519.impl.new
+ mv curve25519.impl.new curve25519.impl
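This Makefile builds the SWIG wrapper (_curvedh.so plus curvedh.py) around Bernstein's athlon curve25519 code and copies both into the top-level directory, which is what lets the `from curvedh import *` in SphinxParams.py succeed and enables SphinxParams(ecc=True); note the python2.6 include path is hard-coded and may need adjusting. A sketch of the Diffie-Hellman agreement the wrapper provides, using only the three calls Group_ECC relies on (basepoint, makesecret, curvedh), assuming the extension has been built for your platform:

    import os
    from curvedh import basepoint, makesecret, curvedh

    g = basepoint()
    a = makesecret(os.urandom(32))
    b = makesecret(os.urandom(32))
    A = curvedh(a, g)                       # a's public value
    B = curvedh(b, g)                       # b's public value
    assert curvedh(a, B) == curvedh(b, A)   # both sides derive the same secret
    print "curve25519 DH agreement OK"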
diff --git a/curve25519/curve25519.a.do b/curve25519/curve25519.a.do
new file mode 100644
index 0000000..79915cf
--- /dev/null
+++ b/curve25519/curve25519.a.do
@@ -0,0 +1,34 @@
+# curve25519.a.do version 20050915
+# D. J. Bernstein
+# Public domain.
+
+rm -f curve25519tmp.a
+
+impl=`cat curve25519.impl`
+
+case ${impl} in
+ athlon)
+ $* -c curve25519_${impl}.c
+ $* -c curve25519_${impl}_const.s
+ $* -c curve25519_${impl}_fromdouble.s
+ $* -c curve25519_${impl}_init.s
+ $* -c curve25519_${impl}_mainloop.s
+ $* -c curve25519_${impl}_mult.s
+ $* -c curve25519_${impl}_square.s
+ $* -c curve25519_${impl}_todouble.s
+ ar cr curve25519tmp.a \
+ curve25519_${impl}.o \
+ curve25519_${impl}_const.o \
+ curve25519_${impl}_fromdouble.o \
+ curve25519_${impl}_init.o \
+ curve25519_${impl}_mainloop.o \
+ curve25519_${impl}_mult.o \
+ curve25519_${impl}_square.o \
+ curve25519_${impl}_todouble.o
+ ;;
+ *) echo 'unknown implementation' >&2; exit 1 ;;
+esac
+
+ranlib curve25519tmp.a >/dev/null 2>/dev/null || :
+cat curve25519tmp.a
+rm curve25519tmp.a
diff --git a/curve25519/curve25519.h.do b/curve25519/curve25519.h.do
new file mode 100644
index 0000000..e0edd91
--- /dev/null
+++ b/curve25519/curve25519.h.do
@@ -0,0 +1,8 @@
+# curve25519.h.do version 20050915
+# D. J. Bernstein
+# Public domain.
+
+case `cat curve25519.impl` in
+ athlon) echo '#include "curve25519_athlon.h"' ;;
+ *) echo 'unknown implementation' >&2; exit 1 ;;
+esac
diff --git a/curve25519/curve25519.impl b/curve25519/curve25519.impl
new file mode 100644
index 0000000..a184471
--- /dev/null
+++ b/curve25519/curve25519.impl
@@ -0,0 +1 @@
+athlon
diff --git a/curve25519/curve25519.impl.check.c b/curve25519/curve25519.impl.check.c
new file mode 100644
index 0000000..10f8c30
--- /dev/null
+++ b/curve25519/curve25519.impl.check.c
@@ -0,0 +1,42 @@
+/*
+curve25519.impl.check.c version 20050915
+D. J. Bernstein
+Public domain.
+*/
+
+#include "curve25519.impl.check.h"
+
+unsigned char e1k[32];
+unsigned char e2k[32];
+unsigned char e1e2k[32];
+unsigned char e2e1k[32];
+unsigned char e1[32] = {3};
+unsigned char e2[32] = {5};
+unsigned char k[32] = {9};
+
+unsigned char exp[32] = {
+ 0xbe,0x4c,0x62,0x08,0x29,0x3f,0x81,0x1a,
+ 0x15,0x4b,0x9c,0x42,0xf7,0x87,0xdd,0x90,
+ 0x9f,0x07,0x5c,0x61,0x1b,0x82,0xc3,0x03,
+ 0x50,0xed,0xc9,0xfe,0x6e,0x83,0xad,0x4a,
+};
+
+main()
+{
+ int loop;
+ int i;
+
+ for (loop = 0;loop < 10;++loop) {
+ curve25519(e1k,e1,k);
+ curve25519(e2e1k,e2,e1k);
+ curve25519(e2k,e2,k);
+ curve25519(e1e2k,e1,e2k);
+ for (i = 0;i < 32;++i) if (e1e2k[i] != e2e1k[i]) return 1;
+ for (i = 0;i < 32;++i) e1[i] ^= e2k[i];
+ for (i = 0;i < 32;++i) e2[i] ^= e1k[i];
+ for (i = 0;i < 32;++i) k[i] ^= e1e2k[i];
+ }
+ for (i = 0;i < 32;++i) if (e1e2k[i] != exp[i]) return 1;
+
+ return 0;
+}
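The check program exercises the Diffie-Hellman commutativity of the scalar multiplication -- applying e1 and then e2 to the point k must give the same result as applying e2 and then e1 -- for ten rounds and then compares against a hard-coded expected output. The same commutativity is what SphinxParams relies on; a sketch of the property over the mod-p group (so it runs without the C extension, assuming PyCrypto is installed), using Group_p from SphinxParams.py:

    from SphinxParams import Group_p

    G = Group_p()
    x = G.gensecret()
    y = G.gensecret()
    gx = G.expon(G.g, x)                 # g^x
    gy = G.expon(G.g, y)                 # g^y
    assert G.expon(gx, y) == G.expon(gy, x) == G.multiexpon(G.g, [x, y])
    print "DH commutativity holds in Group_p as well"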
diff --git a/curve25519/curve25519.impl.check.h b/curve25519/curve25519.impl.check.h
new file mode 100644
index 0000000..23d0a21
--- /dev/null
+++ b/curve25519/curve25519.impl.check.h
@@ -0,0 +1 @@
+#include "curve25519_athlon.h"
diff --git a/curve25519/curve25519.impl.do b/curve25519/curve25519.impl.do
new file mode 100644
index 0000000..79151df
--- /dev/null
+++ b/curve25519/curve25519.impl.do
@@ -0,0 +1,25 @@
+# curve25519.impl.do version 20050915
+# D. J. Bernstein
+# Public domain.
+
+echo '#include "curve25519_athlon.h"' > curve25519.impl.check.h
+if gcc -o x86cpuid x86cpuid.c >/dev/null 2>&1 \
+&& ./x86cpuid > x86cpuid.out \
+&& $* -o curve25519.impl.check curve25519.impl.check.c \
+curve25519_athlon.c \
+curve25519_athlon_const.s \
+curve25519_athlon_fromdouble.s \
+curve25519_athlon_init.s \
+curve25519_athlon_mainloop.s \
+curve25519_athlon_mult.s \
+curve25519_athlon_square.s \
+curve25519_athlon_todouble.s \
+>/dev/null 2>&1 \
+&& ./curve25519.impl.check
+then
+ echo athlon
+ exit 0
+fi
+
+echo 'curve25519.impl.do: fatal: all tests failed! unsupported platform or compiler' >&2
+exit 1
diff --git a/curve25519/curve25519.so.do b/curve25519/curve25519.so.do
new file mode 100644
index 0000000..027377f
--- /dev/null
+++ b/curve25519/curve25519.so.do
@@ -0,0 +1,30 @@
+# curve25519.so.do version 20081027
+# Ian Goldberg
+# based on curve25519.a.do version 20050915:
+# D. J. Bernstein
+# Public domain.
+
+impl=`cat curve25519.impl`
+
+case ${impl} in
+ athlon)
+ $* -fpic -c curve25519_${impl}.c
+ $* -fpic -c curve25519_${impl}_const.s
+ $* -fpic -c curve25519_${impl}_fromdouble.s
+ $* -fpic -c curve25519_${impl}_init.s
+ $* -fpic -c curve25519_${impl}_mainloop.s
+ $* -fpic -c curve25519_${impl}_mult.s
+ $* -fpic -c curve25519_${impl}_square.s
+ $* -fpic -c curve25519_${impl}_todouble.s
+ gcc -shared -o curve25519.so \
+ curve25519_${impl}.o \
+ curve25519_${impl}_const.o \
+ curve25519_${impl}_fromdouble.o \
+ curve25519_${impl}_init.o \
+ curve25519_${impl}_mainloop.o \
+ curve25519_${impl}_mult.o \
+ curve25519_${impl}_square.o \
+ curve25519_${impl}_todouble.o
+ ;;
+ *) echo 'unknown implementation' >&2; exit 1 ;;
+esac
diff --git a/curve25519/curve25519_athlon.c b/curve25519/curve25519_athlon.c
new file mode 100644
index 0000000..bb5e6da
--- /dev/null
+++ b/curve25519/curve25519_athlon.c
@@ -0,0 +1,84 @@
+#include "curve25519_athlon.h"
+
+#define mult curve25519_mult
+#define square curve25519_square
+
+void curve25519_athlon_recip(double out[10],const double z[10])
+{
+ double z2[10];
+ double z9[10];
+ double z11[10];
+ double z2_5_0[10];
+ double z2_10_0[10];
+ double z2_20_0[10];
+ double z2_50_0[10];
+ double z2_100_0[10];
+ double t0[10];
+ double t1[10];
+ int i;
+
+ /* 2 */ square(z2,z);
+ /* 4 */ square(t1,z2);
+ /* 8 */ square(t0,t1);
+ /* 9 */ mult(z9,t0,z);
+ /* 11 */ mult(z11,z9,z2);
+ /* 22 */ square(t0,z11);
+ /* 2^5 - 2^0 = 31 */ mult(z2_5_0,t0,z9);
+
+ /* 2^6 - 2^1 */ square(t0,z2_5_0);
+ /* 2^7 - 2^2 */ square(t1,t0);
+ /* 2^8 - 2^3 */ square(t0,t1);
+ /* 2^9 - 2^4 */ square(t1,t0);
+ /* 2^10 - 2^5 */ square(t0,t1);
+ /* 2^10 - 2^0 */ mult(z2_10_0,t0,z2_5_0);
+
+ /* 2^11 - 2^1 */ square(t0,z2_10_0);
+ /* 2^12 - 2^2 */ square(t1,t0);
+ /* 2^20 - 2^10 */ for (i = 2;i < 10;i += 2) { square(t0,t1); square(t1,t0); }
+ /* 2^20 - 2^0 */ mult(z2_20_0,t1,z2_10_0);
+
+ /* 2^21 - 2^1 */ square(t0,z2_20_0);
+ /* 2^22 - 2^2 */ square(t1,t0);
+ /* 2^40 - 2^20 */ for (i = 2;i < 20;i += 2) { square(t0,t1); square(t1,t0); }
+ /* 2^40 - 2^0 */ mult(t0,t1,z2_20_0);
+
+ /* 2^41 - 2^1 */ square(t1,t0);
+ /* 2^42 - 2^2 */ square(t0,t1);
+ /* 2^50 - 2^10 */ for (i = 2;i < 10;i += 2) { square(t1,t0); square(t0,t1); }
+ /* 2^50 - 2^0 */ mult(z2_50_0,t0,z2_10_0);
+
+ /* 2^51 - 2^1 */ square(t0,z2_50_0);
+ /* 2^52 - 2^2 */ square(t1,t0);
+ /* 2^100 - 2^50 */ for (i = 2;i < 50;i += 2) { square(t0,t1); square(t1,t0); }
+ /* 2^100 - 2^0 */ mult(z2_100_0,t1,z2_50_0);
+
+ /* 2^101 - 2^1 */ square(t1,z2_100_0);
+ /* 2^102 - 2^2 */ square(t0,t1);
+ /* 2^200 - 2^100 */ for (i = 2;i < 100;i += 2) { square(t1,t0); square(t0,t1); }
+ /* 2^200 - 2^0 */ mult(t1,t0,z2_100_0);
+
+ /* 2^201 - 2^1 */ square(t0,t1);
+ /* 2^202 - 2^2 */ square(t1,t0);
+ /* 2^250 - 2^50 */ for (i = 2;i < 50;i += 2) { square(t0,t1); square(t1,t0); }
+ /* 2^250 - 2^0 */ mult(t0,t1,z2_50_0);
+
+ /* 2^251 - 2^1 */ square(t1,t0);
+ /* 2^252 - 2^2 */ square(t0,t1);
+ /* 2^253 - 2^3 */ square(t1,t0);
+ /* 2^254 - 2^4 */ square(t0,t1);
+ /* 2^255 - 2^5 */ square(t1,t0);
+ /* 2^255 - 21 */ mult(out,t1,z11);
+}
+
+void curve25519_athlon(unsigned char ek[32],
+ const unsigned char e[32],
+ const unsigned char k[32])
+{
+ double work[30];
+ curve25519_athlon_init();
+ curve25519_athlon_todouble(work,k);
+ curve25519_athlon_mainloop(work,e);
+ curve25519_athlon_recip(work + 10,work + 10);
+ mult(work + 20,work,work + 10);
+ curve25519_athlon_fromdouble(ek,work + 20);
+}
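The comments in curve25519_athlon_recip trace an addition chain whose final exponent is 2^255 - 21; since the field prime is p = 2^255 - 19, that exponent is p - 2, so by Fermat's little theorem the routine returns the multiplicative inverse of z, which curve25519_athlon then uses to turn the ladder's X/Z output into the affine result. A one-liner check of the exponent bookkeeping, in Python:

    # The field prime and the exponent produced by the addition chain:
    p = 2**255 - 19
    e = 2**255 - 21          # "2^255 - 21", the chain's final comment
    assert e == p - 2        # so recip computes z^(p-2) = 1/z by Fermat
    z = 9                    # any nonzero field element
    assert (z * pow(z, e, p)) % p == 1
    print "recip exponent check OK"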
diff --git a/curve25519/curve25519_athlon.h b/curve25519/curve25519_athlon.h
new file mode 100644
index 0000000..f711422
--- /dev/null
+++ b/curve25519/curve25519_athlon.h
@@ -0,0 +1,33 @@
+/*
+curve25519_athlon.h version 20050915
+D. J. Bernstein
+Public domain.
+*/
+
+#ifndef CURVE25519_ATHLON
+#define CURVE25519_ATHLON
+
+extern void curve25519_athlon(unsigned char *,const unsigned char *,const unsigned char *);
+
+/* internal functions, exposed purely for testing */
+extern void curve25519_athlon_init(void);
+extern void curve25519_athlon_mainloop(double *,const unsigned char *);
+extern void curve25519_athlon_recip(double *,const double *);
+extern void curve25519_athlon_square(double *,const double *);
+extern void curve25519_athlon_mult(double *,const double *,const double *);
+extern void curve25519_athlon_todouble(double *,const unsigned char *);
+extern void curve25519_athlon_fromdouble(unsigned char *,const double *);
+
+#ifndef curve25519_implementation
+#define curve25519_implementation "curve25519_athlon"
+#define curve25519 curve25519_athlon
+#define curve25519_init curve25519_athlon_init
+#define curve25519_mainloop curve25519_athlon_mainloop
+#define curve25519_recip curve25519_athlon_recip
+#define curve25519_square curve25519_athlon_square
+#define curve25519_mult curve25519_athlon_mult
+#define curve25519_todouble curve25519_athlon_todouble
+#define curve25519_fromdouble curve25519_athlon_fromdouble
+#endif
+
+#endif
diff --git a/curve25519/curve25519_athlon_const.s b/curve25519/curve25519_athlon_const.s
new file mode 100644
index 0000000..b5dd493
--- /dev/null
+++ b/curve25519/curve25519_athlon_const.s
@@ -0,0 +1,114 @@
+.data
+.section .rodata
+.p2align 5
+
+.globl curve25519_athlon_scale
+.globl curve25519_athlon_121665
+.globl curve25519_athlon_alpha26
+.globl curve25519_athlon_alpha51
+.globl curve25519_athlon_alpha77
+.globl curve25519_athlon_alpha102
+.globl curve25519_athlon_alpha128
+.globl curve25519_athlon_alpha153
+.globl curve25519_athlon_alpha179
+.globl curve25519_athlon_alpha204
+.globl curve25519_athlon_alpha230
+.globl curve25519_athlon_alpha255
+.globl curve25519_athlon_in0offset
+.globl curve25519_athlon_in1offset
+.globl curve25519_athlon_in2offset
+.globl curve25519_athlon_in3offset
+.globl curve25519_athlon_in4offset
+.globl curve25519_athlon_in5offset
+.globl curve25519_athlon_in6offset
+.globl curve25519_athlon_in7offset
+.globl curve25519_athlon_in8offset
+.globl curve25519_athlon_in9offset
+.globl curve25519_athlon_out0offset
+.globl curve25519_athlon_out1offset
+.globl curve25519_athlon_out2offset
+.globl curve25519_athlon_out3offset
+.globl curve25519_athlon_out4offset
+.globl curve25519_athlon_out5offset
+.globl curve25519_athlon_out6offset
+.globl curve25519_athlon_out7offset
+.globl curve25519_athlon_out8offset
+.globl curve25519_athlon_out9offset
+.globl curve25519_athlon_two0
+.globl curve25519_athlon_two1
+.globl curve25519_athlon_zero
+.globl curve25519_athlon_rounding
+
+curve25519_athlon_scale:
+ .long 0x0,0x30430000
+curve25519_athlon_121665:
+ .long 0x0,0x40fdb410
+curve25519_athlon_in0offset:
+ .long 0x0,0x43300000
+curve25519_athlon_in1offset:
+ .long 0x0,0x45300000
+curve25519_athlon_in2offset:
+ .long 0x0,0x46b00000
+curve25519_athlon_in3offset:
+ .long 0x0,0x48300000
+curve25519_athlon_in4offset:
+ .long 0x0,0x49b00000
+curve25519_athlon_in5offset:
+ .long 0x0,0x4b300000
+curve25519_athlon_in6offset:
+ .long 0x0,0x4d300000
+curve25519_athlon_in7offset:
+ .long 0x0,0x4eb00000
+curve25519_athlon_in8offset:
+ .long 0x0,0x50300000
+curve25519_athlon_in9offset:
+ .long 0x0,0x51b00000
+curve25519_athlon_alpha26:
+ .long 0x0,0x45880000
+curve25519_athlon_alpha51:
+ .long 0x0,0x47180000
+curve25519_athlon_alpha77:
+ .long 0x0,0x48b80000
+curve25519_athlon_alpha102:
+ .long 0x0,0x4a480000
+curve25519_athlon_alpha128:
+ .long 0x0,0x4be80000
+curve25519_athlon_alpha153:
+ .long 0x0,0x4d780000
+curve25519_athlon_alpha179:
+ .long 0x0,0x4f180000
+curve25519_athlon_alpha204:
+ .long 0x0,0x50a80000
+curve25519_athlon_alpha230:
+ .long 0x0,0x52480000
+curve25519_athlon_alpha255:
+ .long 0x0,0x53d80000
+curve25519_athlon_two0:
+ .long 0x0,0x3ff00000
+curve25519_athlon_two1:
+ .long 0x0,0x40000000
+curve25519_athlon_zero:
+ .long 0x0,0x0
+curve25519_athlon_out0offset:
+ .long 0x1fffffed,0x43380000
+curve25519_athlon_out1offset:
+ .long 0xffffff8,0x44d80000
+curve25519_athlon_out2offset:
+ .long 0x1ffffff8,0x46680000
+curve25519_athlon_out3offset:
+ .long 0xffffff8,0x48080000
+curve25519_athlon_out4offset:
+ .long 0x1ffffff8,0x49980000
+curve25519_athlon_out5offset:
+ .long 0xffffff8,0x4b380000
+curve25519_athlon_out6offset:
+ .long 0x1ffffff8,0x4cc80000
+curve25519_athlon_out7offset:
+ .long 0xffffff8,0x4e680000
+curve25519_athlon_out8offset:
+ .long 0x1ffffff8,0x4ff80000
+curve25519_athlon_out9offset:
+ .long 0x1fffff8,0x51980000
+curve25519_athlon_rounding:
+ .byte 0x7f
+ .byte 0x13
diff --git a/curve25519/curve25519_athlon_fromdouble.s b/curve25519/curve25519_athlon_fromdouble.s
new file mode 100644
index 0000000..87cfbf7
--- /dev/null
+++ b/curve25519/curve25519_athlon_fromdouble.s
@@ -0,0 +1,195 @@
+.text
+.p2align 5
+.globl _curve25519_athlon_fromdouble
+.globl curve25519_athlon_fromdouble
+_curve25519_athlon_fromdouble:
+curve25519_athlon_fromdouble:
+mov %esp,%eax
+and $31,%eax
+add $192,%eax
+sub %eax,%esp
+movl %ebp,0(%esp)
+movl 8(%esp,%eax),%ecx
+fldl 0(%ecx)
+faddl curve25519_athlon_out0offset
+fstpl 96(%esp)
+fldl 8(%ecx)
+faddl curve25519_athlon_out1offset
+fstpl 104(%esp)
+fldl 16(%ecx)
+faddl curve25519_athlon_out2offset
+fstpl 112(%esp)
+fldl 24(%ecx)
+faddl curve25519_athlon_out3offset
+fstpl 120(%esp)
+fldl 32(%ecx)
+faddl curve25519_athlon_out4offset
+fstpl 128(%esp)
+fldl 40(%ecx)
+faddl curve25519_athlon_out5offset
+fstpl 136(%esp)
+fldl 48(%ecx)
+faddl curve25519_athlon_out6offset
+fstpl 144(%esp)
+fldl 56(%ecx)
+faddl curve25519_athlon_out7offset
+fstpl 152(%esp)
+fldl 64(%ecx)
+faddl curve25519_athlon_out8offset
+fstpl 160(%esp)
+fldl 72(%ecx)
+faddl curve25519_athlon_out9offset
+fstpl 168(%esp)
+movl 96(%esp),%ecx
+movl %ecx,4(%esp)
+movl 104(%esp),%ecx
+shl $26,%ecx
+movl %ecx,40(%esp)
+movl 104(%esp),%ecx
+shr $6,%ecx
+movl %ecx,8(%esp)
+movl 112(%esp),%ecx
+shl $19,%ecx
+movl %ecx,44(%esp)
+movl 112(%esp),%ecx
+shr $13,%ecx
+movl %ecx,12(%esp)
+movl 120(%esp),%ecx
+shl $13,%ecx
+movl %ecx,48(%esp)
+movl 120(%esp),%ecx
+shr $19,%ecx
+movl %ecx,16(%esp)
+movl 128(%esp),%ecx
+shl $6,%ecx
+movl %ecx,52(%esp)
+movl 128(%esp),%ecx
+shr $26,%ecx
+movl 136(%esp),%edx
+add %edx,%ecx
+movl %ecx,20(%esp)
+movl 144(%esp),%ecx
+shl $25,%ecx
+movl %ecx,56(%esp)
+movl 144(%esp),%ecx
+shr $7,%ecx
+movl %ecx,24(%esp)
+movl 152(%esp),%ecx
+shl $19,%ecx
+movl %ecx,60(%esp)
+movl 152(%esp),%ecx
+shr $13,%ecx
+movl %ecx,28(%esp)
+movl 160(%esp),%ecx
+shl $12,%ecx
+movl %ecx,64(%esp)
+movl 160(%esp),%ecx
+shr $20,%ecx
+movl %ecx,32(%esp)
+movl 168(%esp),%ecx
+shl $6,%ecx
+movl %ecx,68(%esp)
+movl 168(%esp),%ecx
+shr $26,%ecx
+movl %ecx,36(%esp)
+mov $0,%ecx
+movl %ecx,72(%esp)
+movl 4(%esp),%ecx
+addl 40(%esp),%ecx
+movl %ecx,4(%esp)
+movl 8(%esp),%ecx
+adcl 44(%esp),%ecx
+movl %ecx,8(%esp)
+movl 12(%esp),%ecx
+adcl 48(%esp),%ecx
+movl %ecx,12(%esp)
+movl 16(%esp),%ecx
+adcl 52(%esp),%ecx
+movl %ecx,16(%esp)
+movl 20(%esp),%ecx
+adcl 56(%esp),%ecx
+movl %ecx,20(%esp)
+movl 24(%esp),%ecx
+adcl 60(%esp),%ecx
+movl %ecx,24(%esp)
+movl 28(%esp),%ecx
+adcl 64(%esp),%ecx
+movl %ecx,28(%esp)
+movl 32(%esp),%ecx
+adcl 68(%esp),%ecx
+movl %ecx,32(%esp)
+movl 36(%esp),%ecx
+adcl 72(%esp),%ecx
+movl %ecx,36(%esp)
+movl 4(%esp),%ecx
+adc $0x13,%ecx
+movl %ecx,40(%esp)
+movl 8(%esp),%ecx
+adc $0,%ecx
+movl %ecx,44(%esp)
+movl 12(%esp),%ecx
+adc $0,%ecx
+movl %ecx,48(%esp)
+movl 16(%esp),%ecx
+adc $0,%ecx
+movl %ecx,52(%esp)
+movl 20(%esp),%ecx
+adc $0,%ecx
+movl %ecx,56(%esp)
+movl 24(%esp),%ecx
+adc $0,%ecx
+movl %ecx,60(%esp)
+movl 28(%esp),%ecx
+adc $0,%ecx
+movl %ecx,64(%esp)
+movl 32(%esp),%ecx
+adc $0x80000000,%ecx
+movl %ecx,68(%esp)
+movl 36(%esp),%ebp
+adc $0xffffffff,%ebp
+and $0x80000000,%ebp
+sar $31,%ebp
+movl 4(%esp,%eax),%ecx
+movl 4(%esp),%edx
+xorl 40(%esp),%edx
+and %ebp,%edx
+xorl 40(%esp),%edx
+movl %edx,0(%ecx)
+movl 8(%esp),%edx
+xorl 44(%esp),%edx
+and %ebp,%edx
+xorl 44(%esp),%edx
+movl %edx,4(%ecx)
+movl 12(%esp),%edx
+xorl 48(%esp),%edx
+and %ebp,%edx
+xorl 48(%esp),%edx
+movl %edx,8(%ecx)
+movl 16(%esp),%edx
+xorl 52(%esp),%edx
+and %ebp,%edx
+xorl 52(%esp),%edx
+movl %edx,12(%ecx)
+movl 20(%esp),%edx
+xorl 56(%esp),%edx
+and %ebp,%edx
+xorl 56(%esp),%edx
+movl %edx,16(%ecx)
+movl 24(%esp),%edx
+xorl 60(%esp),%edx
+and %ebp,%edx
+xorl 60(%esp),%edx
+movl %edx,20(%ecx)
+movl 28(%esp),%edx
+xorl 64(%esp),%edx
+and %ebp,%edx
+xorl 64(%esp),%edx
+movl %edx,24(%ecx)
+movl 32(%esp),%edx
+xorl 68(%esp),%edx
+and %ebp,%edx
+xorl 68(%esp),%edx
+movl %edx,28(%ecx)
+movl 0(%esp),%ebp
+add %eax,%esp
+ret
diff --git a/curve25519/curve25519_athlon_init.s b/curve25519/curve25519_athlon_init.s
new file mode 100644
index 0000000..7d83655
--- /dev/null
+++ b/curve25519/curve25519_athlon_init.s
@@ -0,0 +1,13 @@
+.text
+.p2align 5
+.globl _curve25519_athlon_init
+.globl curve25519_athlon_init
+_curve25519_athlon_init:
+curve25519_athlon_init:
+mov %esp,%eax
+and $31,%eax
+add $0,%eax
+sub %eax,%esp
+fldcw curve25519_athlon_rounding
+add %eax,%esp
+ret
diff --git a/curve25519/curve25519_athlon_mainloop.s b/curve25519/curve25519_athlon_mainloop.s
new file mode 100644
index 0000000..45230ef
--- /dev/null
+++ b/curve25519/curve25519_athlon_mainloop.s
@@ -0,0 +1,3990 @@
+.text
+.p2align 5
+.globl _curve25519_athlon_mainloop
+.globl curve25519_athlon_mainloop
+_curve25519_athlon_mainloop:
+curve25519_athlon_mainloop:
+mov %esp,%eax
+and $31,%eax
+add $704,%eax
+sub %eax,%esp
+lea 256(%esp),%edx
+lea 512(%esp),%ecx
+fldl curve25519_athlon_two0
+fldl curve25519_athlon_zero
+movl %eax,160(%ecx)
+movl %ebx,164(%ecx)
+movl %esi,168(%ecx)
+movl %edi,172(%ecx)
+movl %ebp,176(%ecx)
+movl 4(%esp,%eax),%ebx
+fxch %st(1)
+fstl 0(%esp)
+fxch %st(1)
+fstl 8(%esp)
+fstl 16(%esp)
+fstl 24(%esp)
+fstl 32(%esp)
+fstl 40(%esp)
+fstl 48(%esp)
+fstl -120(%edx)
+fstl -112(%edx)
+fstl -104(%edx)
+fstl -96(%edx)
+fstl -88(%edx)
+fstl -80(%edx)
+fstl -72(%edx)
+fstl -64(%edx)
+fstl -56(%edx)
+fstl -48(%edx)
+fstl -40(%edx)
+fstl -32(%edx)
+fstl -24(%edx)
+fxch %st(1)
+fstpl 64(%edx)
+fstl 72(%edx)
+fstl 80(%edx)
+fstl 88(%edx)
+fstl 96(%edx)
+fstl 104(%edx)
+fstl 112(%edx)
+fstl 120(%edx)
+fstl -128(%ecx)
+fstpl -120(%ecx)
+fldl 0(%ebx)
+fldl 8(%ebx)
+fldl 16(%ebx)
+fldl 24(%ebx)
+fxch %st(3)
+fstl -16(%edx)
+fstpl 56(%esp)
+fldl 32(%ebx)
+fxch %st(2)
+fstl -8(%edx)
+fstpl 64(%esp)
+fldl 40(%ebx)
+fxch %st(1)
+fstl 0(%edx)
+fstpl 72(%esp)
+fldl 48(%ebx)
+fxch %st(3)
+fstl 8(%edx)
+fstpl 80(%esp)
+fldl 56(%ebx)
+fxch %st(2)
+fstl 16(%edx)
+fstpl 88(%esp)
+fldl 64(%ebx)
+fxch %st(1)
+fstl 24(%edx)
+fstpl 96(%esp)
+fldl 72(%ebx)
+fxch %st(3)
+fstl 32(%edx)
+fstpl 104(%esp)
+fxch %st(1)
+fstl 40(%edx)
+fstpl 112(%esp)
+fstl 48(%edx)
+fstpl 120(%esp)
+fstl 56(%edx)
+fstpl -128(%edx)
+movl 8(%esp,%eax),%ebx
+mov $28,%edi
+mov $31,%ebp
+movl 28(%ebx),%esi
+rol $1,%esi
+._morebytes:
+movl %edi,188(%ecx)
+._morebits:
+rol $1,%esi
+movl %esi,180(%ecx)
+movl %ebp,184(%ecx)
+and $1,%esi
+movl $0x43300000,-108(%ecx)
+movl %esi,-112(%ecx)
+fldl -96(%edx)
+fldl 0(%esp)
+fadd %st(0),%st(1)
+fsubl -96(%edx)
+fldl 64(%edx)
+fldl -16(%edx)
+fadd %st(0),%st(1)
+fsubl 64(%edx)
+fldl -88(%edx)
+fldl 8(%esp)
+fadd %st(0),%st(1)
+fsubl -88(%edx)
+fxch %st(5)
+fstpl 0(%esp)
+fxch %st(3)
+fstpl -96(%edx)
+fldl 72(%edx)
+fldl -8(%edx)
+fadd %st(0),%st(1)
+fsubl 72(%edx)
+fxch %st(3)
+fstpl -16(%edx)
+fxch %st(1)
+fstpl 64(%edx)
+fldl -80(%edx)
+fldl 16(%esp)
+fadd %st(0),%st(1)
+fsubl -80(%edx)
+fxch %st(4)
+fstpl 8(%esp)
+fxch %st(4)
+fstpl -88(%edx)
+fldl 80(%edx)
+fldl 0(%edx)
+fadd %st(0),%st(1)
+fsubl 80(%edx)
+fxch %st(2)
+fstpl -8(%edx)
+fxch %st(2)
+fstpl 72(%edx)
+fldl -72(%edx)
+fldl 24(%esp)
+fadd %st(0),%st(1)
+fsubl -72(%edx)
+fxch %st(5)
+fstpl 16(%esp)
+fxch %st(3)
+fstpl -80(%edx)
+fldl 88(%edx)
+fldl 8(%edx)
+fadd %st(0),%st(1)
+fsubl 88(%edx)
+fxch %st(3)
+fstpl 0(%edx)
+fxch %st(1)
+fstpl 80(%edx)
+fldl -64(%edx)
+fldl 32(%esp)
+fadd %st(0),%st(1)
+fsubl -64(%edx)
+fxch %st(4)
+fstpl 24(%esp)
+fxch %st(4)
+fstpl -72(%edx)
+fldl 96(%edx)
+fldl 16(%edx)
+fadd %st(0),%st(1)
+fsubl 96(%edx)
+fxch %st(2)
+fstpl 8(%edx)
+fxch %st(2)
+fstpl 88(%edx)
+fldl -56(%edx)
+fldl 40(%esp)
+fadd %st(0),%st(1)
+fsubl -56(%edx)
+fxch %st(5)
+fstpl 32(%esp)
+fxch %st(3)
+fstpl -64(%edx)
+fldl 104(%edx)
+fldl 24(%edx)
+fadd %st(0),%st(1)
+fsubl 104(%edx)
+fxch %st(3)
+fstpl 16(%edx)
+fxch %st(1)
+fstpl 96(%edx)
+fldl -48(%edx)
+fldl 48(%esp)
+fadd %st(0),%st(1)
+fsubl -48(%edx)
+fxch %st(4)
+fstpl 40(%esp)
+fxch %st(4)
+fstpl -56(%edx)
+fldl 112(%edx)
+fldl 32(%edx)
+fadd %st(0),%st(1)
+fsubl 112(%edx)
+fxch %st(2)
+fstpl 24(%edx)
+fxch %st(2)
+fstpl 104(%edx)
+fldl -40(%edx)
+fldl -120(%edx)
+fadd %st(0),%st(1)
+fsubl -40(%edx)
+fxch %st(5)
+fstpl 48(%esp)
+fxch %st(3)
+fstpl -48(%edx)
+fldl 120(%edx)
+fldl 40(%edx)
+fadd %st(0),%st(1)
+fsubl 120(%edx)
+fxch %st(3)
+fstpl 32(%edx)
+fxch %st(1)
+fstpl 112(%edx)
+fldl -32(%edx)
+fldl -112(%edx)
+fadd %st(0),%st(1)
+fsubl -32(%edx)
+fxch %st(4)
+fstpl -120(%edx)
+fxch %st(4)
+fstpl -40(%edx)
+fldl -128(%ecx)
+fldl 48(%edx)
+fadd %st(0),%st(1)
+fsubl -128(%ecx)
+fxch %st(2)
+fstpl 40(%edx)
+fxch %st(2)
+fstpl 120(%edx)
+fldl -24(%edx)
+fldl -104(%edx)
+fadd %st(0),%st(1)
+fsubl -24(%edx)
+fxch %st(5)
+fstpl -112(%edx)
+fxch %st(3)
+fstpl -32(%edx)
+fldl -120(%ecx)
+fldl 56(%edx)
+fadd %st(0),%st(1)
+fsubl -120(%ecx)
+fxch %st(3)
+fstpl 48(%edx)
+fxch %st(1)
+fstpl -128(%ecx)
+fldl -112(%ecx)
+fsubl curve25519_athlon_in0offset
+fldl curve25519_athlon_two0
+fsub %st(1),%st(0)
+fxch %st(4)
+fstpl -104(%edx)
+fxch %st(4)
+fstpl -24(%edx)
+fstpl 56(%edx)
+fstpl -120(%ecx)
+fxch %st(1)
+fstl 136(%ecx)
+fldl 0(%esp)
+fmul %st(2),%st(0)
+fldl -16(%edx)
+fmul %st(2),%st(0)
+faddp %st(0),%st(1)
+fldl 8(%esp)
+fmul %st(3),%st(0)
+fldl -8(%edx)
+fmul %st(3),%st(0)
+faddp %st(0),%st(1)
+fldl 16(%esp)
+fmul %st(4),%st(0)
+fldl 0(%edx)
+fmul %st(4),%st(0)
+faddp %st(0),%st(1)
+fldl 24(%esp)
+fmul %st(5),%st(0)
+fldl 8(%edx)
+fmul %st(5),%st(0)
+faddp %st(0),%st(1)
+fxch %st(3)
+fstpl -112(%ecx)
+fldl 32(%esp)
+fmul %st(5),%st(0)
+fldl 16(%edx)
+fmul %st(5),%st(0)
+faddp %st(0),%st(1)
+fxch %st(2)
+fstpl -104(%ecx)
+fldl 40(%esp)
+fmul %st(5),%st(0)
+fldl 24(%edx)
+fmul %st(5),%st(0)
+faddp %st(0),%st(1)
+fxch %st(1)
+fstpl -96(%ecx)
+fldl 48(%esp)
+fmul %st(5),%st(0)
+fldl 32(%edx)
+fmul %st(5),%st(0)
+faddp %st(0),%st(1)
+fxch %st(3)
+fstpl -88(%ecx)
+fldl -120(%edx)
+fmul %st(5),%st(0)
+fldl 40(%edx)
+fmul %st(5),%st(0)
+faddp %st(0),%st(1)
+fxch %st(2)
+fstpl -80(%ecx)
+fldl -112(%edx)
+fmul %st(5),%st(0)
+fldl 48(%edx)
+fmul %st(5),%st(0)
+faddp %st(0),%st(1)
+fxch %st(1)
+fstpl -72(%ecx)
+fldl -104(%edx)
+fmul %st(5),%st(0)
+fldl 56(%edx)
+fmul %st(5),%st(0)
+faddp %st(0),%st(1)
+fxch %st(3)
+fstpl -64(%ecx)
+fldl -96(%edx)
+fmul %st(5),%st(0)
+fldl 64(%edx)
+fmul %st(5),%st(0)
+faddp %st(0),%st(1)
+fxch %st(2)
+fstpl -56(%ecx)
+fldl -88(%edx)
+fmul %st(5),%st(0)
+fldl 72(%edx)
+fmul %st(5),%st(0)
+faddp %st(0),%st(1)
+fxch %st(1)
+fstpl -48(%ecx)
+fldl -80(%edx)
+fmul %st(5),%st(0)
+fldl 80(%edx)
+fmul %st(5),%st(0)
+faddp %st(0),%st(1)
+fxch %st(3)
+fstpl -40(%ecx)
+fldl -72(%edx)
+fmul %st(5),%st(0)
+fldl 88(%edx)
+fmul %st(5),%st(0)
+faddp %st(0),%st(1)
+fxch %st(2)
+fstpl -32(%ecx)
+fldl -64(%edx)
+fmul %st(5),%st(0)
+fldl 96(%edx)
+fmul %st(5),%st(0)
+faddp %st(0),%st(1)
+fxch %st(1)
+fstpl -24(%ecx)
+fldl -56(%edx)
+fmul %st(5),%st(0)
+fldl 104(%edx)
+fmul %st(5),%st(0)
+faddp %st(0),%st(1)
+fxch %st(3)
+fstpl -16(%ecx)
+fldl -48(%edx)
+fmul %st(5),%st(0)
+fldl 112(%edx)
+fmul %st(5),%st(0)
+faddp %st(0),%st(1)
+fxch %st(2)
+fstpl -8(%ecx)
+fldl -40(%edx)
+fmul %st(5),%st(0)
+fldl 120(%edx)
+fmul %st(5),%st(0)
+faddp %st(0),%st(1)
+fxch %st(1)
+fstpl 0(%ecx)
+fldl -32(%edx)
+fmul %st(5),%st(0)
+fldl -128(%ecx)
+fmul %st(5),%st(0)
+faddp %st(0),%st(1)
+fxch %st(3)
+fstpl 8(%ecx)
+fldl -24(%edx)
+fmulp %st(0),%st(5)
+fldl -120(%ecx)
+fmulp %st(0),%st(4)
+fxch %st(3)
+faddp %st(0),%st(4)
+fstpl 16(%ecx)
+fxch %st(1)
+fstpl 24(%ecx)
+fstpl 32(%ecx)
+fstpl 40(%ecx)
+fldl -24(%edx)
+fmull 56(%edx)
+fmull curve25519_athlon_scale
+fldl -96(%edx)
+fmull 48(%edx)
+faddp %st(0),%st(1)
+fldl -88(%edx)
+fmull 40(%edx)
+faddp %st(0),%st(1)
+fldl -96(%edx)
+fmull 56(%edx)
+fldl -80(%edx)
+fmull 32(%edx)
+faddp %st(0),%st(2)
+fldl -88(%edx)
+fmull 48(%edx)
+faddp %st(0),%st(1)
+fldl -72(%edx)
+fmull 24(%edx)
+faddp %st(0),%st(2)
+fldl -80(%edx)
+fmull 40(%edx)
+faddp %st(0),%st(1)
+fldl -64(%edx)
+fmull 16(%edx)
+faddp %st(0),%st(2)
+fldl -72(%edx)
+fmull 32(%edx)
+faddp %st(0),%st(1)
+fldl -88(%edx)
+fmull 56(%edx)
+fldl -56(%edx)
+fmull 8(%edx)
+faddp %st(0),%st(3)
+fldl -64(%edx)
+fmull 24(%edx)
+faddp %st(0),%st(2)
+fldl -80(%edx)
+fmull 48(%edx)
+faddp %st(0),%st(1)
+fldl -48(%edx)
+fmull 0(%edx)
+faddp %st(0),%st(3)
+fldl -56(%edx)
+fmull 16(%edx)
+faddp %st(0),%st(2)
+fldl -72(%edx)
+fmull 40(%edx)
+faddp %st(0),%st(1)
+fldl -40(%edx)
+fmull -8(%edx)
+faddp %st(0),%st(3)
+fldl -48(%edx)
+fmull 8(%edx)
+faddp %st(0),%st(2)
+fldl -64(%edx)
+fmull 32(%edx)
+faddp %st(0),%st(1)
+fldl -32(%edx)
+fmull -16(%edx)
+faddp %st(0),%st(3)
+fldl -40(%edx)
+fmull 0(%edx)
+faddp %st(0),%st(2)
+fldl -56(%edx)
+fmull 24(%edx)
+faddp %st(0),%st(1)
+fldl -80(%edx)
+fmull 56(%edx)
+fldl -48(%edx)
+fmull 16(%edx)
+faddp %st(0),%st(2)
+fldl -32(%edx)
+fmull -8(%edx)
+faddp %st(0),%st(3)
+fldl curve25519_athlon_alpha230
+fadd %st(4),%st(0)
+fldl -72(%edx)
+fmull 48(%edx)
+faddp %st(0),%st(2)
+fldl -40(%edx)
+fmull 8(%edx)
+faddp %st(0),%st(3)
+fldl -24(%edx)
+fmull -16(%edx)
+faddp %st(0),%st(4)
+fsubl curve25519_athlon_alpha230
+fldl -64(%edx)
+fmull 40(%edx)
+faddp %st(0),%st(2)
+fldl -72(%edx)
+fmull 56(%edx)
+fldl -32(%edx)
+fmull 0(%edx)
+faddp %st(0),%st(4)
+fxch %st(1)
+fadd %st(0),%st(4)
+fldl -56(%edx)
+fmull 32(%edx)
+faddp %st(0),%st(3)
+fldl -64(%edx)
+fmull 48(%edx)
+faddp %st(0),%st(2)
+fsubrp %st(0),%st(5)
+fldl curve25519_athlon_alpha255
+fadd %st(4),%st(0)
+fldl -48(%edx)
+fmull 24(%edx)
+faddp %st(0),%st(3)
+fldl -56(%edx)
+fmull 40(%edx)
+faddp %st(0),%st(2)
+fldl -24(%edx)
+fmull -8(%edx)
+faddp %st(0),%st(4)
+fsubl curve25519_athlon_alpha255
+fldl -40(%edx)
+fmull 16(%edx)
+faddp %st(0),%st(3)
+fldl -64(%edx)
+fmull 56(%edx)
+fldl -48(%edx)
+fmull 32(%edx)
+faddp %st(0),%st(3)
+fldl -32(%edx)
+fmull 8(%edx)
+faddp %st(0),%st(4)
+fxch %st(1)
+fadd %st(0),%st(4)
+fsubrp %st(0),%st(5)
+fxch %st(5)
+fstpl 64(%ecx)
+fldl -56(%edx)
+fmull 48(%edx)
+faddp %st(0),%st(5)
+fldl -40(%edx)
+fmull 24(%edx)
+faddp %st(0),%st(1)
+fldl -24(%edx)
+fmull 0(%edx)
+faddp %st(0),%st(2)
+fxch %st(2)
+fmull curve25519_athlon_scale
+fldl -48(%edx)
+fmull 40(%edx)
+faddp %st(0),%st(5)
+fldl -32(%edx)
+fmull 16(%edx)
+faddp %st(0),%st(3)
+fxch %st(1)
+fmull curve25519_athlon_scale
+fldl -96(%edx)
+fmull -16(%edx)
+faddp %st(0),%st(2)
+fxch %st(3)
+fstpl 72(%ecx)
+fldl -56(%edx)
+fmull 56(%edx)
+fldl -40(%edx)
+fmull 32(%edx)
+faddp %st(0),%st(5)
+fldl -24(%edx)
+fmull 8(%edx)
+faddp %st(0),%st(3)
+fldl -96(%edx)
+fmull -8(%edx)
+faddp %st(0),%st(4)
+fldl curve25519_athlon_alpha26
+fadd %st(2),%st(0)
+fldl -48(%edx)
+fmull 48(%edx)
+faddp %st(0),%st(2)
+fldl -32(%edx)
+fmull 24(%edx)
+faddp %st(0),%st(6)
+fxch %st(3)
+fmull curve25519_athlon_scale
+fldl -88(%edx)
+fmull -16(%edx)
+faddp %st(0),%st(5)
+fxch %st(3)
+fsubl curve25519_athlon_alpha26
+fldl -40(%edx)
+fmull 40(%edx)
+faddp %st(0),%st(2)
+fldl -24(%edx)
+fmull 16(%edx)
+faddp %st(0),%st(6)
+fldl -96(%edx)
+fmull 0(%edx)
+faddp %st(0),%st(4)
+fadd %st(0),%st(4)
+fsubrp %st(0),%st(2)
+fldl -48(%edx)
+fmull 56(%edx)
+fldl -32(%edx)
+fmull 32(%edx)
+faddp %st(0),%st(2)
+fxch %st(5)
+fmull curve25519_athlon_scale
+fldl -88(%edx)
+fmull -8(%edx)
+faddp %st(0),%st(4)
+fldl curve25519_athlon_alpha51
+fadd %st(5),%st(0)
+fldl -40(%edx)
+fmull 48(%edx)
+faddp %st(0),%st(7)
+fldl -24(%edx)
+fmull 24(%edx)
+faddp %st(0),%st(3)
+fldl -96(%edx)
+fmull 8(%edx)
+faddp %st(0),%st(2)
+fldl -80(%edx)
+fmull -16(%edx)
+faddp %st(0),%st(5)
+fsubl curve25519_athlon_alpha51
+fxch %st(3)
+fstpl 48(%ecx)
+fldl -32(%edx)
+fmull 40(%edx)
+faddp %st(0),%st(6)
+fxch %st(1)
+fmull curve25519_athlon_scale
+fldl -88(%edx)
+fmull 0(%edx)
+faddp %st(0),%st(2)
+fxch %st(2)
+fadd %st(0),%st(3)
+fsubrp %st(0),%st(4)
+fldl -40(%edx)
+fmull 56(%edx)
+fldl -24(%edx)
+fmull 32(%edx)
+faddp %st(0),%st(6)
+fldl -96(%edx)
+fmull 16(%edx)
+faddp %st(0),%st(3)
+fldl -80(%edx)
+fmull -8(%edx)
+faddp %st(0),%st(2)
+fldl curve25519_athlon_alpha77
+fadd %st(4),%st(0)
+fldl -32(%edx)
+fmull 48(%edx)
+faddp %st(0),%st(2)
+fxch %st(6)
+fmull curve25519_athlon_scale
+fldl -88(%edx)
+fmull 8(%edx)
+faddp %st(0),%st(4)
+fldl -72(%edx)
+fmull -16(%edx)
+faddp %st(0),%st(3)
+fxch %st(6)
+fsubl curve25519_athlon_alpha77
+fxch %st(5)
+fstpl 56(%ecx)
+fldl -24(%edx)
+fmull 40(%edx)
+faddp %st(0),%st(1)
+fldl -96(%edx)
+fmull 24(%edx)
+faddp %st(0),%st(6)
+fldl -80(%edx)
+fmull 0(%edx)
+faddp %st(0),%st(3)
+fxch %st(4)
+fadd %st(0),%st(1)
+fsubrp %st(0),%st(3)
+fldl -32(%edx)
+fmull 56(%edx)
+fxch %st(4)
+fmull curve25519_athlon_scale
+fldl -88(%edx)
+fmull 16(%edx)
+faddp %st(0),%st(6)
+fldl -72(%edx)
+fmull -8(%edx)
+faddp %st(0),%st(3)
+fldl curve25519_athlon_alpha102
+fadd %st(2),%st(0)
+fldl -24(%edx)
+fmull 48(%edx)
+faddp %st(0),%st(6)
+fldl -96(%edx)
+fmull 32(%edx)
+faddp %st(0),%st(2)
+fldl -80(%edx)
+fmull 8(%edx)
+faddp %st(0),%st(7)
+fldl -64(%edx)
+fmull -16(%edx)
+faddp %st(0),%st(4)
+fsubl curve25519_athlon_alpha102
+fxch %st(4)
+fstpl -24(%edx)
+fxch %st(4)
+fmull curve25519_athlon_scale
+fldl -88(%edx)
+fmull 24(%edx)
+faddp %st(0),%st(5)
+fldl -72(%edx)
+fmull 0(%edx)
+faddp %st(0),%st(6)
+fxch %st(3)
+fadd %st(0),%st(2)
+fsubrp %st(0),%st(1)
+fldl -96(%edx)
+fmull 40(%edx)
+faddp %st(0),%st(3)
+fldl -80(%edx)
+fmull 16(%edx)
+faddp %st(0),%st(4)
+fldl -64(%edx)
+fmull -8(%edx)
+faddp %st(0),%st(5)
+fldl curve25519_athlon_alpha128
+fadd %st(2),%st(0)
+fldl -88(%edx)
+fmull 32(%edx)
+faddp %st(0),%st(4)
+fldl -72(%edx)
+fmull 8(%edx)
+faddp %st(0),%st(5)
+fldl -56(%edx)
+fmull -16(%edx)
+faddp %st(0),%st(6)
+fsubl curve25519_athlon_alpha128
+fxch %st(1)
+fstpl -96(%edx)
+fldl -80(%edx)
+fmull 24(%edx)
+faddp %st(0),%st(3)
+fldl -64(%edx)
+fmull 0(%edx)
+faddp %st(0),%st(4)
+fadd %st(0),%st(4)
+fsubrp %st(0),%st(1)
+fstpl -88(%edx)
+fldl -72(%edx)
+fmull 16(%edx)
+faddp %st(0),%st(1)
+fldl -56(%edx)
+fmull -8(%edx)
+faddp %st(0),%st(2)
+fldl curve25519_athlon_alpha153
+fadd %st(3),%st(0)
+fldl -64(%edx)
+fmull 8(%edx)
+faddp %st(0),%st(2)
+fldl -48(%edx)
+fmull -16(%edx)
+faddp %st(0),%st(3)
+fsubl curve25519_athlon_alpha153
+fldl -56(%edx)
+fmull 0(%edx)
+faddp %st(0),%st(2)
+fadd %st(0),%st(2)
+fsubrp %st(0),%st(3)
+fxch %st(2)
+fstpl -80(%edx)
+fldl -48(%edx)
+fmull -8(%edx)
+faddp %st(0),%st(2)
+fldl curve25519_athlon_alpha179
+fadd %st(1),%st(0)
+fldl -40(%edx)
+fmull -16(%edx)
+faddp %st(0),%st(3)
+fsubl curve25519_athlon_alpha179
+fldl 64(%ecx)
+fldl 72(%ecx)
+fxch %st(2)
+fadd %st(0),%st(4)
+fsubrp %st(0),%st(3)
+fldl curve25519_athlon_alpha204
+fadd %st(4),%st(0)
+fsubl curve25519_athlon_alpha204
+fadd %st(0),%st(1)
+fsubrp %st(0),%st(4)
+fldl curve25519_athlon_alpha230
+fadd %st(1),%st(0)
+fsubl curve25519_athlon_alpha230
+fsubr %st(0),%st(1)
+faddp %st(0),%st(2)
+fxch %st(2)
+fstpl -72(%edx)
+fxch %st(2)
+fstpl -64(%edx)
+fstpl -56(%edx)
+fstpl -48(%edx)
+fldl -104(%edx)
+fmull -120(%ecx)
+fmull curve25519_athlon_scale
+fldl 0(%esp)
+fmull -128(%ecx)
+faddp %st(0),%st(1)
+fldl 8(%esp)
+fmull 120(%edx)
+faddp %st(0),%st(1)
+fldl 0(%esp)
+fmull -120(%ecx)
+fldl 16(%esp)
+fmull 112(%edx)
+faddp %st(0),%st(2)
+fldl 8(%esp)
+fmull -128(%ecx)
+faddp %st(0),%st(1)
+fldl 24(%esp)
+fmull 104(%edx)
+faddp %st(0),%st(2)
+fldl 16(%esp)
+fmull 120(%edx)
+faddp %st(0),%st(1)
+fldl 32(%esp)
+fmull 96(%edx)
+faddp %st(0),%st(2)
+fldl 24(%esp)
+fmull 112(%edx)
+faddp %st(0),%st(1)
+fldl 8(%esp)
+fmull -120(%ecx)
+fldl 40(%esp)
+fmull 88(%edx)
+faddp %st(0),%st(3)
+fldl 32(%esp)
+fmull 104(%edx)
+faddp %st(0),%st(2)
+fldl 16(%esp)
+fmull -128(%ecx)
+faddp %st(0),%st(1)
+fldl 48(%esp)
+fmull 80(%edx)
+faddp %st(0),%st(3)
+fldl 40(%esp)
+fmull 96(%edx)
+faddp %st(0),%st(2)
+fldl 24(%esp)
+fmull 120(%edx)
+faddp %st(0),%st(1)
+fldl -120(%edx)
+fmull 72(%edx)
+faddp %st(0),%st(3)
+fldl 48(%esp)
+fmull 88(%edx)
+faddp %st(0),%st(2)
+fldl 32(%esp)
+fmull 112(%edx)
+faddp %st(0),%st(1)
+fldl -112(%edx)
+fmull 64(%edx)
+faddp %st(0),%st(3)
+fldl -120(%edx)
+fmull 80(%edx)
+faddp %st(0),%st(2)
+fldl 40(%esp)
+fmull 104(%edx)
+faddp %st(0),%st(1)
+fldl 16(%esp)
+fmull -120(%ecx)
+fldl 48(%esp)
+fmull 96(%edx)
+faddp %st(0),%st(2)
+fldl -112(%edx)
+fmull 72(%edx)
+faddp %st(0),%st(3)
+fldl curve25519_athlon_alpha230
+fadd %st(4),%st(0)
+fldl 24(%esp)
+fmull -128(%ecx)
+faddp %st(0),%st(2)
+fldl -120(%edx)
+fmull 88(%edx)
+faddp %st(0),%st(3)
+fldl -104(%edx)
+fmull 64(%edx)
+faddp %st(0),%st(4)
+fsubl curve25519_athlon_alpha230
+fldl 32(%esp)
+fmull 120(%edx)
+faddp %st(0),%st(2)
+fldl 24(%esp)
+fmull -120(%ecx)
+fldl -112(%edx)
+fmull 80(%edx)
+faddp %st(0),%st(4)
+fxch %st(1)
+fadd %st(0),%st(4)
+fldl 40(%esp)
+fmull 112(%edx)
+faddp %st(0),%st(3)
+fldl 32(%esp)
+fmull -128(%ecx)
+faddp %st(0),%st(2)
+fsubrp %st(0),%st(5)
+fldl curve25519_athlon_alpha255
+fadd %st(4),%st(0)
+fldl 48(%esp)
+fmull 104(%edx)
+faddp %st(0),%st(3)
+fldl 40(%esp)
+fmull 120(%edx)
+faddp %st(0),%st(2)
+fldl -104(%edx)
+fmull 72(%edx)
+faddp %st(0),%st(4)
+fsubl curve25519_athlon_alpha255
+fldl -120(%edx)
+fmull 96(%edx)
+faddp %st(0),%st(3)
+fldl 32(%esp)
+fmull -120(%ecx)
+fldl 48(%esp)
+fmull 112(%edx)
+faddp %st(0),%st(3)
+fldl -112(%edx)
+fmull 88(%edx)
+faddp %st(0),%st(4)
+fxch %st(1)
+fadd %st(0),%st(4)
+fsubrp %st(0),%st(5)
+fxch %st(5)
+fstpl 8(%edx)
+fldl 40(%esp)
+fmull -128(%ecx)
+faddp %st(0),%st(5)
+fldl -120(%edx)
+fmull 104(%edx)
+faddp %st(0),%st(1)
+fldl -104(%edx)
+fmull 80(%edx)
+faddp %st(0),%st(2)
+fxch %st(2)
+fmull curve25519_athlon_scale
+fldl 48(%esp)
+fmull 120(%edx)
+faddp %st(0),%st(5)
+fldl -112(%edx)
+fmull 96(%edx)
+faddp %st(0),%st(3)
+fxch %st(1)
+fmull curve25519_athlon_scale
+fldl 0(%esp)
+fmull 64(%edx)
+faddp %st(0),%st(2)
+fxch %st(3)
+fstpl 16(%edx)
+fldl 40(%esp)
+fmull -120(%ecx)
+fldl -120(%edx)
+fmull 112(%edx)
+faddp %st(0),%st(5)
+fldl -104(%edx)
+fmull 88(%edx)
+faddp %st(0),%st(3)
+fldl 0(%esp)
+fmull 72(%edx)
+faddp %st(0),%st(4)
+fldl curve25519_athlon_alpha26
+fadd %st(2),%st(0)
+fldl 48(%esp)
+fmull -128(%ecx)
+faddp %st(0),%st(2)
+fldl -112(%edx)
+fmull 104(%edx)
+faddp %st(0),%st(6)
+fxch %st(3)
+fmull curve25519_athlon_scale
+fldl 8(%esp)
+fmull 64(%edx)
+faddp %st(0),%st(5)
+fxch %st(3)
+fsubl curve25519_athlon_alpha26
+fldl -120(%edx)
+fmull 120(%edx)
+faddp %st(0),%st(2)
+fldl -104(%edx)
+fmull 96(%edx)
+faddp %st(0),%st(6)
+fldl 0(%esp)
+fmull 80(%edx)
+faddp %st(0),%st(4)
+fadd %st(0),%st(4)
+fsubrp %st(0),%st(2)
+fldl 48(%esp)
+fmull -120(%ecx)
+fldl -112(%edx)
+fmull 112(%edx)
+faddp %st(0),%st(2)
+fxch %st(5)
+fmull curve25519_athlon_scale
+fldl 8(%esp)
+fmull 72(%edx)
+faddp %st(0),%st(4)
+fldl curve25519_athlon_alpha51
+fadd %st(5),%st(0)
+fldl -120(%edx)
+fmull -128(%ecx)
+faddp %st(0),%st(7)
+fldl -104(%edx)
+fmull 104(%edx)
+faddp %st(0),%st(3)
+fldl 0(%esp)
+fmull 88(%edx)
+faddp %st(0),%st(2)
+fldl 16(%esp)
+fmull 64(%edx)
+faddp %st(0),%st(5)
+fsubl curve25519_athlon_alpha51
+fxch %st(3)
+fstpl -40(%edx)
+fldl -112(%edx)
+fmull 120(%edx)
+faddp %st(0),%st(6)
+fxch %st(1)
+fmull curve25519_athlon_scale
+fldl 8(%esp)
+fmull 80(%edx)
+faddp %st(0),%st(2)
+fxch %st(2)
+fadd %st(0),%st(3)
+fsubrp %st(0),%st(4)
+fldl -120(%edx)
+fmull -120(%ecx)
+fldl -104(%edx)
+fmull 112(%edx)
+faddp %st(0),%st(6)
+fldl 0(%esp)
+fmull 96(%edx)
+faddp %st(0),%st(3)
+fldl 16(%esp)
+fmull 72(%edx)
+faddp %st(0),%st(2)
+fldl curve25519_athlon_alpha77
+fadd %st(4),%st(0)
+fldl -112(%edx)
+fmull -128(%ecx)
+faddp %st(0),%st(2)
+fxch %st(6)
+fmull curve25519_athlon_scale
+fldl 8(%esp)
+fmull 88(%edx)
+faddp %st(0),%st(4)
+fldl 24(%esp)
+fmull 64(%edx)
+faddp %st(0),%st(3)
+fxch %st(6)
+fsubl curve25519_athlon_alpha77
+fxch %st(5)
+fstpl -32(%edx)
+fldl -104(%edx)
+fmull 120(%edx)
+faddp %st(0),%st(1)
+fldl 0(%esp)
+fmull 104(%edx)
+faddp %st(0),%st(6)
+fldl 16(%esp)
+fmull 80(%edx)
+faddp %st(0),%st(3)
+fxch %st(4)
+fadd %st(0),%st(1)
+fsubrp %st(0),%st(3)
+fldl -112(%edx)
+fmull -120(%ecx)
+fxch %st(4)
+fmull curve25519_athlon_scale
+fldl 8(%esp)
+fmull 96(%edx)
+faddp %st(0),%st(6)
+fldl 24(%esp)
+fmull 72(%edx)
+faddp %st(0),%st(3)
+fldl curve25519_athlon_alpha102
+fadd %st(2),%st(0)
+fldl -104(%edx)
+fmull -128(%ecx)
+faddp %st(0),%st(6)
+fldl 0(%esp)
+fmull 112(%edx)
+faddp %st(0),%st(2)
+fldl 16(%esp)
+fmull 88(%edx)
+faddp %st(0),%st(7)
+fldl 32(%esp)
+fmull 64(%edx)
+faddp %st(0),%st(4)
+fsubl curve25519_athlon_alpha102
+fxch %st(4)
+fstpl -104(%edx)
+fxch %st(4)
+fmull curve25519_athlon_scale
+fldl 8(%esp)
+fmull 104(%edx)
+faddp %st(0),%st(5)
+fldl 24(%esp)
+fmull 80(%edx)
+faddp %st(0),%st(6)
+fxch %st(3)
+fadd %st(0),%st(2)
+fsubrp %st(0),%st(1)
+fldl 0(%esp)
+fmull 120(%edx)
+faddp %st(0),%st(3)
+fldl 16(%esp)
+fmull 96(%edx)
+faddp %st(0),%st(4)
+fldl 32(%esp)
+fmull 72(%edx)
+faddp %st(0),%st(5)
+fldl curve25519_athlon_alpha128
+fadd %st(2),%st(0)
+fldl 8(%esp)
+fmull 112(%edx)
+faddp %st(0),%st(4)
+fldl 24(%esp)
+fmull 88(%edx)
+faddp %st(0),%st(5)
+fldl 40(%esp)
+fmull 64(%edx)
+faddp %st(0),%st(6)
+fsubl curve25519_athlon_alpha128
+fxch %st(1)
+fstpl -16(%edx)
+fldl 16(%esp)
+fmull 104(%edx)
+faddp %st(0),%st(3)
+fldl 32(%esp)
+fmull 80(%edx)
+faddp %st(0),%st(4)
+fadd %st(0),%st(4)
+fsubrp %st(0),%st(1)
+fstpl -8(%edx)
+fldl 24(%esp)
+fmull 96(%edx)
+faddp %st(0),%st(1)
+fldl 40(%esp)
+fmull 72(%edx)
+faddp %st(0),%st(2)
+fldl curve25519_athlon_alpha153
+fadd %st(3),%st(0)
+fldl 32(%esp)
+fmull 88(%edx)
+faddp %st(0),%st(2)
+fldl 48(%esp)
+fmull 64(%edx)
+faddp %st(0),%st(3)
+fsubl curve25519_athlon_alpha153
+fldl 40(%esp)
+fmull 80(%edx)
+faddp %st(0),%st(2)
+fadd %st(0),%st(2)
+fsubrp %st(0),%st(3)
+fxch %st(2)
+fstpl 0(%edx)
+fldl 48(%esp)
+fmull 72(%edx)
+faddp %st(0),%st(2)
+fldl curve25519_athlon_alpha179
+fadd %st(1),%st(0)
+fldl -120(%edx)
+fmull 64(%edx)
+faddp %st(0),%st(3)
+fsubl curve25519_athlon_alpha179
+fldl 8(%edx)
+fldl 16(%edx)
+fxch %st(2)
+fadd %st(0),%st(4)
+fsubrp %st(0),%st(3)
+fldl curve25519_athlon_alpha204
+fadd %st(4),%st(0)
+fsubl curve25519_athlon_alpha204
+fadd %st(0),%st(1)
+fsubrp %st(0),%st(4)
+fldl curve25519_athlon_alpha230
+fadd %st(1),%st(0)
+fsubl curve25519_athlon_alpha230
+fsubr %st(0),%st(1)
+faddp %st(0),%st(2)
+fxch %st(2)
+fstpl 8(%edx)
+fxch %st(2)
+fstpl 16(%edx)
+fstpl 24(%edx)
+fstpl 32(%edx)
+fldl -40(%ecx)
+fmul %st(0),%st(0)
+fldl -112(%ecx)
+fadd %st(0),%st(0)
+fldl -104(%ecx)
+fadd %st(0),%st(0)
+fldl -96(%ecx)
+fadd %st(0),%st(0)
+fldl -56(%ecx)
+fxch %st(4)
+fmull curve25519_athlon_scale
+fldl -40(%ecx)
+fmul %st(4),%st(0)
+fldl -48(%ecx)
+fmul %st(4),%st(0)
+faddp %st(0),%st(1)
+fxch %st(4)
+fstl 0(%esp)
+fxch %st(3)
+fstl 8(%esp)
+fxch %st(3)
+fmull -48(%ecx)
+faddp %st(0),%st(1)
+fldl -64(%ecx)
+fxch %st(5)
+fmul %st(0),%st(3)
+fxch %st(3)
+faddp %st(0),%st(1)
+fxch %st(2)
+fadd %st(0),%st(0)
+fldl -56(%ecx)
+fmul %st(2),%st(0)
+faddp %st(0),%st(4)
+fxch %st(1)
+fstl 16(%esp)
+fldl -72(%ecx)
+fxch %st(5)
+fmul %st(0),%st(1)
+fxch %st(1)
+faddp %st(0),%st(3)
+fadd %st(0),%st(0)
+fstpl 48(%esp)
+fldl -88(%ecx)
+fadd %st(0),%st(0)
+fstl 24(%esp)
+fldl -64(%ecx)
+fmul %st(1),%st(0)
+faddp %st(0),%st(4)
+fmul %st(4),%st(0)
+faddp %st(0),%st(2)
+fxch %st(3)
+fadd %st(0),%st(0)
+fstpl 40(%esp)
+fldl -80(%ecx)
+fmul %st(0),%st(0)
+faddp %st(0),%st(1)
+fldl curve25519_athlon_alpha230
+fadd %st(1),%st(0)
+fsubl curve25519_athlon_alpha230
+fsubr %st(0),%st(1)
+fldl 8(%esp)
+fldl -40(%ecx)
+fmul %st(0),%st(1)
+fldl 16(%esp)
+fmul %st(0),%st(1)
+fldl -48(%ecx)
+fmul %st(0),%st(1)
+fxch %st(1)
+faddp %st(0),%st(3)
+fldl 24(%esp)
+fmul %st(0),%st(1)
+fxch %st(1)
+faddp %st(0),%st(2)
+fldl -80(%ecx)
+fadd %st(0),%st(0)
+fstl 32(%esp)
+fmull -72(%ecx)
+faddp %st(0),%st(6)
+fxch %st(3)
+faddp %st(0),%st(5)
+fldl curve25519_athlon_alpha255
+fadd %st(5),%st(0)
+fsubl curve25519_athlon_alpha255
+fsubr %st(0),%st(5)
+fldl -56(%ecx)
+fmul %st(0),%st(4)
+fxch %st(4)
+faddp %st(0),%st(3)
+fldl 32(%esp)
+fmul %st(0),%st(4)
+fxch %st(4)
+faddp %st(0),%st(2)
+fldl -64(%ecx)
+fmul %st(0),%st(4)
+fxch %st(4)
+faddp %st(0),%st(3)
+fxch %st(3)
+fmull 40(%esp)
+faddp %st(0),%st(1)
+fxch %st(3)
+fstpl -120(%edx)
+fldl -72(%ecx)
+fmul %st(0),%st(0)
+faddp %st(0),%st(1)
+fxch %st(2)
+fmull curve25519_athlon_scale
+fxch %st(3)
+fstpl -112(%edx)
+faddp %st(0),%st(1)
+fmull curve25519_athlon_scale
+fldl 24(%esp)
+fmull -40(%ecx)
+fldl -112(%ecx)
+fmul %st(0),%st(0)
+faddp %st(0),%st(2)
+fldl 32(%esp)
+fmull -48(%ecx)
+faddp %st(0),%st(1)
+fldl 0(%esp)
+fmull -104(%ecx)
+faddp %st(0),%st(3)
+fldl 40(%esp)
+fmull -56(%ecx)
+faddp %st(0),%st(1)
+fldl curve25519_athlon_alpha26
+fadd %st(2),%st(0)
+fsubl curve25519_athlon_alpha26
+fsubr %st(0),%st(2)
+faddp %st(0),%st(3)
+fldl curve25519_athlon_alpha51
+fadd %st(3),%st(0)
+fsubl curve25519_athlon_alpha51
+fsubr %st(0),%st(3)
+fldl -64(%ecx)
+fmul %st(0),%st(0)
+faddp %st(0),%st(2)
+fxch %st(1)
+fmull curve25519_athlon_scale
+fldl 0(%esp)
+fmull -96(%ecx)
+faddp %st(0),%st(1)
+fldl -104(%ecx)
+fmul %st(0),%st(0)
+faddp %st(0),%st(1)
+faddp %st(0),%st(1)
+fldl curve25519_athlon_alpha77
+fadd %st(1),%st(0)
+fsubl curve25519_athlon_alpha77
+fsubr %st(0),%st(1)
+fxch %st(2)
+fstpl 64(%edx)
+fldl 32(%esp)
+fmull -40(%ecx)
+fldl 40(%esp)
+fmull -48(%ecx)
+faddp %st(0),%st(1)
+fldl 48(%esp)
+fmull -56(%ecx)
+faddp %st(0),%st(1)
+fmull curve25519_athlon_scale
+fldl 0(%esp)
+fmull -88(%ecx)
+faddp %st(0),%st(1)
+fldl 8(%esp)
+fmull -96(%ecx)
+faddp %st(0),%st(1)
+faddp %st(0),%st(2)
+fldl curve25519_athlon_alpha102
+fadd %st(2),%st(0)
+fsubl curve25519_athlon_alpha102
+fsubr %st(0),%st(2)
+fxch %st(3)
+fstpl 72(%edx)
+fldl 40(%esp)
+fmull -40(%ecx)
+fldl 48(%esp)
+fmull -48(%ecx)
+faddp %st(0),%st(1)
+fldl -56(%ecx)
+fmul %st(0),%st(0)
+faddp %st(0),%st(1)
+fmull curve25519_athlon_scale
+fldl 0(%esp)
+fmull -80(%ecx)
+faddp %st(0),%st(1)
+fldl 8(%esp)
+fmull -88(%ecx)
+faddp %st(0),%st(1)
+fldl -96(%ecx)
+fmul %st(0),%st(0)
+faddp %st(0),%st(1)
+faddp %st(0),%st(3)
+fldl curve25519_athlon_alpha128
+fadd %st(3),%st(0)
+fsubl curve25519_athlon_alpha128
+fsubr %st(0),%st(3)
+fxch %st(1)
+fstpl 80(%edx)
+fldl 48(%esp)
+fldl -40(%ecx)
+fmul %st(0),%st(1)
+fmul %st(5),%st(0)
+fxch %st(5)
+fmull -48(%ecx)
+faddp %st(0),%st(1)
+fmull curve25519_athlon_scale
+fldl 0(%esp)
+fmull -72(%ecx)
+faddp %st(0),%st(1)
+fldl 8(%esp)
+fmull -80(%ecx)
+faddp %st(0),%st(1)
+fldl 16(%esp)
+fmull -88(%ecx)
+faddp %st(0),%st(1)
+faddp %st(0),%st(1)
+fldl curve25519_athlon_alpha153
+fadd %st(1),%st(0)
+fsubl curve25519_athlon_alpha153
+fsubr %st(0),%st(1)
+fxch %st(2)
+fstpl 88(%edx)
+fldl -48(%ecx)
+fmul %st(0),%st(0)
+faddp %st(0),%st(4)
+fxch %st(3)
+fmull curve25519_athlon_scale
+fldl 0(%esp)
+fmull -64(%ecx)
+faddp %st(0),%st(1)
+fldl 8(%esp)
+fmull -72(%ecx)
+faddp %st(0),%st(1)
+fldl 16(%esp)
+fmull -80(%ecx)
+faddp %st(0),%st(1)
+fldl -88(%ecx)
+fmul %st(0),%st(0)
+faddp %st(0),%st(1)
+faddp %st(0),%st(1)
+fldl curve25519_athlon_alpha179
+fadd %st(1),%st(0)
+fsubl curve25519_athlon_alpha179
+fsubr %st(0),%st(1)
+fldl -48(%ecx)
+fadd %st(0),%st(0)
+fmull -40(%ecx)
+fmull curve25519_athlon_scale
+fldl 0(%esp)
+fmull -56(%ecx)
+faddp %st(0),%st(1)
+fldl 8(%esp)
+fmull -64(%ecx)
+faddp %st(0),%st(1)
+fldl 16(%esp)
+fmull -72(%ecx)
+faddp %st(0),%st(1)
+fldl 24(%esp)
+fmull -80(%ecx)
+faddp %st(0),%st(1)
+faddp %st(0),%st(1)
+fldl curve25519_athlon_alpha204
+fadd %st(1),%st(0)
+fsubl curve25519_athlon_alpha204
+fsubr %st(0),%st(1)
+fldl -120(%edx)
+faddp %st(0),%st(1)
+fldl curve25519_athlon_alpha230
+fadd %st(1),%st(0)
+fldl -112(%edx)
+fxch %st(1)
+fsubl curve25519_athlon_alpha230
+fsubr %st(0),%st(2)
+faddp %st(0),%st(1)
+fxch %st(4)
+fstpl 96(%edx)
+fxch %st(4)
+fstpl 104(%edx)
+fxch %st(1)
+fstpl 112(%edx)
+fstpl 120(%edx)
+fxch %st(1)
+fstpl -128(%ecx)
+fstpl -120(%ecx)
+fldl 40(%ecx)
+fmul %st(0),%st(0)
+fldl -32(%ecx)
+fadd %st(0),%st(0)
+fldl -24(%ecx)
+fadd %st(0),%st(0)
+fldl -16(%ecx)
+fadd %st(0),%st(0)
+fldl 24(%ecx)
+fxch %st(4)
+fmull curve25519_athlon_scale
+fldl 40(%ecx)
+fmul %st(4),%st(0)
+fldl 32(%ecx)
+fmul %st(4),%st(0)
+faddp %st(0),%st(1)
+fxch %st(4)
+fstl 0(%esp)
+fxch %st(3)
+fstl 8(%esp)
+fxch %st(3)
+fmull 32(%ecx)
+faddp %st(0),%st(1)
+fldl 16(%ecx)
+fxch %st(5)
+fmul %st(0),%st(3)
+fxch %st(3)
+faddp %st(0),%st(1)
+fxch %st(2)
+fadd %st(0),%st(0)
+fldl 24(%ecx)
+fmul %st(2),%st(0)
+faddp %st(0),%st(4)
+fxch %st(1)
+fstl 16(%esp)
+fldl 8(%ecx)
+fxch %st(5)
+fmul %st(0),%st(1)
+fxch %st(1)
+faddp %st(0),%st(3)
+fadd %st(0),%st(0)
+fstpl 48(%esp)
+fldl -8(%ecx)
+fadd %st(0),%st(0)
+fstl 24(%esp)
+fldl 16(%ecx)
+fmul %st(1),%st(0)
+faddp %st(0),%st(4)
+fmul %st(4),%st(0)
+faddp %st(0),%st(2)
+fxch %st(3)
+fadd %st(0),%st(0)
+fstpl 40(%esp)
+fldl 0(%ecx)
+fmul %st(0),%st(0)
+faddp %st(0),%st(1)
+fldl curve25519_athlon_alpha230
+fadd %st(1),%st(0)
+fsubl curve25519_athlon_alpha230
+fsubr %st(0),%st(1)
+fldl 8(%esp)
+fldl 40(%ecx)
+fmul %st(0),%st(1)
+fldl 16(%esp)
+fmul %st(0),%st(1)
+fldl 32(%ecx)
+fmul %st(0),%st(1)
+fxch %st(1)
+faddp %st(0),%st(3)
+fldl 24(%esp)
+fmul %st(0),%st(1)
+fxch %st(1)
+faddp %st(0),%st(2)
+fldl 0(%ecx)
+fadd %st(0),%st(0)
+fstl 32(%esp)
+fmull 8(%ecx)
+faddp %st(0),%st(6)
+fxch %st(3)
+faddp %st(0),%st(5)
+fldl curve25519_athlon_alpha255
+fadd %st(5),%st(0)
+fsubl curve25519_athlon_alpha255
+fsubr %st(0),%st(5)
+fldl 24(%ecx)
+fmul %st(0),%st(4)
+fxch %st(4)
+faddp %st(0),%st(3)
+fldl 32(%esp)
+fmul %st(0),%st(4)
+fxch %st(4)
+faddp %st(0),%st(2)
+fldl 16(%ecx)
+fmul %st(0),%st(4)
+fxch %st(4)
+faddp %st(0),%st(3)
+fxch %st(3)
+fmull 40(%esp)
+faddp %st(0),%st(1)
+fxch %st(3)
+fstpl -120(%edx)
+fldl 8(%ecx)
+fmul %st(0),%st(0)
+faddp %st(0),%st(1)
+fxch %st(2)
+fmull curve25519_athlon_scale
+fxch %st(3)
+fstpl -112(%edx)
+faddp %st(0),%st(1)
+fmull curve25519_athlon_scale
+fldl 24(%esp)
+fmull 40(%ecx)
+fldl -32(%ecx)
+fmul %st(0),%st(0)
+faddp %st(0),%st(2)
+fldl 32(%esp)
+fmull 32(%ecx)
+faddp %st(0),%st(1)
+fldl 0(%esp)
+fmull -24(%ecx)
+faddp %st(0),%st(3)
+fldl 40(%esp)
+fmull 24(%ecx)
+faddp %st(0),%st(1)
+fldl curve25519_athlon_alpha26
+fadd %st(2),%st(0)
+fsubl curve25519_athlon_alpha26
+fsubr %st(0),%st(2)
+faddp %st(0),%st(3)
+fldl curve25519_athlon_alpha51
+fadd %st(3),%st(0)
+fsubl curve25519_athlon_alpha51
+fsubr %st(0),%st(3)
+fldl 16(%ecx)
+fmul %st(0),%st(0)
+faddp %st(0),%st(2)
+fxch %st(1)
+fmull curve25519_athlon_scale
+fldl 0(%esp)
+fmull -16(%ecx)
+faddp %st(0),%st(1)
+fldl -24(%ecx)
+fmul %st(0),%st(0)
+faddp %st(0),%st(1)
+faddp %st(0),%st(1)
+fldl curve25519_athlon_alpha77
+fadd %st(1),%st(0)
+fsubl curve25519_athlon_alpha77
+fsubr %st(0),%st(1)
+fxch %st(2)
+fstpl -112(%ecx)
+fldl 32(%esp)
+fmull 40(%ecx)
+fldl 40(%esp)
+fmull 32(%ecx)
+faddp %st(0),%st(1)
+fldl 48(%esp)
+fmull 24(%ecx)
+faddp %st(0),%st(1)
+fmull curve25519_athlon_scale
+fldl 0(%esp)
+fmull -8(%ecx)
+faddp %st(0),%st(1)
+fldl 8(%esp)
+fmull -16(%ecx)
+faddp %st(0),%st(1)
+faddp %st(0),%st(2)
+fldl curve25519_athlon_alpha102
+fadd %st(2),%st(0)
+fsubl curve25519_athlon_alpha102
+fsubr %st(0),%st(2)
+fxch %st(3)
+fstpl -104(%ecx)
+fldl 40(%esp)
+fmull 40(%ecx)
+fldl 48(%esp)
+fmull 32(%ecx)
+faddp %st(0),%st(1)
+fldl 24(%ecx)
+fmul %st(0),%st(0)
+faddp %st(0),%st(1)
+fmull curve25519_athlon_scale
+fldl 0(%esp)
+fmull 0(%ecx)
+faddp %st(0),%st(1)
+fldl 8(%esp)
+fmull -8(%ecx)
+faddp %st(0),%st(1)
+fldl -16(%ecx)
+fmul %st(0),%st(0)
+faddp %st(0),%st(1)
+faddp %st(0),%st(3)
+fldl curve25519_athlon_alpha128
+fadd %st(3),%st(0)
+fsubl curve25519_athlon_alpha128
+fsubr %st(0),%st(3)
+fxch %st(1)
+fstpl -96(%ecx)
+fldl 48(%esp)
+fldl 40(%ecx)
+fmul %st(0),%st(1)
+fmul %st(5),%st(0)
+fxch %st(5)
+fmull 32(%ecx)
+faddp %st(0),%st(1)
+fmull curve25519_athlon_scale
+fldl 0(%esp)
+fmull 8(%ecx)
+faddp %st(0),%st(1)
+fldl 8(%esp)
+fmull 0(%ecx)
+faddp %st(0),%st(1)
+fldl 16(%esp)
+fmull -8(%ecx)
+faddp %st(0),%st(1)
+faddp %st(0),%st(1)
+fldl curve25519_athlon_alpha153
+fadd %st(1),%st(0)
+fsubl curve25519_athlon_alpha153
+fsubr %st(0),%st(1)
+fxch %st(2)
+fstpl -88(%ecx)
+fldl 32(%ecx)
+fmul %st(0),%st(0)
+faddp %st(0),%st(4)
+fxch %st(3)
+fmull curve25519_athlon_scale
+fldl 0(%esp)
+fmull 16(%ecx)
+faddp %st(0),%st(1)
+fldl 8(%esp)
+fmull 8(%ecx)
+faddp %st(0),%st(1)
+fldl 16(%esp)
+fmull 0(%ecx)
+faddp %st(0),%st(1)
+fldl -8(%ecx)
+fmul %st(0),%st(0)
+faddp %st(0),%st(1)
+faddp %st(0),%st(1)
+fldl curve25519_athlon_alpha179
+fadd %st(1),%st(0)
+fsubl curve25519_athlon_alpha179
+fsubr %st(0),%st(1)
+fldl 32(%ecx)
+fadd %st(0),%st(0)
+fmull 40(%ecx)
+fmull curve25519_athlon_scale
+fldl 0(%esp)
+fmull 24(%ecx)
+faddp %st(0),%st(1)
+fldl 8(%esp)
+fmull 16(%ecx)
+faddp %st(0),%st(1)
+fldl 16(%esp)
+fmull 8(%ecx)
+faddp %st(0),%st(1)
+fldl 24(%esp)
+fmull 0(%ecx)
+faddp %st(0),%st(1)
+faddp %st(0),%st(1)
+fldl curve25519_athlon_alpha204
+fadd %st(1),%st(0)
+fsubl curve25519_athlon_alpha204
+fsubr %st(0),%st(1)
+fldl -120(%edx)
+faddp %st(0),%st(1)
+fldl curve25519_athlon_alpha230
+fadd %st(1),%st(0)
+fldl -112(%edx)
+fxch %st(1)
+fsubl curve25519_athlon_alpha230
+fsubr %st(0),%st(2)
+faddp %st(0),%st(1)
+fxch %st(4)
+fstpl -80(%ecx)
+fxch %st(4)
+fstpl -72(%ecx)
+fxch %st(1)
+fstpl -64(%ecx)
+fstpl -56(%ecx)
+fxch %st(1)
+fstpl -48(%ecx)
+fstpl -40(%ecx)
+fldl -40(%edx)
+fldl 48(%ecx)
+fadd %st(0),%st(1)
+fsubl -40(%edx)
+fxch %st(1)
+fstpl -120(%edx)
+fstpl -40(%edx)
+fldl -32(%edx)
+fldl 56(%ecx)
+fadd %st(0),%st(1)
+fsubl -32(%edx)
+fxch %st(1)
+fstpl -112(%edx)
+fstpl -32(%edx)
+fldl -104(%edx)
+fldl -24(%edx)
+fadd %st(0),%st(1)
+fsubl -104(%edx)
+fxch %st(1)
+fstpl -104(%edx)
+fstpl -24(%edx)
+fldl -16(%edx)
+fldl -96(%edx)
+fadd %st(0),%st(1)
+fsubl -16(%edx)
+fxch %st(1)
+fstpl -96(%edx)
+fstpl -16(%edx)
+fldl -8(%edx)
+fldl -88(%edx)
+fadd %st(0),%st(1)
+fsubl -8(%edx)
+fxch %st(1)
+fstpl -88(%edx)
+fstpl -8(%edx)
+fldl 0(%edx)
+fldl -80(%edx)
+fadd %st(0),%st(1)
+fsubl 0(%edx)
+fxch %st(1)
+fstpl -80(%edx)
+fstpl 0(%edx)
+fldl 8(%edx)
+fldl -72(%edx)
+fadd %st(0),%st(1)
+fsubl 8(%edx)
+fxch %st(1)
+fstpl -72(%edx)
+fstpl 8(%edx)
+fldl 16(%edx)
+fldl -64(%edx)
+fadd %st(0),%st(1)
+fsubl 16(%edx)
+fxch %st(1)
+fstpl -64(%edx)
+fstpl 16(%edx)
+fldl 24(%edx)
+fldl -56(%edx)
+fadd %st(0),%st(1)
+fsubl 24(%edx)
+fxch %st(1)
+fstpl -56(%edx)
+fstpl 24(%edx)
+fldl 32(%edx)
+fldl -48(%edx)
+fadd %st(0),%st(1)
+fsubl 32(%edx)
+fxch %st(1)
+fstpl -48(%edx)
+fstpl 32(%edx)
+fldl 64(%edx)
+fsubl -112(%ecx)
+fstpl -32(%ecx)
+fldl 72(%edx)
+fsubl -104(%ecx)
+fstpl -24(%ecx)
+fldl 80(%edx)
+fsubl -96(%ecx)
+fstpl -16(%ecx)
+fldl 88(%edx)
+fsubl -88(%ecx)
+fstpl -8(%ecx)
+fldl 96(%edx)
+fsubl -80(%ecx)
+fstpl 0(%ecx)
+fldl 104(%edx)
+fsubl -72(%ecx)
+fstpl 8(%ecx)
+fldl 112(%edx)
+fsubl -64(%ecx)
+fstpl 16(%ecx)
+fldl 120(%edx)
+fsubl -56(%ecx)
+fstpl 24(%ecx)
+fldl -128(%ecx)
+fsubl -48(%ecx)
+fstpl 32(%ecx)
+fldl -120(%ecx)
+fsubl -40(%ecx)
+fstpl 40(%ecx)
+fldl -48(%edx)
+fmul %st(0),%st(0)
+fldl -120(%edx)
+fadd %st(0),%st(0)
+fldl -112(%edx)
+fadd %st(0),%st(0)
+fldl -104(%edx)
+fadd %st(0),%st(0)
+fldl -64(%edx)
+fxch %st(4)
+fmull curve25519_athlon_scale
+fldl -48(%edx)
+fmul %st(4),%st(0)
+fldl -56(%edx)
+fmul %st(4),%st(0)
+faddp %st(0),%st(1)
+fxch %st(4)
+fstl 0(%esp)
+fxch %st(3)
+fstl 8(%esp)
+fxch %st(3)
+fmull -56(%edx)
+faddp %st(0),%st(1)
+fldl -72(%edx)
+fxch %st(5)
+fmul %st(0),%st(3)
+fxch %st(3)
+faddp %st(0),%st(1)
+fxch %st(2)
+fadd %st(0),%st(0)
+fldl -64(%edx)
+fmul %st(2),%st(0)
+faddp %st(0),%st(4)
+fxch %st(1)
+fstl 16(%esp)
+fldl -80(%edx)
+fxch %st(5)
+fmul %st(0),%st(1)
+fxch %st(1)
+faddp %st(0),%st(3)
+fadd %st(0),%st(0)
+fstpl 48(%esp)
+fldl -96(%edx)
+fadd %st(0),%st(0)
+fstl 24(%esp)
+fldl -72(%edx)
+fmul %st(1),%st(0)
+faddp %st(0),%st(4)
+fmul %st(4),%st(0)
+faddp %st(0),%st(2)
+fxch %st(3)
+fadd %st(0),%st(0)
+fstpl 40(%esp)
+fldl -88(%edx)
+fmul %st(0),%st(0)
+faddp %st(0),%st(1)
+fldl curve25519_athlon_alpha230
+fadd %st(1),%st(0)
+fsubl curve25519_athlon_alpha230
+fsubr %st(0),%st(1)
+fldl 8(%esp)
+fldl -48(%edx)
+fmul %st(0),%st(1)
+fldl 16(%esp)
+fmul %st(0),%st(1)
+fldl -56(%edx)
+fmul %st(0),%st(1)
+fxch %st(1)
+faddp %st(0),%st(3)
+fldl 24(%esp)
+fmul %st(0),%st(1)
+fxch %st(1)
+faddp %st(0),%st(2)
+fldl -88(%edx)
+fadd %st(0),%st(0)
+fstl 32(%esp)
+fmull -80(%edx)
+faddp %st(0),%st(6)
+fxch %st(3)
+faddp %st(0),%st(5)
+fldl curve25519_athlon_alpha255
+fadd %st(5),%st(0)
+fsubl curve25519_athlon_alpha255
+fsubr %st(0),%st(5)
+fldl -64(%edx)
+fmul %st(0),%st(4)
+fxch %st(4)
+faddp %st(0),%st(3)
+fldl 32(%esp)
+fmul %st(0),%st(4)
+fxch %st(4)
+faddp %st(0),%st(2)
+fldl -72(%edx)
+fmul %st(0),%st(4)
+fxch %st(4)
+faddp %st(0),%st(3)
+fxch %st(3)
+fmull 40(%esp)
+faddp %st(0),%st(1)
+fxch %st(3)
+fstpl 48(%edx)
+fldl -80(%edx)
+fmul %st(0),%st(0)
+faddp %st(0),%st(1)
+fxch %st(2)
+fmull curve25519_athlon_scale
+fxch %st(3)
+fstpl 56(%edx)
+faddp %st(0),%st(1)
+fmull curve25519_athlon_scale
+fldl 24(%esp)
+fmull -48(%edx)
+fldl -120(%edx)
+fmul %st(0),%st(0)
+faddp %st(0),%st(2)
+fldl 32(%esp)
+fmull -56(%edx)
+faddp %st(0),%st(1)
+fldl 0(%esp)
+fmull -112(%edx)
+faddp %st(0),%st(3)
+fldl 40(%esp)
+fmull -64(%edx)
+faddp %st(0),%st(1)
+fldl curve25519_athlon_alpha26
+fadd %st(2),%st(0)
+fsubl curve25519_athlon_alpha26
+fsubr %st(0),%st(2)
+faddp %st(0),%st(3)
+fldl curve25519_athlon_alpha51
+fadd %st(3),%st(0)
+fsubl curve25519_athlon_alpha51
+fsubr %st(0),%st(3)
+fldl -72(%edx)
+fmul %st(0),%st(0)
+faddp %st(0),%st(2)
+fxch %st(1)
+fmull curve25519_athlon_scale
+fldl 0(%esp)
+fmull -104(%edx)
+faddp %st(0),%st(1)
+fldl -112(%edx)
+fmul %st(0),%st(0)
+faddp %st(0),%st(1)
+faddp %st(0),%st(1)
+fldl curve25519_athlon_alpha77
+fadd %st(1),%st(0)
+fsubl curve25519_athlon_alpha77
+fsubr %st(0),%st(1)
+fxch %st(2)
+fstpl -120(%edx)
+fldl 32(%esp)
+fmull -48(%edx)
+fldl 40(%esp)
+fmull -56(%edx)
+faddp %st(0),%st(1)
+fldl 48(%esp)
+fmull -64(%edx)
+faddp %st(0),%st(1)
+fmull curve25519_athlon_scale
+fldl 0(%esp)
+fmull -96(%edx)
+faddp %st(0),%st(1)
+fldl 8(%esp)
+fmull -104(%edx)
+faddp %st(0),%st(1)
+faddp %st(0),%st(2)
+fldl curve25519_athlon_alpha102
+fadd %st(2),%st(0)
+fsubl curve25519_athlon_alpha102
+fsubr %st(0),%st(2)
+fxch %st(3)
+fstpl -112(%edx)
+fldl 40(%esp)
+fmull -48(%edx)
+fldl 48(%esp)
+fmull -56(%edx)
+faddp %st(0),%st(1)
+fldl -64(%edx)
+fmul %st(0),%st(0)
+faddp %st(0),%st(1)
+fmull curve25519_athlon_scale
+fldl 0(%esp)
+fmull -88(%edx)
+faddp %st(0),%st(1)
+fldl 8(%esp)
+fmull -96(%edx)
+faddp %st(0),%st(1)
+fldl -104(%edx)
+fmul %st(0),%st(0)
+faddp %st(0),%st(1)
+faddp %st(0),%st(3)
+fldl curve25519_athlon_alpha128
+fadd %st(3),%st(0)
+fsubl curve25519_athlon_alpha128
+fsubr %st(0),%st(3)
+fxch %st(1)
+fstpl -104(%edx)
+fldl 48(%esp)
+fldl -48(%edx)
+fmul %st(0),%st(1)
+fmul %st(5),%st(0)
+fxch %st(5)
+fmull -56(%edx)
+faddp %st(0),%st(1)
+fmull curve25519_athlon_scale
+fldl 0(%esp)
+fmull -80(%edx)
+faddp %st(0),%st(1)
+fldl 8(%esp)
+fmull -88(%edx)
+faddp %st(0),%st(1)
+fldl 16(%esp)
+fmull -96(%edx)
+faddp %st(0),%st(1)
+faddp %st(0),%st(1)
+fldl curve25519_athlon_alpha153
+fadd %st(1),%st(0)
+fsubl curve25519_athlon_alpha153
+fsubr %st(0),%st(1)
+fxch %st(2)
+fstpl 40(%edx)
+fldl -56(%edx)
+fmul %st(0),%st(0)
+faddp %st(0),%st(4)
+fxch %st(3)
+fmull curve25519_athlon_scale
+fldl 0(%esp)
+fmull -72(%edx)
+faddp %st(0),%st(1)
+fldl 8(%esp)
+fmull -80(%edx)
+faddp %st(0),%st(1)
+fldl 16(%esp)
+fmull -88(%edx)
+faddp %st(0),%st(1)
+fldl -96(%edx)
+fmul %st(0),%st(0)
+faddp %st(0),%st(1)
+faddp %st(0),%st(1)
+fldl curve25519_athlon_alpha179
+fadd %st(1),%st(0)
+fsubl curve25519_athlon_alpha179
+fsubr %st(0),%st(1)
+fldl -56(%edx)
+fadd %st(0),%st(0)
+fmull -48(%edx)
+fmull curve25519_athlon_scale
+fldl 0(%esp)
+fmull -64(%edx)
+faddp %st(0),%st(1)
+fldl 8(%esp)
+fmull -72(%edx)
+faddp %st(0),%st(1)
+fldl 16(%esp)
+fmull -80(%edx)
+faddp %st(0),%st(1)
+fldl 24(%esp)
+fmull -88(%edx)
+faddp %st(0),%st(1)
+faddp %st(0),%st(1)
+fldl curve25519_athlon_alpha204
+fadd %st(1),%st(0)
+fsubl curve25519_athlon_alpha204
+fsubr %st(0),%st(1)
+fldl 48(%edx)
+faddp %st(0),%st(1)
+fldl curve25519_athlon_alpha230
+fadd %st(1),%st(0)
+fldl 56(%edx)
+fxch %st(1)
+fsubl curve25519_athlon_alpha230
+fsubr %st(0),%st(2)
+faddp %st(0),%st(1)
+fxch %st(4)
+fstpl -96(%edx)
+fxch %st(4)
+fstpl -88(%edx)
+fxch %st(1)
+fstpl -80(%edx)
+fstpl -72(%edx)
+fxch %st(1)
+fstpl -64(%edx)
+fstpl -56(%edx)
+fldl 32(%edx)
+fmul %st(0),%st(0)
+fldl -40(%edx)
+fadd %st(0),%st(0)
+fldl -32(%edx)
+fadd %st(0),%st(0)
+fldl -24(%edx)
+fadd %st(0),%st(0)
+fldl 16(%edx)
+fxch %st(4)
+fmull curve25519_athlon_scale
+fldl 32(%edx)
+fmul %st(4),%st(0)
+fldl 24(%edx)
+fmul %st(4),%st(0)
+faddp %st(0),%st(1)
+fxch %st(4)
+fstl 0(%esp)
+fxch %st(3)
+fstl 8(%esp)
+fxch %st(3)
+fmull 24(%edx)
+faddp %st(0),%st(1)
+fldl 8(%edx)
+fxch %st(5)
+fmul %st(0),%st(3)
+fxch %st(3)
+faddp %st(0),%st(1)
+fxch %st(2)
+fadd %st(0),%st(0)
+fldl 16(%edx)
+fmul %st(2),%st(0)
+faddp %st(0),%st(4)
+fxch %st(1)
+fstl 16(%esp)
+fldl 0(%edx)
+fxch %st(5)
+fmul %st(0),%st(1)
+fxch %st(1)
+faddp %st(0),%st(3)
+fadd %st(0),%st(0)
+fstpl 48(%esp)
+fldl -16(%edx)
+fadd %st(0),%st(0)
+fstl 24(%esp)
+fldl 8(%edx)
+fmul %st(1),%st(0)
+faddp %st(0),%st(4)
+fmul %st(4),%st(0)
+faddp %st(0),%st(2)
+fxch %st(3)
+fadd %st(0),%st(0)
+fstpl 40(%esp)
+fldl -8(%edx)
+fmul %st(0),%st(0)
+faddp %st(0),%st(1)
+fldl curve25519_athlon_alpha230
+fadd %st(1),%st(0)
+fsubl curve25519_athlon_alpha230
+fsubr %st(0),%st(1)
+fldl 8(%esp)
+fldl 32(%edx)
+fmul %st(0),%st(1)
+fldl 16(%esp)
+fmul %st(0),%st(1)
+fldl 24(%edx)
+fmul %st(0),%st(1)
+fxch %st(1)
+faddp %st(0),%st(3)
+fldl 24(%esp)
+fmul %st(0),%st(1)
+fxch %st(1)
+faddp %st(0),%st(2)
+fldl -8(%edx)
+fadd %st(0),%st(0)
+fstl 32(%esp)
+fmull 0(%edx)
+faddp %st(0),%st(6)
+fxch %st(3)
+faddp %st(0),%st(5)
+fldl curve25519_athlon_alpha255
+fadd %st(5),%st(0)
+fsubl curve25519_athlon_alpha255
+fsubr %st(0),%st(5)
+fldl 16(%edx)
+fmul %st(0),%st(4)
+fxch %st(4)
+faddp %st(0),%st(3)
+fldl 32(%esp)
+fmul %st(0),%st(4)
+fxch %st(4)
+faddp %st(0),%st(2)
+fldl 8(%edx)
+fmul %st(0),%st(4)
+fxch %st(4)
+faddp %st(0),%st(3)
+fxch %st(3)
+fmull 40(%esp)
+faddp %st(0),%st(1)
+fxch %st(3)
+fstpl -48(%edx)
+fldl 0(%edx)
+fmul %st(0),%st(0)
+faddp %st(0),%st(1)
+fxch %st(2)
+fmull curve25519_athlon_scale
+fxch %st(3)
+fstpl 48(%edx)
+faddp %st(0),%st(1)
+fmull curve25519_athlon_scale
+fldl 24(%esp)
+fmull 32(%edx)
+fldl -40(%edx)
+fmul %st(0),%st(0)
+faddp %st(0),%st(2)
+fldl 32(%esp)
+fmull 24(%edx)
+faddp %st(0),%st(1)
+fldl 0(%esp)
+fmull -32(%edx)
+faddp %st(0),%st(3)
+fldl 40(%esp)
+fmull 16(%edx)
+faddp %st(0),%st(1)
+fldl curve25519_athlon_alpha26
+fadd %st(2),%st(0)
+fsubl curve25519_athlon_alpha26
+fsubr %st(0),%st(2)
+faddp %st(0),%st(3)
+fldl curve25519_athlon_alpha51
+fadd %st(3),%st(0)
+fsubl curve25519_athlon_alpha51
+fsubr %st(0),%st(3)
+fldl 8(%edx)
+fmul %st(0),%st(0)
+faddp %st(0),%st(2)
+fxch %st(1)
+fmull curve25519_athlon_scale
+fldl 0(%esp)
+fmull -24(%edx)
+faddp %st(0),%st(1)
+fldl -32(%edx)
+fmul %st(0),%st(0)
+faddp %st(0),%st(1)
+faddp %st(0),%st(1)
+fldl curve25519_athlon_alpha77
+fadd %st(1),%st(0)
+fsubl curve25519_athlon_alpha77
+fsubr %st(0),%st(1)
+fxch %st(2)
+fstpl 56(%ecx)
+fldl 32(%esp)
+fmull 32(%edx)
+fldl 40(%esp)
+fmull 24(%edx)
+faddp %st(0),%st(1)
+fldl 48(%esp)
+fmull 16(%edx)
+faddp %st(0),%st(1)
+fmull curve25519_athlon_scale
+fldl 0(%esp)
+fmull -16(%edx)
+faddp %st(0),%st(1)
+fldl 8(%esp)
+fmull -24(%edx)
+faddp %st(0),%st(1)
+faddp %st(0),%st(2)
+fldl curve25519_athlon_alpha102
+fadd %st(2),%st(0)
+fsubl curve25519_athlon_alpha102
+fsubr %st(0),%st(2)
+fxch %st(3)
+fstpl 64(%ecx)
+fldl 40(%esp)
+fmull 32(%edx)
+fldl 48(%esp)
+fmull 24(%edx)
+faddp %st(0),%st(1)
+fldl 16(%edx)
+fmul %st(0),%st(0)
+faddp %st(0),%st(1)
+fmull curve25519_athlon_scale
+fldl 0(%esp)
+fmull -8(%edx)
+faddp %st(0),%st(1)
+fldl 8(%esp)
+fmull -16(%edx)
+faddp %st(0),%st(1)
+fldl -24(%edx)
+fmul %st(0),%st(0)
+faddp %st(0),%st(1)
+faddp %st(0),%st(3)
+fldl curve25519_athlon_alpha128
+fadd %st(3),%st(0)
+fsubl curve25519_athlon_alpha128
+fsubr %st(0),%st(3)
+fxch %st(1)
+fstpl 72(%ecx)
+fldl 48(%esp)
+fldl 32(%edx)
+fmul %st(0),%st(1)
+fmul %st(5),%st(0)
+fxch %st(5)
+fmull 24(%edx)
+faddp %st(0),%st(1)
+fmull curve25519_athlon_scale
+fldl 0(%esp)
+fmull 0(%edx)
+faddp %st(0),%st(1)
+fldl 8(%esp)
+fmull -8(%edx)
+faddp %st(0),%st(1)
+fldl 16(%esp)
+fmull -16(%edx)
+faddp %st(0),%st(1)
+faddp %st(0),%st(1)
+fldl curve25519_athlon_alpha153
+fadd %st(1),%st(0)
+fsubl curve25519_athlon_alpha153
+fsubr %st(0),%st(1)
+fxch %st(2)
+fstpl 80(%ecx)
+fldl 24(%edx)
+fmul %st(0),%st(0)
+faddp %st(0),%st(4)
+fxch %st(3)
+fmull curve25519_athlon_scale
+fldl 0(%esp)
+fmull 8(%edx)
+faddp %st(0),%st(1)
+fldl 8(%esp)
+fmull 0(%edx)
+faddp %st(0),%st(1)
+fldl 16(%esp)
+fmull -8(%edx)
+faddp %st(0),%st(1)
+fldl -16(%edx)
+fmul %st(0),%st(0)
+faddp %st(0),%st(1)
+faddp %st(0),%st(1)
+fldl curve25519_athlon_alpha179
+fadd %st(1),%st(0)
+fsubl curve25519_athlon_alpha179
+fsubr %st(0),%st(1)
+fldl 24(%edx)
+fadd %st(0),%st(0)
+fmull 32(%edx)
+fmull curve25519_athlon_scale
+fldl 0(%esp)
+fmull 16(%edx)
+faddp %st(0),%st(1)
+fldl 8(%esp)
+fmull 8(%edx)
+faddp %st(0),%st(1)
+fldl 16(%esp)
+fmull 0(%edx)
+faddp %st(0),%st(1)
+fldl 24(%esp)
+fmull -8(%edx)
+faddp %st(0),%st(1)
+faddp %st(0),%st(1)
+fldl curve25519_athlon_alpha204
+fadd %st(1),%st(0)
+fsubl curve25519_athlon_alpha204
+fsubr %st(0),%st(1)
+fldl -48(%edx)
+faddp %st(0),%st(1)
+fldl curve25519_athlon_alpha230
+fadd %st(1),%st(0)
+fldl 48(%edx)
+fxch %st(1)
+fsubl curve25519_athlon_alpha230
+fsubr %st(0),%st(2)
+faddp %st(0),%st(1)
+fxch %st(4)
+fstpl 88(%ecx)
+fxch %st(4)
+fstpl 96(%ecx)
+fxch %st(1)
+fstpl 104(%ecx)
+fstpl 112(%ecx)
+fxch %st(1)
+fstpl 120(%ecx)
+fstpl 128(%ecx)
+fldl 32(%ecx)
+fmull curve25519_athlon_121665
+fldl curve25519_athlon_alpha230
+fadd %st(1),%st(0)
+fsubl curve25519_athlon_alpha230
+fldl 40(%ecx)
+fmull curve25519_athlon_121665
+fadd %st(1),%st(0)
+fxch %st(1)
+fsubrp %st(0),%st(2)
+fxch %st(1)
+fstpl 0(%esp)
+fldl curve25519_athlon_alpha255
+fadd %st(1),%st(0)
+fsubl curve25519_athlon_alpha255
+fsubr %st(0),%st(1)
+fmull curve25519_athlon_scale
+fxch %st(1)
+fstpl 8(%esp)
+fldl -32(%ecx)
+fmull curve25519_athlon_121665
+faddp %st(0),%st(1)
+fldl curve25519_athlon_alpha26
+fadd %st(1),%st(0)
+fsubl curve25519_athlon_alpha26
+fldl -24(%ecx)
+fmull curve25519_athlon_121665
+fadd %st(1),%st(0)
+fxch %st(1)
+fsubrp %st(0),%st(2)
+fxch %st(1)
+fstpl -48(%edx)
+fldl curve25519_athlon_alpha51
+fadd %st(1),%st(0)
+fsubl curve25519_athlon_alpha51
+fldl -16(%ecx)
+fmull curve25519_athlon_121665
+fadd %st(1),%st(0)
+fxch %st(1)
+fsubrp %st(0),%st(2)
+fxch %st(1)
+fstpl -40(%edx)
+fldl curve25519_athlon_alpha77
+fadd %st(1),%st(0)
+fsubl curve25519_athlon_alpha77
+fldl -8(%ecx)
+fmull curve25519_athlon_121665
+fadd %st(1),%st(0)
+fxch %st(1)
+fsubrp %st(0),%st(2)
+fxch %st(1)
+fstpl -32(%edx)
+fldl curve25519_athlon_alpha102
+fadd %st(1),%st(0)
+fsubl curve25519_athlon_alpha102
+fldl 0(%ecx)
+fmull curve25519_athlon_121665
+fadd %st(1),%st(0)
+fxch %st(1)
+fsubrp %st(0),%st(2)
+fxch %st(1)
+fstpl -24(%edx)
+fldl curve25519_athlon_alpha128
+fadd %st(1),%st(0)
+fsubl curve25519_athlon_alpha128
+fldl 8(%ecx)
+fmull curve25519_athlon_121665
+fadd %st(1),%st(0)
+fxch %st(1)
+fsubrp %st(0),%st(2)
+fxch %st(1)
+fstpl -16(%edx)
+fldl curve25519_athlon_alpha153
+fadd %st(1),%st(0)
+fsubl curve25519_athlon_alpha153
+fldl 16(%ecx)
+fmull curve25519_athlon_121665
+fadd %st(1),%st(0)
+fxch %st(1)
+fsubrp %st(0),%st(2)
+fxch %st(1)
+fstpl -8(%edx)
+fldl curve25519_athlon_alpha179
+fadd %st(1),%st(0)
+fsubl curve25519_athlon_alpha179
+fldl 24(%ecx)
+fmull curve25519_athlon_121665
+fadd %st(1),%st(0)
+fxch %st(1)
+fsubrp %st(0),%st(2)
+fxch %st(1)
+fstpl 0(%edx)
+fldl curve25519_athlon_alpha204
+fadd %st(1),%st(0)
+fsubl curve25519_athlon_alpha204
+fldl 0(%esp)
+fadd %st(1),%st(0)
+fxch %st(1)
+fsubrp %st(0),%st(2)
+fxch %st(1)
+fstpl 8(%edx)
+fldl curve25519_athlon_alpha230
+fadd %st(1),%st(0)
+fsubl curve25519_athlon_alpha230
+fldl 8(%esp)
+fadd %st(1),%st(0)
+fxch %st(1)
+fsubrp %st(0),%st(2)
+fxch %st(1)
+fstpl 16(%edx)
+fstpl 48(%ecx)
+fldl -120(%ecx)
+fmull -40(%ecx)
+fmull curve25519_athlon_scale
+fldl 64(%edx)
+fmull -48(%ecx)
+faddp %st(0),%st(1)
+fldl 72(%edx)
+fmull -56(%ecx)
+faddp %st(0),%st(1)
+fldl 64(%edx)
+fmull -40(%ecx)
+fldl 80(%edx)
+fmull -64(%ecx)
+faddp %st(0),%st(2)
+fldl 72(%edx)
+fmull -48(%ecx)
+faddp %st(0),%st(1)
+fldl 88(%edx)
+fmull -72(%ecx)
+faddp %st(0),%st(2)
+fldl 80(%edx)
+fmull -56(%ecx)
+faddp %st(0),%st(1)
+fldl 96(%edx)
+fmull -80(%ecx)
+faddp %st(0),%st(2)
+fldl 88(%edx)
+fmull -64(%ecx)
+faddp %st(0),%st(1)
+fldl 72(%edx)
+fmull -40(%ecx)
+fldl 104(%edx)
+fmull -88(%ecx)
+faddp %st(0),%st(3)
+fldl 96(%edx)
+fmull -72(%ecx)
+faddp %st(0),%st(2)
+fldl 80(%edx)
+fmull -48(%ecx)
+faddp %st(0),%st(1)
+fldl 112(%edx)
+fmull -96(%ecx)
+faddp %st(0),%st(3)
+fldl 104(%edx)
+fmull -80(%ecx)
+faddp %st(0),%st(2)
+fldl 88(%edx)
+fmull -56(%ecx)
+faddp %st(0),%st(1)
+fldl 120(%edx)
+fmull -104(%ecx)
+faddp %st(0),%st(3)
+fldl 112(%edx)
+fmull -88(%ecx)
+faddp %st(0),%st(2)
+fldl 96(%edx)
+fmull -64(%ecx)
+faddp %st(0),%st(1)
+fldl -128(%ecx)
+fmull -112(%ecx)
+faddp %st(0),%st(3)
+fldl 120(%edx)
+fmull -96(%ecx)
+faddp %st(0),%st(2)
+fldl 104(%edx)
+fmull -72(%ecx)
+faddp %st(0),%st(1)
+fldl 80(%edx)
+fmull -40(%ecx)
+fldl 112(%edx)
+fmull -80(%ecx)
+faddp %st(0),%st(2)
+fldl -128(%ecx)
+fmull -104(%ecx)
+faddp %st(0),%st(3)
+fldl curve25519_athlon_alpha230
+fadd %st(4),%st(0)
+fldl 88(%edx)
+fmull -48(%ecx)
+faddp %st(0),%st(2)
+fldl 120(%edx)
+fmull -88(%ecx)
+faddp %st(0),%st(3)
+fldl -120(%ecx)
+fmull -112(%ecx)
+faddp %st(0),%st(4)
+fsubl curve25519_athlon_alpha230
+fldl 96(%edx)
+fmull -56(%ecx)
+faddp %st(0),%st(2)
+fldl 88(%edx)
+fmull -40(%ecx)
+fldl -128(%ecx)
+fmull -96(%ecx)
+faddp %st(0),%st(4)
+fxch %st(1)
+fadd %st(0),%st(4)
+fldl 104(%edx)
+fmull -64(%ecx)
+faddp %st(0),%st(3)
+fldl 96(%edx)
+fmull -48(%ecx)
+faddp %st(0),%st(2)
+fsubrp %st(0),%st(5)
+fldl curve25519_athlon_alpha255
+fadd %st(4),%st(0)
+fldl 112(%edx)
+fmull -72(%ecx)
+faddp %st(0),%st(3)
+fldl 104(%edx)
+fmull -56(%ecx)
+faddp %st(0),%st(2)
+fldl -120(%ecx)
+fmull -104(%ecx)
+faddp %st(0),%st(4)
+fsubl curve25519_athlon_alpha255
+fldl 120(%edx)
+fmull -80(%ecx)
+faddp %st(0),%st(3)
+fldl 96(%edx)
+fmull -40(%ecx)
+fldl 112(%edx)
+fmull -64(%ecx)
+faddp %st(0),%st(3)
+fldl -128(%ecx)
+fmull -88(%ecx)
+faddp %st(0),%st(4)
+fxch %st(1)
+fadd %st(0),%st(4)
+fsubrp %st(0),%st(5)
+fxch %st(5)
+fstpl 0(%esp)
+fldl 104(%edx)
+fmull -48(%ecx)
+faddp %st(0),%st(5)
+fldl 120(%edx)
+fmull -72(%ecx)
+faddp %st(0),%st(1)
+fldl -120(%ecx)
+fmull -96(%ecx)
+faddp %st(0),%st(2)
+fxch %st(2)
+fmull curve25519_athlon_scale
+fldl 112(%edx)
+fmull -56(%ecx)
+faddp %st(0),%st(5)
+fldl -128(%ecx)
+fmull -80(%ecx)
+faddp %st(0),%st(3)
+fxch %st(1)
+fmull curve25519_athlon_scale
+fldl 64(%edx)
+fmull -112(%ecx)
+faddp %st(0),%st(2)
+fxch %st(3)
+fstpl 8(%esp)
+fldl 104(%edx)
+fmull -40(%ecx)
+fldl 120(%edx)
+fmull -64(%ecx)
+faddp %st(0),%st(5)
+fldl -120(%ecx)
+fmull -88(%ecx)
+faddp %st(0),%st(3)
+fldl 64(%edx)
+fmull -104(%ecx)
+faddp %st(0),%st(4)
+fldl curve25519_athlon_alpha26
+fadd %st(2),%st(0)
+fldl 112(%edx)
+fmull -48(%ecx)
+faddp %st(0),%st(2)
+fldl -128(%ecx)
+fmull -72(%ecx)
+faddp %st(0),%st(6)
+fxch %st(3)
+fmull curve25519_athlon_scale
+fldl 72(%edx)
+fmull -112(%ecx)
+faddp %st(0),%st(5)
+fxch %st(3)
+fsubl curve25519_athlon_alpha26
+fldl 120(%edx)
+fmull -56(%ecx)
+faddp %st(0),%st(2)
+fldl -120(%ecx)
+fmull -80(%ecx)
+faddp %st(0),%st(6)
+fldl 64(%edx)
+fmull -96(%ecx)
+faddp %st(0),%st(4)
+fadd %st(0),%st(4)
+fsubrp %st(0),%st(2)
+fldl 112(%edx)
+fmull -40(%ecx)
+fldl -128(%ecx)
+fmull -64(%ecx)
+faddp %st(0),%st(2)
+fxch %st(5)
+fmull curve25519_athlon_scale
+fldl 72(%edx)
+fmull -104(%ecx)
+faddp %st(0),%st(4)
+fldl curve25519_athlon_alpha51
+fadd %st(5),%st(0)
+fldl 120(%edx)
+fmull -48(%ecx)
+faddp %st(0),%st(7)
+fldl -120(%ecx)
+fmull -72(%ecx)
+faddp %st(0),%st(3)
+fldl 64(%edx)
+fmull -88(%ecx)
+faddp %st(0),%st(2)
+fldl 80(%edx)
+fmull -112(%ecx)
+faddp %st(0),%st(5)
+fsubl curve25519_athlon_alpha51
+fxch %st(3)
+fstpl 16(%esp)
+fldl -128(%ecx)
+fmull -56(%ecx)
+faddp %st(0),%st(6)
+fxch %st(1)
+fmull curve25519_athlon_scale
+fldl 72(%edx)
+fmull -96(%ecx)
+faddp %st(0),%st(2)
+fxch %st(2)
+fadd %st(0),%st(3)
+fsubrp %st(0),%st(4)
+fldl 120(%edx)
+fmull -40(%ecx)
+fldl -120(%ecx)
+fmull -64(%ecx)
+faddp %st(0),%st(6)
+fldl 64(%edx)
+fmull -80(%ecx)
+faddp %st(0),%st(3)
+fldl 80(%edx)
+fmull -104(%ecx)
+faddp %st(0),%st(2)
+fldl curve25519_athlon_alpha77
+fadd %st(4),%st(0)
+fldl -128(%ecx)
+fmull -48(%ecx)
+faddp %st(0),%st(2)
+fxch %st(6)
+fmull curve25519_athlon_scale
+fldl 72(%edx)
+fmull -88(%ecx)
+faddp %st(0),%st(4)
+fldl 88(%edx)
+fmull -112(%ecx)
+faddp %st(0),%st(3)
+fxch %st(6)
+fsubl curve25519_athlon_alpha77
+fxch %st(5)
+fstpl 24(%esp)
+fldl -120(%ecx)
+fmull -56(%ecx)
+faddp %st(0),%st(1)
+fldl 64(%edx)
+fmull -72(%ecx)
+faddp %st(0),%st(6)
+fldl 80(%edx)
+fmull -96(%ecx)
+faddp %st(0),%st(3)
+fxch %st(4)
+fadd %st(0),%st(1)
+fsubrp %st(0),%st(3)
+fldl -128(%ecx)
+fmull -40(%ecx)
+fxch %st(4)
+fmull curve25519_athlon_scale
+fldl 72(%edx)
+fmull -80(%ecx)
+faddp %st(0),%st(6)
+fldl 88(%edx)
+fmull -104(%ecx)
+faddp %st(0),%st(3)
+fldl curve25519_athlon_alpha102
+fadd %st(2),%st(0)
+fldl -120(%ecx)
+fmull -48(%ecx)
+faddp %st(0),%st(6)
+fldl 64(%edx)
+fmull -64(%ecx)
+faddp %st(0),%st(2)
+fldl 80(%edx)
+fmull -88(%ecx)
+faddp %st(0),%st(7)
+fldl 96(%edx)
+fmull -112(%ecx)
+faddp %st(0),%st(4)
+fsubl curve25519_athlon_alpha102
+fxch %st(4)
+fstpl 32(%esp)
+fxch %st(4)
+fmull curve25519_athlon_scale
+fldl 72(%edx)
+fmull -72(%ecx)
+faddp %st(0),%st(5)
+fldl 88(%edx)
+fmull -96(%ecx)
+faddp %st(0),%st(6)
+fxch %st(3)
+fadd %st(0),%st(2)
+fsubrp %st(0),%st(1)
+fldl 64(%edx)
+fmull -56(%ecx)
+faddp %st(0),%st(3)
+fldl 80(%edx)
+fmull -80(%ecx)
+faddp %st(0),%st(4)
+fldl 96(%edx)
+fmull -104(%ecx)
+faddp %st(0),%st(5)
+fldl curve25519_athlon_alpha128
+fadd %st(2),%st(0)
+fldl 72(%edx)
+fmull -64(%ecx)
+faddp %st(0),%st(4)
+fldl 88(%edx)
+fmull -88(%ecx)
+faddp %st(0),%st(5)
+fldl 104(%edx)
+fmull -112(%ecx)
+faddp %st(0),%st(6)
+fsubl curve25519_athlon_alpha128
+fxch %st(1)
+fstpl 40(%esp)
+fldl 80(%edx)
+fmull -72(%ecx)
+faddp %st(0),%st(3)
+fldl 96(%edx)
+fmull -96(%ecx)
+faddp %st(0),%st(4)
+fadd %st(0),%st(4)
+fsubrp %st(0),%st(1)
+fstpl 48(%esp)
+fldl 88(%edx)
+fmull -80(%ecx)
+faddp %st(0),%st(1)
+fldl 104(%edx)
+fmull -104(%ecx)
+faddp %st(0),%st(2)
+fldl curve25519_athlon_alpha153
+fadd %st(3),%st(0)
+fldl 96(%edx)
+fmull -88(%ecx)
+faddp %st(0),%st(2)
+fldl 112(%edx)
+fmull -112(%ecx)
+faddp %st(0),%st(3)
+fsubl curve25519_athlon_alpha153
+fldl 104(%edx)
+fmull -96(%ecx)
+faddp %st(0),%st(2)
+fadd %st(0),%st(2)
+fsubrp %st(0),%st(3)
+fxch %st(2)
+fstpl 24(%edx)
+fldl 112(%edx)
+fmull -104(%ecx)
+faddp %st(0),%st(2)
+fldl curve25519_athlon_alpha179
+fadd %st(1),%st(0)
+fldl 120(%edx)
+fmull -112(%ecx)
+faddp %st(0),%st(3)
+fsubl curve25519_athlon_alpha179
+fldl 0(%esp)
+fldl 8(%esp)
+fxch %st(2)
+fadd %st(0),%st(4)
+fsubrp %st(0),%st(3)
+fldl curve25519_athlon_alpha204
+fadd %st(4),%st(0)
+fsubl curve25519_athlon_alpha204
+fadd %st(0),%st(1)
+fsubrp %st(0),%st(4)
+fldl curve25519_athlon_alpha230
+fadd %st(1),%st(0)
+fsubl curve25519_athlon_alpha230
+fsubr %st(0),%st(1)
+faddp %st(0),%st(2)
+fxch %st(2)
+fstpl 32(%edx)
+fxch %st(2)
+fstpl 48(%edx)
+fstpl 56(%edx)
+fstpl -112(%ecx)
+fldl -48(%edx)
+faddl 64(%edx)
+fstpl -104(%ecx)
+fldl -40(%edx)
+faddl 72(%edx)
+fstpl -96(%ecx)
+fldl -32(%edx)
+faddl 80(%edx)
+fstpl -88(%ecx)
+fldl -24(%edx)
+faddl 88(%edx)
+fstpl -80(%ecx)
+fldl -16(%edx)
+faddl 96(%edx)
+fstpl -16(%edx)
+fldl -8(%edx)
+faddl 104(%edx)
+fstpl -8(%edx)
+fldl 0(%edx)
+faddl 112(%edx)
+fstpl 0(%edx)
+fldl 8(%edx)
+faddl 120(%edx)
+fstpl 8(%edx)
+fldl 16(%edx)
+faddl -128(%ecx)
+fstpl 16(%edx)
+fldl 48(%ecx)
+faddl -120(%ecx)
+fstpl 80(%edx)
+fldl 128(%ecx)
+fmull -128(%edx)
+fmull curve25519_athlon_scale
+fldl 56(%ecx)
+fmull 120(%esp)
+faddp %st(0),%st(1)
+fldl 64(%ecx)
+fmull 112(%esp)
+faddp %st(0),%st(1)
+fldl 56(%ecx)
+fmull -128(%edx)
+fldl 72(%ecx)
+fmull 104(%esp)
+faddp %st(0),%st(2)
+fldl 64(%ecx)
+fmull 120(%esp)
+faddp %st(0),%st(1)
+fldl 80(%ecx)
+fmull 96(%esp)
+faddp %st(0),%st(2)
+fldl 72(%ecx)
+fmull 112(%esp)
+faddp %st(0),%st(1)
+fldl 88(%ecx)
+fmull 88(%esp)
+faddp %st(0),%st(2)
+fldl 80(%ecx)
+fmull 104(%esp)
+faddp %st(0),%st(1)
+fldl 64(%ecx)
+fmull -128(%edx)
+fldl 96(%ecx)
+fmull 80(%esp)
+faddp %st(0),%st(3)
+fldl 88(%ecx)
+fmull 96(%esp)
+faddp %st(0),%st(2)
+fldl 72(%ecx)
+fmull 120(%esp)
+faddp %st(0),%st(1)
+fldl 104(%ecx)
+fmull 72(%esp)
+faddp %st(0),%st(3)
+fldl 96(%ecx)
+fmull 88(%esp)
+faddp %st(0),%st(2)
+fldl 80(%ecx)
+fmull 112(%esp)
+faddp %st(0),%st(1)
+fldl 112(%ecx)
+fmull 64(%esp)
+faddp %st(0),%st(3)
+fldl 104(%ecx)
+fmull 80(%esp)
+faddp %st(0),%st(2)
+fldl 88(%ecx)
+fmull 104(%esp)
+faddp %st(0),%st(1)
+fldl 120(%ecx)
+fmull 56(%esp)
+faddp %st(0),%st(3)
+fldl 112(%ecx)
+fmull 72(%esp)
+faddp %st(0),%st(2)
+fldl 96(%ecx)
+fmull 96(%esp)
+faddp %st(0),%st(1)
+fldl 72(%ecx)
+fmull -128(%edx)
+fldl 104(%ecx)
+fmull 88(%esp)
+faddp %st(0),%st(2)
+fldl 120(%ecx)
+fmull 64(%esp)
+faddp %st(0),%st(3)
+fldl curve25519_athlon_alpha230
+fadd %st(4),%st(0)
+fldl 80(%ecx)
+fmull 120(%esp)
+faddp %st(0),%st(2)
+fldl 112(%ecx)
+fmull 80(%esp)
+faddp %st(0),%st(3)
+fldl 128(%ecx)
+fmull 56(%esp)
+faddp %st(0),%st(4)
+fsubl curve25519_athlon_alpha230
+fldl 88(%ecx)
+fmull 112(%esp)
+faddp %st(0),%st(2)
+fldl 80(%ecx)
+fmull -128(%edx)
+fldl 120(%ecx)
+fmull 72(%esp)
+faddp %st(0),%st(4)
+fxch %st(1)
+fadd %st(0),%st(4)
+fldl 96(%ecx)
+fmull 104(%esp)
+faddp %st(0),%st(3)
+fldl 88(%ecx)
+fmull 120(%esp)
+faddp %st(0),%st(2)
+fsubrp %st(0),%st(5)
+fldl curve25519_athlon_alpha255
+fadd %st(4),%st(0)
+fldl 104(%ecx)
+fmull 96(%esp)
+faddp %st(0),%st(3)
+fldl 96(%ecx)
+fmull 112(%esp)
+faddp %st(0),%st(2)
+fldl 128(%ecx)
+fmull 64(%esp)
+faddp %st(0),%st(4)
+fsubl curve25519_athlon_alpha255
+fldl 112(%ecx)
+fmull 88(%esp)
+faddp %st(0),%st(3)
+fldl 88(%ecx)
+fmull -128(%edx)
+fldl 104(%ecx)
+fmull 104(%esp)
+faddp %st(0),%st(3)
+fldl 120(%ecx)
+fmull 80(%esp)
+faddp %st(0),%st(4)
+fxch %st(1)
+fadd %st(0),%st(4)
+fsubrp %st(0),%st(5)
+fxch %st(5)
+fstpl 0(%esp)
+fldl 96(%ecx)
+fmull 120(%esp)
+faddp %st(0),%st(5)
+fldl 112(%ecx)
+fmull 96(%esp)
+faddp %st(0),%st(1)
+fldl 128(%ecx)
+fmull 72(%esp)
+faddp %st(0),%st(2)
+fxch %st(2)
+fmull curve25519_athlon_scale
+fldl 104(%ecx)
+fmull 112(%esp)
+faddp %st(0),%st(5)
+fldl 120(%ecx)
+fmull 88(%esp)
+faddp %st(0),%st(3)
+fxch %st(1)
+fmull curve25519_athlon_scale
+fldl 56(%ecx)
+fmull 56(%esp)
+faddp %st(0),%st(2)
+fxch %st(3)
+fstpl 8(%esp)
+fldl 96(%ecx)
+fmull -128(%edx)
+fldl 112(%ecx)
+fmull 104(%esp)
+faddp %st(0),%st(5)
+fldl 128(%ecx)
+fmull 80(%esp)
+faddp %st(0),%st(3)
+fldl 56(%ecx)
+fmull 64(%esp)
+faddp %st(0),%st(4)
+fldl curve25519_athlon_alpha26
+fadd %st(2),%st(0)
+fldl 104(%ecx)
+fmull 120(%esp)
+faddp %st(0),%st(2)
+fldl 120(%ecx)
+fmull 96(%esp)
+faddp %st(0),%st(6)
+fxch %st(3)
+fmull curve25519_athlon_scale
+fldl 64(%ecx)
+fmull 56(%esp)
+faddp %st(0),%st(5)
+fxch %st(3)
+fsubl curve25519_athlon_alpha26
+fldl 112(%ecx)
+fmull 112(%esp)
+faddp %st(0),%st(2)
+fldl 128(%ecx)
+fmull 88(%esp)
+faddp %st(0),%st(6)
+fldl 56(%ecx)
+fmull 72(%esp)
+faddp %st(0),%st(4)
+fadd %st(0),%st(4)
+fsubrp %st(0),%st(2)
+fldl 104(%ecx)
+fmull -128(%edx)
+fldl 120(%ecx)
+fmull 104(%esp)
+faddp %st(0),%st(2)
+fxch %st(5)
+fmull curve25519_athlon_scale
+fldl 64(%ecx)
+fmull 64(%esp)
+faddp %st(0),%st(4)
+fldl curve25519_athlon_alpha51
+fadd %st(5),%st(0)
+fldl 112(%ecx)
+fmull 120(%esp)
+faddp %st(0),%st(7)
+fldl 128(%ecx)
+fmull 96(%esp)
+faddp %st(0),%st(3)
+fldl 56(%ecx)
+fmull 80(%esp)
+faddp %st(0),%st(2)
+fldl 72(%ecx)
+fmull 56(%esp)
+faddp %st(0),%st(5)
+fsubl curve25519_athlon_alpha51
+fxch %st(3)
+fstpl -48(%edx)
+fldl 120(%ecx)
+fmull 112(%esp)
+faddp %st(0),%st(6)
+fxch %st(1)
+fmull curve25519_athlon_scale
+fldl 64(%ecx)
+fmull 72(%esp)
+faddp %st(0),%st(2)
+fxch %st(2)
+fadd %st(0),%st(3)
+fsubrp %st(0),%st(4)
+fldl 112(%ecx)
+fmull -128(%edx)
+fldl 128(%ecx)
+fmull 104(%esp)
+faddp %st(0),%st(6)
+fldl 56(%ecx)
+fmull 88(%esp)
+faddp %st(0),%st(3)
+fldl 72(%ecx)
+fmull 64(%esp)
+faddp %st(0),%st(2)
+fldl curve25519_athlon_alpha77
+fadd %st(4),%st(0)
+fldl 120(%ecx)
+fmull 120(%esp)
+faddp %st(0),%st(2)
+fxch %st(6)
+fmull curve25519_athlon_scale
+fldl 64(%ecx)
+fmull 80(%esp)
+faddp %st(0),%st(4)
+fldl 80(%ecx)
+fmull 56(%esp)
+faddp %st(0),%st(3)
+fxch %st(6)
+fsubl curve25519_athlon_alpha77
+fxch %st(5)
+fstpl -40(%edx)
+fldl 128(%ecx)
+fmull 112(%esp)
+faddp %st(0),%st(1)
+fldl 56(%ecx)
+fmull 96(%esp)
+faddp %st(0),%st(6)
+fldl 72(%ecx)
+fmull 72(%esp)
+faddp %st(0),%st(3)
+fxch %st(4)
+fadd %st(0),%st(1)
+fsubrp %st(0),%st(3)
+fldl 120(%ecx)
+fmull -128(%edx)
+fxch %st(4)
+fmull curve25519_athlon_scale
+fldl 64(%ecx)
+fmull 88(%esp)
+faddp %st(0),%st(6)
+fldl 80(%ecx)
+fmull 64(%esp)
+faddp %st(0),%st(3)
+fldl curve25519_athlon_alpha102
+fadd %st(2),%st(0)
+fldl 128(%ecx)
+fmull 120(%esp)
+faddp %st(0),%st(6)
+fldl 56(%ecx)
+fmull 104(%esp)
+faddp %st(0),%st(2)
+fldl 72(%ecx)
+fmull 80(%esp)
+faddp %st(0),%st(7)
+fldl 88(%ecx)
+fmull 56(%esp)
+faddp %st(0),%st(4)
+fsubl curve25519_athlon_alpha102
+fxch %st(4)
+fstpl -32(%edx)
+fxch %st(4)
+fmull curve25519_athlon_scale
+fldl 64(%ecx)
+fmull 96(%esp)
+faddp %st(0),%st(5)
+fldl 80(%ecx)
+fmull 72(%esp)
+faddp %st(0),%st(6)
+fxch %st(3)
+fadd %st(0),%st(2)
+fsubrp %st(0),%st(1)
+fldl 56(%ecx)
+fmull 112(%esp)
+faddp %st(0),%st(3)
+fldl 72(%ecx)
+fmull 88(%esp)
+faddp %st(0),%st(4)
+fldl 88(%ecx)
+fmull 64(%esp)
+faddp %st(0),%st(5)
+fldl curve25519_athlon_alpha128
+fadd %st(2),%st(0)
+fldl 64(%ecx)
+fmull 104(%esp)
+faddp %st(0),%st(4)
+fldl 80(%ecx)
+fmull 80(%esp)
+faddp %st(0),%st(5)
+fldl 96(%ecx)
+fmull 56(%esp)
+faddp %st(0),%st(6)
+fsubl curve25519_athlon_alpha128
+fxch %st(1)
+fstpl -24(%edx)
+fldl 72(%ecx)
+fmull 96(%esp)
+faddp %st(0),%st(3)
+fldl 88(%ecx)
+fmull 72(%esp)
+faddp %st(0),%st(4)
+fadd %st(0),%st(4)
+fsubrp %st(0),%st(1)
+fstpl 96(%edx)
+fldl 80(%ecx)
+fmull 88(%esp)
+faddp %st(0),%st(1)
+fldl 96(%ecx)
+fmull 64(%esp)
+faddp %st(0),%st(2)
+fldl curve25519_athlon_alpha153
+fadd %st(3),%st(0)
+fldl 88(%ecx)
+fmull 80(%esp)
+faddp %st(0),%st(2)
+fldl 104(%ecx)
+fmull 56(%esp)
+faddp %st(0),%st(3)
+fsubl curve25519_athlon_alpha153
+fldl 96(%ecx)
+fmull 72(%esp)
+faddp %st(0),%st(2)
+fadd %st(0),%st(2)
+fsubrp %st(0),%st(3)
+fxch %st(2)
+fstpl 104(%edx)
+fldl 104(%ecx)
+fmull 64(%esp)
+faddp %st(0),%st(2)
+fldl curve25519_athlon_alpha179
+fadd %st(1),%st(0)
+fldl 112(%ecx)
+fmull 56(%esp)
+faddp %st(0),%st(3)
+fsubl curve25519_athlon_alpha179
+fldl 0(%esp)
+fldl 8(%esp)
+fxch %st(2)
+fadd %st(0),%st(4)
+fsubrp %st(0),%st(3)
+fldl curve25519_athlon_alpha204
+fadd %st(4),%st(0)
+fsubl curve25519_athlon_alpha204
+fadd %st(0),%st(1)
+fsubrp %st(0),%st(4)
+fldl curve25519_athlon_alpha230
+fadd %st(1),%st(0)
+fsubl curve25519_athlon_alpha230
+fsubr %st(0),%st(1)
+faddp %st(0),%st(2)
+fxch %st(2)
+fstpl 112(%edx)
+fxch %st(2)
+fstpl 120(%edx)
+fstpl -128(%ecx)
+fstpl -120(%ecx)
+fldl 80(%edx)
+fmull 40(%ecx)
+fmull curve25519_athlon_scale
+fldl -104(%ecx)
+fmull 32(%ecx)
+faddp %st(0),%st(1)
+fldl -96(%ecx)
+fmull 24(%ecx)
+faddp %st(0),%st(1)
+fldl -104(%ecx)
+fmull 40(%ecx)
+fldl -88(%ecx)
+fmull 16(%ecx)
+faddp %st(0),%st(2)
+fldl -96(%ecx)
+fmull 32(%ecx)
+faddp %st(0),%st(1)
+fldl -80(%ecx)
+fmull 8(%ecx)
+faddp %st(0),%st(2)
+fldl -88(%ecx)
+fmull 24(%ecx)
+faddp %st(0),%st(1)
+fldl -16(%edx)
+fmull 0(%ecx)
+faddp %st(0),%st(2)
+fldl -80(%ecx)
+fmull 16(%ecx)
+faddp %st(0),%st(1)
+fldl -96(%ecx)
+fmull 40(%ecx)
+fldl -8(%edx)
+fmull -8(%ecx)
+faddp %st(0),%st(3)
+fldl -16(%edx)
+fmull 8(%ecx)
+faddp %st(0),%st(2)
+fldl -88(%ecx)
+fmull 32(%ecx)
+faddp %st(0),%st(1)
+fldl 0(%edx)
+fmull -16(%ecx)
+faddp %st(0),%st(3)
+fldl -8(%edx)
+fmull 0(%ecx)
+faddp %st(0),%st(2)
+fldl -80(%ecx)
+fmull 24(%ecx)
+faddp %st(0),%st(1)
+fldl 8(%edx)
+fmull -24(%ecx)
+faddp %st(0),%st(3)
+fldl 0(%edx)
+fmull -8(%ecx)
+faddp %st(0),%st(2)
+fldl -16(%edx)
+fmull 16(%ecx)
+faddp %st(0),%st(1)
+fldl 16(%edx)
+fmull -32(%ecx)
+faddp %st(0),%st(3)
+fldl 8(%edx)
+fmull -16(%ecx)
+faddp %st(0),%st(2)
+fldl -8(%edx)
+fmull 8(%ecx)
+faddp %st(0),%st(1)
+fldl -88(%ecx)
+fmull 40(%ecx)
+fldl 0(%edx)
+fmull 0(%ecx)
+faddp %st(0),%st(2)
+fldl 16(%edx)
+fmull -24(%ecx)
+faddp %st(0),%st(3)
+fldl curve25519_athlon_alpha230
+fadd %st(4),%st(0)
+fldl -80(%ecx)
+fmull 32(%ecx)
+faddp %st(0),%st(2)
+fldl 8(%edx)
+fmull -8(%ecx)
+faddp %st(0),%st(3)
+fldl 80(%edx)
+fmull -32(%ecx)
+faddp %st(0),%st(4)
+fsubl curve25519_athlon_alpha230
+fldl -16(%edx)
+fmull 24(%ecx)
+faddp %st(0),%st(2)
+fldl -80(%ecx)
+fmull 40(%ecx)
+fldl 16(%edx)
+fmull -16(%ecx)
+faddp %st(0),%st(4)
+fxch %st(1)
+fadd %st(0),%st(4)
+fldl -8(%edx)
+fmull 16(%ecx)
+faddp %st(0),%st(3)
+fldl -16(%edx)
+fmull 32(%ecx)
+faddp %st(0),%st(2)
+fsubrp %st(0),%st(5)
+fldl curve25519_athlon_alpha255
+fadd %st(4),%st(0)
+fldl 0(%edx)
+fmull 8(%ecx)
+faddp %st(0),%st(3)
+fldl -8(%edx)
+fmull 24(%ecx)
+faddp %st(0),%st(2)
+fldl 80(%edx)
+fmull -24(%ecx)
+faddp %st(0),%st(4)
+fsubl curve25519_athlon_alpha255
+fldl 8(%edx)
+fmull 0(%ecx)
+faddp %st(0),%st(3)
+fldl -16(%edx)
+fmull 40(%ecx)
+fldl 0(%edx)
+fmull 16(%ecx)
+faddp %st(0),%st(3)
+fldl 16(%edx)
+fmull -8(%ecx)
+faddp %st(0),%st(4)
+fxch %st(1)
+fadd %st(0),%st(4)
+fsubrp %st(0),%st(5)
+fxch %st(5)
+fstpl 0(%esp)
+fldl -8(%edx)
+fmull 32(%ecx)
+faddp %st(0),%st(5)
+fldl 8(%edx)
+fmull 8(%ecx)
+faddp %st(0),%st(1)
+fldl 80(%edx)
+fmull -16(%ecx)
+faddp %st(0),%st(2)
+fxch %st(2)
+fmull curve25519_athlon_scale
+fldl 0(%edx)
+fmull 24(%ecx)
+faddp %st(0),%st(5)
+fldl 16(%edx)
+fmull 0(%ecx)
+faddp %st(0),%st(3)
+fxch %st(1)
+fmull curve25519_athlon_scale
+fldl -104(%ecx)
+fmull -32(%ecx)
+faddp %st(0),%st(2)
+fxch %st(3)
+fstpl 8(%esp)
+fldl -8(%edx)
+fmull 40(%ecx)
+fldl 8(%edx)
+fmull 16(%ecx)
+faddp %st(0),%st(5)
+fldl 80(%edx)
+fmull -8(%ecx)
+faddp %st(0),%st(3)
+fldl -104(%ecx)
+fmull -24(%ecx)
+faddp %st(0),%st(4)
+fldl curve25519_athlon_alpha26
+fadd %st(2),%st(0)
+fldl 0(%edx)
+fmull 32(%ecx)
+faddp %st(0),%st(2)
+fldl 16(%edx)
+fmull 8(%ecx)
+faddp %st(0),%st(6)
+fxch %st(3)
+fmull curve25519_athlon_scale
+fldl -96(%ecx)
+fmull -32(%ecx)
+faddp %st(0),%st(5)
+fxch %st(3)
+fsubl curve25519_athlon_alpha26
+fldl 8(%edx)
+fmull 24(%ecx)
+faddp %st(0),%st(2)
+fldl 80(%edx)
+fmull 0(%ecx)
+faddp %st(0),%st(6)
+fldl -104(%ecx)
+fmull -16(%ecx)
+faddp %st(0),%st(4)
+fadd %st(0),%st(4)
+fsubrp %st(0),%st(2)
+fldl 0(%edx)
+fmull 40(%ecx)
+fldl 16(%edx)
+fmull 16(%ecx)
+faddp %st(0),%st(2)
+fxch %st(5)
+fmull curve25519_athlon_scale
+fldl -96(%ecx)
+fmull -24(%ecx)
+faddp %st(0),%st(4)
+fldl curve25519_athlon_alpha51
+fadd %st(5),%st(0)
+fldl 8(%edx)
+fmull 32(%ecx)
+faddp %st(0),%st(7)
+fldl 80(%edx)
+fmull 8(%ecx)
+faddp %st(0),%st(3)
+fldl -104(%ecx)
+fmull -8(%ecx)
+faddp %st(0),%st(2)
+fldl -88(%ecx)
+fmull -32(%ecx)
+faddp %st(0),%st(5)
+fsubl curve25519_athlon_alpha51
+fxch %st(3)
+fstpl 64(%edx)
+fldl 16(%edx)
+fmull 24(%ecx)
+faddp %st(0),%st(6)
+fxch %st(1)
+fmull curve25519_athlon_scale
+fldl -96(%ecx)
+fmull -16(%ecx)
+faddp %st(0),%st(2)
+fxch %st(2)
+fadd %st(0),%st(3)
+fsubrp %st(0),%st(4)
+fldl 8(%edx)
+fmull 40(%ecx)
+fldl 80(%edx)
+fmull 16(%ecx)
+faddp %st(0),%st(6)
+fldl -104(%ecx)
+fmull 0(%ecx)
+faddp %st(0),%st(3)
+fldl -88(%ecx)
+fmull -24(%ecx)
+faddp %st(0),%st(2)
+fldl curve25519_athlon_alpha77
+fadd %st(4),%st(0)
+fldl 16(%edx)
+fmull 32(%ecx)
+faddp %st(0),%st(2)
+fxch %st(6)
+fmull curve25519_athlon_scale
+fldl -96(%ecx)
+fmull -8(%ecx)
+faddp %st(0),%st(4)
+fldl -80(%ecx)
+fmull -32(%ecx)
+faddp %st(0),%st(3)
+fxch %st(6)
+fsubl curve25519_athlon_alpha77
+fxch %st(5)
+fstpl 72(%edx)
+fldl 80(%edx)
+fmull 24(%ecx)
+faddp %st(0),%st(1)
+fldl -104(%ecx)
+fmull 8(%ecx)
+faddp %st(0),%st(6)
+fldl -88(%ecx)
+fmull -16(%ecx)
+faddp %st(0),%st(3)
+fxch %st(4)
+fadd %st(0),%st(1)
+fsubrp %st(0),%st(3)
+fldl 16(%edx)
+fmull 40(%ecx)
+fxch %st(4)
+fmull curve25519_athlon_scale
+fldl -96(%ecx)
+fmull 0(%ecx)
+faddp %st(0),%st(6)
+fldl -80(%ecx)
+fmull -24(%ecx)
+faddp %st(0),%st(3)
+fldl curve25519_athlon_alpha102
+fadd %st(2),%st(0)
+fldl 80(%edx)
+fmull 32(%ecx)
+faddp %st(0),%st(6)
+fldl -104(%ecx)
+fmull 16(%ecx)
+faddp %st(0),%st(2)
+fldl -88(%ecx)
+fmull -8(%ecx)
+faddp %st(0),%st(7)
+fldl -16(%edx)
+fmull -32(%ecx)
+faddp %st(0),%st(4)
+fsubl curve25519_athlon_alpha102
+fxch %st(4)
+fstpl 80(%edx)
+fxch %st(4)
+fmull curve25519_athlon_scale
+fldl -96(%ecx)
+fmull 8(%ecx)
+faddp %st(0),%st(5)
+fldl -80(%ecx)
+fmull -16(%ecx)
+faddp %st(0),%st(6)
+fxch %st(3)
+fadd %st(0),%st(2)
+fsubrp %st(0),%st(1)
+fldl -104(%ecx)
+fmull 24(%ecx)
+faddp %st(0),%st(3)
+fldl -88(%ecx)
+fmull 0(%ecx)
+faddp %st(0),%st(4)
+fldl -16(%edx)
+fmull -24(%ecx)
+faddp %st(0),%st(5)
+fldl curve25519_athlon_alpha128
+fadd %st(2),%st(0)
+fldl -96(%ecx)
+fmull 16(%ecx)
+faddp %st(0),%st(4)
+fldl -80(%ecx)
+fmull -8(%ecx)
+faddp %st(0),%st(5)
+fldl -8(%edx)
+fmull -32(%ecx)
+faddp %st(0),%st(6)
+fsubl curve25519_athlon_alpha128
+fxch %st(1)
+fstpl 88(%edx)
+fldl -88(%ecx)
+fmull 8(%ecx)
+faddp %st(0),%st(3)
+fldl -16(%edx)
+fmull -16(%ecx)
+faddp %st(0),%st(4)
+fadd %st(0),%st(4)
+fsubrp %st(0),%st(1)
+fstpl -104(%ecx)
+fldl -80(%ecx)
+fmull 0(%ecx)
+faddp %st(0),%st(1)
+fldl -8(%edx)
+fmull -24(%ecx)
+faddp %st(0),%st(2)
+fldl curve25519_athlon_alpha153
+fadd %st(3),%st(0)
+fldl -16(%edx)
+fmull -8(%ecx)
+faddp %st(0),%st(2)
+fldl 0(%edx)
+fmull -32(%ecx)
+faddp %st(0),%st(3)
+fsubl curve25519_athlon_alpha153
+fldl -8(%edx)
+fmull -16(%ecx)
+faddp %st(0),%st(2)
+fadd %st(0),%st(2)
+fsubrp %st(0),%st(3)
+fxch %st(2)
+fstpl -96(%ecx)
+fldl 0(%edx)
+fmull -24(%ecx)
+faddp %st(0),%st(2)
+fldl curve25519_athlon_alpha179
+fadd %st(1),%st(0)
+fldl 8(%edx)
+fmull -32(%ecx)
+faddp %st(0),%st(3)
+fsubl curve25519_athlon_alpha179
+fldl 0(%esp)
+fldl 8(%esp)
+fxch %st(2)
+fadd %st(0),%st(4)
+fsubrp %st(0),%st(3)
+fldl curve25519_athlon_alpha204
+fadd %st(4),%st(0)
+fsubl curve25519_athlon_alpha204
+fadd %st(0),%st(1)
+fsubrp %st(0),%st(4)
+fldl curve25519_athlon_alpha230
+fadd %st(1),%st(0)
+fsubl curve25519_athlon_alpha230
+fsubr %st(0),%st(1)
+faddp %st(0),%st(2)
+fxch %st(2)
+fstpl -88(%ecx)
+fxch %st(2)
+fstpl -80(%ecx)
+fstpl -72(%ecx)
+fstpl -64(%ecx)
+fldl 136(%ecx)
+fldl -120(%edx)
+fldl 16(%esp)
+fsubr %st(1),%st(0)
+fmul %st(2),%st(0)
+fsubr %st(0),%st(1)
+faddl 16(%esp)
+fxch %st(1)
+fstpl -16(%edx)
+fstpl 0(%esp)
+fldl -112(%edx)
+fldl 24(%esp)
+fsubr %st(1),%st(0)
+fmul %st(2),%st(0)
+fsubr %st(0),%st(1)
+faddl 24(%esp)
+fxch %st(1)
+fstpl -8(%edx)
+fstpl 8(%esp)
+fldl -104(%edx)
+fldl 32(%esp)
+fsubr %st(1),%st(0)
+fmul %st(2),%st(0)
+fsubr %st(0),%st(1)
+faddl 32(%esp)
+fxch %st(1)
+fstpl 0(%edx)
+fstpl 16(%esp)
+fldl 40(%edx)
+fldl 40(%esp)
+fsubr %st(1),%st(0)
+fmul %st(2),%st(0)
+fsubr %st(0),%st(1)
+faddl 40(%esp)
+fxch %st(1)
+fstpl 8(%edx)
+fstpl 24(%esp)
+fldl -96(%edx)
+fldl 48(%esp)
+fsubr %st(1),%st(0)
+fmul %st(2),%st(0)
+fsubr %st(0),%st(1)
+faddl 48(%esp)
+fxch %st(1)
+fstpl 16(%edx)
+fstpl 32(%esp)
+fldl -88(%edx)
+fldl 24(%edx)
+fsubr %st(1),%st(0)
+fmul %st(2),%st(0)
+fsubr %st(0),%st(1)
+faddl 24(%edx)
+fxch %st(1)
+fstpl 24(%edx)
+fstpl 40(%esp)
+fldl -80(%edx)
+fldl 32(%edx)
+fsubr %st(1),%st(0)
+fmul %st(2),%st(0)
+fsubr %st(0),%st(1)
+faddl 32(%edx)
+fxch %st(1)
+fstpl 32(%edx)
+fstpl 48(%esp)
+fldl -72(%edx)
+fldl 48(%edx)
+fsubr %st(1),%st(0)
+fmul %st(2),%st(0)
+fsubr %st(0),%st(1)
+faddl 48(%edx)
+fxch %st(1)
+fstpl 40(%edx)
+fstpl -120(%edx)
+fldl -64(%edx)
+fldl 56(%edx)
+fsubr %st(1),%st(0)
+fmul %st(2),%st(0)
+fsubr %st(0),%st(1)
+faddl 56(%edx)
+fxch %st(1)
+fstpl 48(%edx)
+fstpl -112(%edx)
+fldl -56(%edx)
+fldl -112(%ecx)
+fsubr %st(1),%st(0)
+fmul %st(2),%st(0)
+fsubr %st(0),%st(1)
+faddl -112(%ecx)
+fxch %st(1)
+fstpl 56(%edx)
+fstpl -104(%edx)
+fldl -48(%edx)
+fldl 64(%edx)
+fsubr %st(1),%st(0)
+fmul %st(2),%st(0)
+fsubr %st(0),%st(1)
+faddl 64(%edx)
+fxch %st(1)
+fstpl 64(%edx)
+fstpl -96(%edx)
+fldl -40(%edx)
+fldl 72(%edx)
+fsubr %st(1),%st(0)
+fmul %st(2),%st(0)
+fsubr %st(0),%st(1)
+faddl 72(%edx)
+fxch %st(1)
+fstpl 72(%edx)
+fstpl -88(%edx)
+fldl -32(%edx)
+fldl 80(%edx)
+fsubr %st(1),%st(0)
+fmul %st(2),%st(0)
+fsubr %st(0),%st(1)
+faddl 80(%edx)
+fxch %st(1)
+fstpl 80(%edx)
+fstpl -80(%edx)
+fldl -24(%edx)
+fldl 88(%edx)
+fsubr %st(1),%st(0)
+fmul %st(2),%st(0)
+fsubr %st(0),%st(1)
+faddl 88(%edx)
+fxch %st(1)
+fstpl 88(%edx)
+fstpl -72(%edx)
+fldl 96(%edx)
+fldl -104(%ecx)
+fsubr %st(1),%st(0)
+fmul %st(2),%st(0)
+fsubr %st(0),%st(1)
+faddl -104(%ecx)
+fxch %st(1)
+fstpl 96(%edx)
+fstpl -64(%edx)
+fldl 104(%edx)
+fldl -96(%ecx)
+fsubr %st(1),%st(0)
+fmul %st(2),%st(0)
+fsubr %st(0),%st(1)
+faddl -96(%ecx)
+fxch %st(1)
+fstpl 104(%edx)
+fstpl -56(%edx)
+fldl 112(%edx)
+fldl -88(%ecx)
+fsubr %st(1),%st(0)
+fmul %st(2),%st(0)
+fsubr %st(0),%st(1)
+faddl -88(%ecx)
+fxch %st(1)
+fstpl 112(%edx)
+fstpl -48(%edx)
+fldl 120(%edx)
+fldl -80(%ecx)
+fsubr %st(1),%st(0)
+fmul %st(2),%st(0)
+fsubr %st(0),%st(1)
+faddl -80(%ecx)
+fxch %st(1)
+fstpl 120(%edx)
+fstpl -40(%edx)
+fldl -128(%ecx)
+fldl -72(%ecx)
+fsubr %st(1),%st(0)
+fmul %st(2),%st(0)
+fsubr %st(0),%st(1)
+faddl -72(%ecx)
+fxch %st(1)
+fstpl -128(%ecx)
+fstpl -32(%edx)
+fldl -120(%ecx)
+fldl -64(%ecx)
+fsubr %st(1),%st(0)
+fmulp %st(0),%st(2)
+fsub %st(1),%st(0)
+fxch %st(1)
+faddl -64(%ecx)
+fxch %st(1)
+fstpl -120(%ecx)
+fstpl -24(%edx)
+movl 180(%ecx),%esi
+movl 184(%ecx),%ebp
+sub $1,%ebp
+ja ._morebits
+movl 188(%ecx),%edi
+sub $4,%edi
+jb ._done
+movl (%ebx,%edi),%esi
+mov $32,%ebp
+jmp ._morebytes
+._done:
+movl 4(%esp,%eax),%eax
+fldl 0(%esp)
+fstpl 0(%eax)
+fldl 8(%esp)
+fstpl 8(%eax)
+fldl 16(%esp)
+fstpl 16(%eax)
+fldl 24(%esp)
+fstpl 24(%eax)
+fldl 32(%esp)
+fstpl 32(%eax)
+fldl 40(%esp)
+fstpl 40(%eax)
+fldl 48(%esp)
+fstpl 48(%eax)
+fldl -120(%edx)
+fstpl 56(%eax)
+fldl -112(%edx)
+fstpl 64(%eax)
+fldl -104(%edx)
+fstpl 72(%eax)
+fldl -96(%edx)
+fstpl 80(%eax)
+fldl -88(%edx)
+fstpl 88(%eax)
+fldl -80(%edx)
+fstpl 96(%eax)
+fldl -72(%edx)
+fstpl 104(%eax)
+fldl -64(%edx)
+fstpl 112(%eax)
+fldl -56(%edx)
+fstpl 120(%eax)
+fldl -48(%edx)
+fstpl 128(%eax)
+fldl -40(%edx)
+fstpl 136(%eax)
+fldl -32(%edx)
+fstpl 144(%eax)
+fldl -24(%edx)
+fstpl 152(%eax)
+movl 160(%ecx),%eax
+movl 164(%ecx),%ebx
+movl 168(%ecx),%esi
+movl 172(%ecx),%edi
+movl 176(%ecx),%ebp
+add %eax,%esp
+ret
diff --git a/curve25519/curve25519_athlon_mult.s b/curve25519/curve25519_athlon_mult.s
new file mode 100644
index 0000000..fba78ed
--- /dev/null
+++ b/curve25519/curve25519_athlon_mult.s
@@ -0,0 +1,410 @@
+.text
+.p2align 5
+.globl _curve25519_athlon_mult
+.globl curve25519_athlon_mult
+_curve25519_athlon_mult:
+curve25519_athlon_mult:
+mov %esp,%eax
+and $31,%eax
+add $32,%eax
+sub %eax,%esp
+movl %ebp,0(%esp)
+movl 4(%esp,%eax),%ecx
+movl 8(%esp,%eax),%edx
+movl 12(%esp,%eax),%ebp
+fldl 72(%edx)
+fmull 72(%ebp)
+fmull curve25519_athlon_scale
+fldl 0(%edx)
+fmull 64(%ebp)
+faddp %st(0),%st(1)
+fldl 8(%edx)
+fmull 56(%ebp)
+faddp %st(0),%st(1)
+fldl 0(%edx)
+fmull 72(%ebp)
+fldl 16(%edx)
+fmull 48(%ebp)
+faddp %st(0),%st(2)
+fldl 8(%edx)
+fmull 64(%ebp)
+faddp %st(0),%st(1)
+fldl 24(%edx)
+fmull 40(%ebp)
+faddp %st(0),%st(2)
+fldl 16(%edx)
+fmull 56(%ebp)
+faddp %st(0),%st(1)
+fldl 32(%edx)
+fmull 32(%ebp)
+faddp %st(0),%st(2)
+fldl 24(%edx)
+fmull 48(%ebp)
+faddp %st(0),%st(1)
+fldl 8(%edx)
+fmull 72(%ebp)
+fldl 40(%edx)
+fmull 24(%ebp)
+faddp %st(0),%st(3)
+fldl 32(%edx)
+fmull 40(%ebp)
+faddp %st(0),%st(2)
+fldl 16(%edx)
+fmull 64(%ebp)
+faddp %st(0),%st(1)
+fldl 48(%edx)
+fmull 16(%ebp)
+faddp %st(0),%st(3)
+fldl 40(%edx)
+fmull 32(%ebp)
+faddp %st(0),%st(2)
+fldl 24(%edx)
+fmull 56(%ebp)
+faddp %st(0),%st(1)
+fldl 56(%edx)
+fmull 8(%ebp)
+faddp %st(0),%st(3)
+fldl 48(%edx)
+fmull 24(%ebp)
+faddp %st(0),%st(2)
+fldl 32(%edx)
+fmull 48(%ebp)
+faddp %st(0),%st(1)
+fldl 64(%edx)
+fmull 0(%ebp)
+faddp %st(0),%st(3)
+fldl 56(%edx)
+fmull 16(%ebp)
+faddp %st(0),%st(2)
+fldl 40(%edx)
+fmull 40(%ebp)
+faddp %st(0),%st(1)
+fldl 16(%edx)
+fmull 72(%ebp)
+fldl 48(%edx)
+fmull 32(%ebp)
+faddp %st(0),%st(2)
+fldl 64(%edx)
+fmull 8(%ebp)
+faddp %st(0),%st(3)
+fldl curve25519_athlon_alpha230
+fadd %st(4),%st(0)
+fldl 24(%edx)
+fmull 64(%ebp)
+faddp %st(0),%st(2)
+fldl 56(%edx)
+fmull 24(%ebp)
+faddp %st(0),%st(3)
+fldl 72(%edx)
+fmull 0(%ebp)
+faddp %st(0),%st(4)
+fsubl curve25519_athlon_alpha230
+fldl 32(%edx)
+fmull 56(%ebp)
+faddp %st(0),%st(2)
+fldl 24(%edx)
+fmull 72(%ebp)
+fldl 64(%edx)
+fmull 16(%ebp)
+faddp %st(0),%st(4)
+fxch %st(1)
+fadd %st(0),%st(4)
+fldl 40(%edx)
+fmull 48(%ebp)
+faddp %st(0),%st(3)
+fldl 32(%edx)
+fmull 64(%ebp)
+faddp %st(0),%st(2)
+fsubrp %st(0),%st(5)
+fldl curve25519_athlon_alpha255
+fadd %st(4),%st(0)
+fldl 48(%edx)
+fmull 40(%ebp)
+faddp %st(0),%st(3)
+fldl 40(%edx)
+fmull 56(%ebp)
+faddp %st(0),%st(2)
+fldl 72(%edx)
+fmull 8(%ebp)
+faddp %st(0),%st(4)
+fsubl curve25519_athlon_alpha255
+fldl 56(%edx)
+fmull 32(%ebp)
+faddp %st(0),%st(3)
+fldl 32(%edx)
+fmull 72(%ebp)
+fldl 48(%edx)
+fmull 48(%ebp)
+faddp %st(0),%st(3)
+fldl 64(%edx)
+fmull 24(%ebp)
+faddp %st(0),%st(4)
+fxch %st(1)
+fadd %st(0),%st(4)
+fsubrp %st(0),%st(5)
+fxch %st(5)
+fstpl 64(%ecx)
+fldl 40(%edx)
+fmull 64(%ebp)
+faddp %st(0),%st(5)
+fldl 56(%edx)
+fmull 40(%ebp)
+faddp %st(0),%st(1)
+fldl 72(%edx)
+fmull 16(%ebp)
+faddp %st(0),%st(2)
+fxch %st(2)
+fmull curve25519_athlon_scale
+fldl 48(%edx)
+fmull 56(%ebp)
+faddp %st(0),%st(5)
+fldl 64(%edx)
+fmull 32(%ebp)
+faddp %st(0),%st(3)
+fxch %st(1)
+fmull curve25519_athlon_scale
+fldl 0(%edx)
+fmull 0(%ebp)
+faddp %st(0),%st(2)
+fxch %st(3)
+fstpl 72(%ecx)
+fldl 40(%edx)
+fmull 72(%ebp)
+fldl 56(%edx)
+fmull 48(%ebp)
+faddp %st(0),%st(5)
+fldl 72(%edx)
+fmull 24(%ebp)
+faddp %st(0),%st(3)
+fldl 0(%edx)
+fmull 8(%ebp)
+faddp %st(0),%st(4)
+fldl curve25519_athlon_alpha26
+fadd %st(2),%st(0)
+fldl 48(%edx)
+fmull 64(%ebp)
+faddp %st(0),%st(2)
+fldl 64(%edx)
+fmull 40(%ebp)
+faddp %st(0),%st(6)
+fxch %st(3)
+fmull curve25519_athlon_scale
+fldl 8(%edx)
+fmull 0(%ebp)
+faddp %st(0),%st(5)
+fxch %st(3)
+fsubl curve25519_athlon_alpha26
+fldl 56(%edx)
+fmull 56(%ebp)
+faddp %st(0),%st(2)
+fldl 72(%edx)
+fmull 32(%ebp)
+faddp %st(0),%st(6)
+fldl 0(%edx)
+fmull 16(%ebp)
+faddp %st(0),%st(4)
+fadd %st(0),%st(4)
+fsubrp %st(0),%st(2)
+fldl 48(%edx)
+fmull 72(%ebp)
+fldl 64(%edx)
+fmull 48(%ebp)
+faddp %st(0),%st(2)
+fxch %st(5)
+fmull curve25519_athlon_scale
+fldl 8(%edx)
+fmull 8(%ebp)
+faddp %st(0),%st(4)
+fldl curve25519_athlon_alpha51
+fadd %st(5),%st(0)
+fldl 56(%edx)
+fmull 64(%ebp)
+faddp %st(0),%st(7)
+fldl 72(%edx)
+fmull 40(%ebp)
+faddp %st(0),%st(3)
+fldl 0(%edx)
+fmull 24(%ebp)
+faddp %st(0),%st(2)
+fldl 16(%edx)
+fmull 0(%ebp)
+faddp %st(0),%st(5)
+fsubl curve25519_athlon_alpha51
+fxch %st(3)
+fstpl 0(%ecx)
+fldl 64(%edx)
+fmull 56(%ebp)
+faddp %st(0),%st(6)
+fxch %st(1)
+fmull curve25519_athlon_scale
+fldl 8(%edx)
+fmull 16(%ebp)
+faddp %st(0),%st(2)
+fxch %st(2)
+fadd %st(0),%st(3)
+fsubrp %st(0),%st(4)
+fldl 56(%edx)
+fmull 72(%ebp)
+fldl 72(%edx)
+fmull 48(%ebp)
+faddp %st(0),%st(6)
+fldl 0(%edx)
+fmull 32(%ebp)
+faddp %st(0),%st(3)
+fldl 16(%edx)
+fmull 8(%ebp)
+faddp %st(0),%st(2)
+fldl curve25519_athlon_alpha77
+fadd %st(4),%st(0)
+fldl 64(%edx)
+fmull 64(%ebp)
+faddp %st(0),%st(2)
+fxch %st(6)
+fmull curve25519_athlon_scale
+fldl 8(%edx)
+fmull 24(%ebp)
+faddp %st(0),%st(4)
+fldl 24(%edx)
+fmull 0(%ebp)
+faddp %st(0),%st(3)
+fxch %st(6)
+fsubl curve25519_athlon_alpha77
+fxch %st(5)
+fstpl 8(%ecx)
+fldl 72(%edx)
+fmull 56(%ebp)
+faddp %st(0),%st(1)
+fldl 0(%edx)
+fmull 40(%ebp)
+faddp %st(0),%st(6)
+fldl 16(%edx)
+fmull 16(%ebp)
+faddp %st(0),%st(3)
+fxch %st(4)
+fadd %st(0),%st(1)
+fsubrp %st(0),%st(3)
+fldl 64(%edx)
+fmull 72(%ebp)
+fxch %st(4)
+fmull curve25519_athlon_scale
+fldl 8(%edx)
+fmull 32(%ebp)
+faddp %st(0),%st(6)
+fldl 24(%edx)
+fmull 8(%ebp)
+faddp %st(0),%st(3)
+fldl curve25519_athlon_alpha102
+fadd %st(2),%st(0)
+fldl 72(%edx)
+fmull 64(%ebp)
+faddp %st(0),%st(6)
+fldl 0(%edx)
+fmull 48(%ebp)
+faddp %st(0),%st(2)
+fldl 16(%edx)
+fmull 24(%ebp)
+faddp %st(0),%st(7)
+fldl 32(%edx)
+fmull 0(%ebp)
+faddp %st(0),%st(4)
+fsubl curve25519_athlon_alpha102
+fxch %st(4)
+fstpl 16(%ecx)
+fxch %st(4)
+fmull curve25519_athlon_scale
+fldl 8(%edx)
+fmull 40(%ebp)
+faddp %st(0),%st(5)
+fldl 24(%edx)
+fmull 16(%ebp)
+faddp %st(0),%st(6)
+fxch %st(3)
+fadd %st(0),%st(2)
+fsubrp %st(0),%st(1)
+fldl 0(%edx)
+fmull 56(%ebp)
+faddp %st(0),%st(3)
+fldl 16(%edx)
+fmull 32(%ebp)
+faddp %st(0),%st(4)
+fldl 32(%edx)
+fmull 8(%ebp)
+faddp %st(0),%st(5)
+fldl curve25519_athlon_alpha128
+fadd %st(2),%st(0)
+fldl 8(%edx)
+fmull 48(%ebp)
+faddp %st(0),%st(4)
+fldl 24(%edx)
+fmull 24(%ebp)
+faddp %st(0),%st(5)
+fldl 40(%edx)
+fmull 0(%ebp)
+faddp %st(0),%st(6)
+fsubl curve25519_athlon_alpha128
+fxch %st(1)
+fstpl 24(%ecx)
+fldl 16(%edx)
+fmull 40(%ebp)
+faddp %st(0),%st(3)
+fldl 32(%edx)
+fmull 16(%ebp)
+faddp %st(0),%st(4)
+fadd %st(0),%st(4)
+fsubrp %st(0),%st(1)
+fstpl 32(%ecx)
+fldl 24(%edx)
+fmull 32(%ebp)
+faddp %st(0),%st(1)
+fldl 40(%edx)
+fmull 8(%ebp)
+faddp %st(0),%st(2)
+fldl curve25519_athlon_alpha153
+fadd %st(3),%st(0)
+fldl 32(%edx)
+fmull 24(%ebp)
+faddp %st(0),%st(2)
+fldl 48(%edx)
+fmull 0(%ebp)
+faddp %st(0),%st(3)
+fsubl curve25519_athlon_alpha153
+fldl 40(%edx)
+fmull 16(%ebp)
+faddp %st(0),%st(2)
+fadd %st(0),%st(2)
+fsubrp %st(0),%st(3)
+fxch %st(2)
+fstpl 40(%ecx)
+fldl 48(%edx)
+fmull 8(%ebp)
+faddp %st(0),%st(2)
+fldl curve25519_athlon_alpha179
+fadd %st(1),%st(0)
+fldl 56(%edx)
+fmull 0(%ebp)
+faddp %st(0),%st(3)
+fsubl curve25519_athlon_alpha179
+fldl 64(%ecx)
+fldl 72(%ecx)
+fxch %st(2)
+fadd %st(0),%st(4)
+fsubrp %st(0),%st(3)
+fldl curve25519_athlon_alpha204
+fadd %st(4),%st(0)
+fsubl curve25519_athlon_alpha204
+fadd %st(0),%st(1)
+fsubrp %st(0),%st(4)
+fldl curve25519_athlon_alpha230
+fadd %st(1),%st(0)
+fsubl curve25519_athlon_alpha230
+fsubr %st(0),%st(1)
+faddp %st(0),%st(2)
+fxch %st(2)
+fstpl 48(%ecx)
+fxch %st(2)
+fstpl 56(%ecx)
+fstpl 64(%ecx)
+fstpl 72(%ecx)
+movl 0(%esp),%ebp
+add %eax,%esp
+ret
diff --git a/curve25519/curve25519_athlon_square.s b/curve25519/curve25519_athlon_square.s
new file mode 100644
index 0000000..74f06e5
--- /dev/null
+++ b/curve25519/curve25519_athlon_square.s
@@ -0,0 +1,298 @@
+.text
+.p2align 5
+.globl _curve25519_athlon_square
+.globl curve25519_athlon_square
+_curve25519_athlon_square:
+curve25519_athlon_square:
+mov %esp,%eax
+and $31,%eax
+add $64,%eax
+sub %eax,%esp
+movl 8(%esp,%eax),%edx
+movl 4(%esp,%eax),%ecx
+fldl 72(%edx)
+fmul %st(0),%st(0)
+fldl 0(%edx)
+fadd %st(0),%st(0)
+fldl 8(%edx)
+fadd %st(0),%st(0)
+fldl 16(%edx)
+fadd %st(0),%st(0)
+fldl 56(%edx)
+fxch %st(4)
+fmull curve25519_athlon_scale
+fldl 72(%edx)
+fmul %st(4),%st(0)
+fldl 64(%edx)
+fmul %st(4),%st(0)
+faddp %st(0),%st(1)
+fxch %st(4)
+fstl 0(%esp)
+fxch %st(3)
+fstl 8(%esp)
+fxch %st(3)
+fmull 64(%edx)
+faddp %st(0),%st(1)
+fldl 48(%edx)
+fxch %st(5)
+fmul %st(0),%st(3)
+fxch %st(3)
+faddp %st(0),%st(1)
+fxch %st(2)
+fadd %st(0),%st(0)
+fldl 56(%edx)
+fmul %st(2),%st(0)
+faddp %st(0),%st(4)
+fxch %st(1)
+fstl 16(%esp)
+fldl 40(%edx)
+fxch %st(5)
+fmul %st(0),%st(1)
+fxch %st(1)
+faddp %st(0),%st(3)
+fadd %st(0),%st(0)
+fstpl 48(%esp)
+fldl 24(%edx)
+fadd %st(0),%st(0)
+fstl 24(%esp)
+fldl 48(%edx)
+fmul %st(1),%st(0)
+faddp %st(0),%st(4)
+fmul %st(4),%st(0)
+faddp %st(0),%st(2)
+fxch %st(3)
+fadd %st(0),%st(0)
+fstpl 40(%esp)
+fldl 32(%edx)
+fmul %st(0),%st(0)
+faddp %st(0),%st(1)
+fldl curve25519_athlon_alpha230
+fadd %st(1),%st(0)
+fsubl curve25519_athlon_alpha230
+fsubr %st(0),%st(1)
+fldl 8(%esp)
+fldl 72(%edx)
+fmul %st(0),%st(1)
+fldl 16(%esp)
+fmul %st(0),%st(1)
+fldl 64(%edx)
+fmul %st(0),%st(1)
+fxch %st(1)
+faddp %st(0),%st(3)
+fldl 24(%esp)
+fmul %st(0),%st(1)
+fxch %st(1)
+faddp %st(0),%st(2)
+fldl 32(%edx)
+fadd %st(0),%st(0)
+fstl 32(%esp)
+fmull 40(%edx)
+faddp %st(0),%st(6)
+fxch %st(3)
+faddp %st(0),%st(5)
+fldl curve25519_athlon_alpha255
+fadd %st(5),%st(0)
+fsubl curve25519_athlon_alpha255
+fsubr %st(0),%st(5)
+fldl 56(%edx)
+fmul %st(0),%st(4)
+fxch %st(4)
+faddp %st(0),%st(3)
+fldl 32(%esp)
+fmul %st(0),%st(4)
+fxch %st(4)
+faddp %st(0),%st(2)
+fldl 48(%edx)
+fmul %st(0),%st(4)
+fxch %st(4)
+faddp %st(0),%st(3)
+fxch %st(3)
+fmull 40(%esp)
+faddp %st(0),%st(1)
+fxch %st(3)
+fstpl 64(%ecx)
+fldl 40(%edx)
+fmul %st(0),%st(0)
+faddp %st(0),%st(1)
+fxch %st(2)
+fmull curve25519_athlon_scale
+fxch %st(3)
+fstpl 72(%ecx)
+faddp %st(0),%st(1)
+fmull curve25519_athlon_scale
+fldl 24(%esp)
+fmull 72(%edx)
+fldl 0(%edx)
+fmul %st(0),%st(0)
+faddp %st(0),%st(2)
+fldl 32(%esp)
+fmull 64(%edx)
+faddp %st(0),%st(1)
+fldl 0(%esp)
+fmull 8(%edx)
+faddp %st(0),%st(3)
+fldl 40(%esp)
+fmull 56(%edx)
+faddp %st(0),%st(1)
+fldl curve25519_athlon_alpha26
+fadd %st(2),%st(0)
+fsubl curve25519_athlon_alpha26
+fsubr %st(0),%st(2)
+faddp %st(0),%st(3)
+fldl curve25519_athlon_alpha51
+fadd %st(3),%st(0)
+fsubl curve25519_athlon_alpha51
+fsubr %st(0),%st(3)
+fldl 48(%edx)
+fmul %st(0),%st(0)
+faddp %st(0),%st(2)
+fxch %st(1)
+fmull curve25519_athlon_scale
+fldl 0(%esp)
+fmull 16(%edx)
+faddp %st(0),%st(1)
+fldl 8(%edx)
+fmul %st(0),%st(0)
+faddp %st(0),%st(1)
+faddp %st(0),%st(1)
+fldl curve25519_athlon_alpha77
+fadd %st(1),%st(0)
+fsubl curve25519_athlon_alpha77
+fsubr %st(0),%st(1)
+fxch %st(2)
+fstpl 0(%ecx)
+fldl 32(%esp)
+fmull 72(%edx)
+fldl 40(%esp)
+fmull 64(%edx)
+faddp %st(0),%st(1)
+fldl 48(%esp)
+fmull 56(%edx)
+faddp %st(0),%st(1)
+fmull curve25519_athlon_scale
+fldl 0(%esp)
+fmull 24(%edx)
+faddp %st(0),%st(1)
+fldl 8(%esp)
+fmull 16(%edx)
+faddp %st(0),%st(1)
+faddp %st(0),%st(2)
+fldl curve25519_athlon_alpha102
+fadd %st(2),%st(0)
+fsubl curve25519_athlon_alpha102
+fsubr %st(0),%st(2)
+fxch %st(3)
+fstpl 8(%ecx)
+fldl 40(%esp)
+fmull 72(%edx)
+fldl 48(%esp)
+fmull 64(%edx)
+faddp %st(0),%st(1)
+fldl 56(%edx)
+fmul %st(0),%st(0)
+faddp %st(0),%st(1)
+fmull curve25519_athlon_scale
+fldl 0(%esp)
+fmull 32(%edx)
+faddp %st(0),%st(1)
+fldl 8(%esp)
+fmull 24(%edx)
+faddp %st(0),%st(1)
+fldl 16(%edx)
+fmul %st(0),%st(0)
+faddp %st(0),%st(1)
+faddp %st(0),%st(3)
+fldl curve25519_athlon_alpha128
+fadd %st(3),%st(0)
+fsubl curve25519_athlon_alpha128
+fsubr %st(0),%st(3)
+fxch %st(1)
+fstpl 16(%ecx)
+fldl 48(%esp)
+fldl 72(%edx)
+fmul %st(0),%st(1)
+fmul %st(5),%st(0)
+fxch %st(5)
+fmull 64(%edx)
+faddp %st(0),%st(1)
+fmull curve25519_athlon_scale
+fldl 0(%esp)
+fmull 40(%edx)
+faddp %st(0),%st(1)
+fldl 8(%esp)
+fmull 32(%edx)
+faddp %st(0),%st(1)
+fldl 16(%esp)
+fmull 24(%edx)
+faddp %st(0),%st(1)
+faddp %st(0),%st(1)
+fldl curve25519_athlon_alpha153
+fadd %st(1),%st(0)
+fsubl curve25519_athlon_alpha153
+fsubr %st(0),%st(1)
+fxch %st(2)
+fstpl 24(%ecx)
+fldl 64(%edx)
+fmul %st(0),%st(0)
+faddp %st(0),%st(4)
+fxch %st(3)
+fmull curve25519_athlon_scale
+fldl 0(%esp)
+fmull 48(%edx)
+faddp %st(0),%st(1)
+fldl 8(%esp)
+fmull 40(%edx)
+faddp %st(0),%st(1)
+fldl 16(%esp)
+fmull 32(%edx)
+faddp %st(0),%st(1)
+fldl 24(%edx)
+fmul %st(0),%st(0)
+faddp %st(0),%st(1)
+faddp %st(0),%st(1)
+fldl curve25519_athlon_alpha179
+fadd %st(1),%st(0)
+fsubl curve25519_athlon_alpha179
+fsubr %st(0),%st(1)
+fldl 64(%edx)
+fadd %st(0),%st(0)
+fmull 72(%edx)
+fmull curve25519_athlon_scale
+fldl 0(%esp)
+fmull 56(%edx)
+faddp %st(0),%st(1)
+fldl 8(%esp)
+fmull 48(%edx)
+faddp %st(0),%st(1)
+fldl 16(%esp)
+fmull 40(%edx)
+faddp %st(0),%st(1)
+fldl 24(%esp)
+fmull 32(%edx)
+faddp %st(0),%st(1)
+faddp %st(0),%st(1)
+fldl curve25519_athlon_alpha204
+fadd %st(1),%st(0)
+fsubl curve25519_athlon_alpha204
+fsubr %st(0),%st(1)
+fldl 64(%ecx)
+faddp %st(0),%st(1)
+fldl curve25519_athlon_alpha230
+fadd %st(1),%st(0)
+fldl 72(%ecx)
+fxch %st(1)
+fsubl curve25519_athlon_alpha230
+fsubr %st(0),%st(2)
+faddp %st(0),%st(1)
+fxch %st(4)
+fstpl 32(%ecx)
+fxch %st(4)
+fstpl 40(%ecx)
+fxch %st(1)
+fstpl 48(%ecx)
+fstpl 56(%ecx)
+fxch %st(1)
+fstpl 64(%ecx)
+fstpl 72(%ecx)
+add %eax,%esp
+ret
diff --git a/curve25519/curve25519_athlon_todouble.s b/curve25519/curve25519_athlon_todouble.s
new file mode 100644
index 0000000..2cc4539
--- /dev/null
+++ b/curve25519/curve25519_athlon_todouble.s
@@ -0,0 +1,144 @@
+.text
+.p2align 5
+.globl _curve25519_athlon_todouble
+.globl curve25519_athlon_todouble
+_curve25519_athlon_todouble:
+curve25519_athlon_todouble:
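+# Convert the packed 32-byte little-endian input into the ten-limb
+# floating-point representation used by the other routines.  Each chunk
+# is written into the low word of a stack slot whose high word
+# (0x43300000, 0x45300000, ...) is the upper half of a power-of-two
+# double, so loading the slot and subtracting the matching
+# curve25519_athlon_in*offset constant recovers the chunk as a double
+# scaled to its bit position; the alpha-constant add/subtract pairs then
+# carry the results into reduced limbs.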
+mov %esp,%eax
+and $31,%eax
+add $96,%eax
+sub %eax,%esp
+movl 8(%esp,%eax),%ecx
+movl 0(%ecx),%edx
+movl $0x43300000,4(%esp)
+movl %edx,0(%esp)
+movl 4(%ecx),%edx
+and $0xffffff,%edx
+movl $0x45300000,12(%esp)
+movl %edx,8(%esp)
+movl 7(%ecx),%edx
+and $0xffffff,%edx
+movl $0x46b00000,20(%esp)
+movl %edx,16(%esp)
+movl 10(%ecx),%edx
+and $0xffffff,%edx
+movl $0x48300000,28(%esp)
+movl %edx,24(%esp)
+movl 13(%ecx),%edx
+and $0xffffff,%edx
+movl $0x49b00000,36(%esp)
+movl %edx,32(%esp)
+movl 16(%ecx),%edx
+movl $0x4b300000,44(%esp)
+movl %edx,40(%esp)
+movl 20(%ecx),%edx
+and $0xffffff,%edx
+movl $0x4d300000,52(%esp)
+movl %edx,48(%esp)
+movl 23(%ecx),%edx
+and $0xffffff,%edx
+movl $0x4eb00000,60(%esp)
+movl %edx,56(%esp)
+movl 26(%ecx),%edx
+and $0xffffff,%edx
+movl $0x50300000,68(%esp)
+movl %edx,64(%esp)
+movl 28(%ecx),%ecx
+shr $8,%ecx
+and $0x7fffff,%ecx
+movl $0x51b00000,76(%esp)
+movl %ecx,72(%esp)
+movl 4(%esp,%eax),%ecx
+fldl 72(%esp)
+fsubl curve25519_athlon_in9offset
+fldl curve25519_athlon_alpha255
+fadd %st(1),%st(0)
+fsubl curve25519_athlon_alpha255
+fsubr %st(0),%st(1)
+fldl 0(%esp)
+fsubl curve25519_athlon_in0offset
+fxch %st(1)
+fmull curve25519_athlon_scale
+faddp %st(0),%st(1)
+fldl curve25519_athlon_alpha26
+fadd %st(1),%st(0)
+fsubl curve25519_athlon_alpha26
+fsubr %st(0),%st(1)
+fxch %st(1)
+fstpl 0(%ecx)
+fldl 8(%esp)
+fsubl curve25519_athlon_in1offset
+faddp %st(0),%st(1)
+fldl curve25519_athlon_alpha51
+fadd %st(1),%st(0)
+fsubl curve25519_athlon_alpha51
+fsubr %st(0),%st(1)
+fxch %st(1)
+fstpl 8(%ecx)
+fldl 16(%esp)
+fsubl curve25519_athlon_in2offset
+faddp %st(0),%st(1)
+fldl curve25519_athlon_alpha77
+fadd %st(1),%st(0)
+fsubl curve25519_athlon_alpha77
+fsubr %st(0),%st(1)
+fxch %st(1)
+fstpl 16(%ecx)
+fldl 24(%esp)
+fsubl curve25519_athlon_in3offset
+faddp %st(0),%st(1)
+fldl curve25519_athlon_alpha102
+fadd %st(1),%st(0)
+fsubl curve25519_athlon_alpha102
+fsubr %st(0),%st(1)
+fxch %st(1)
+fstpl 24(%ecx)
+fldl 32(%esp)
+fsubl curve25519_athlon_in4offset
+faddp %st(0),%st(1)
+fldl curve25519_athlon_alpha128
+fadd %st(1),%st(0)
+fsubl curve25519_athlon_alpha128
+fsubr %st(0),%st(1)
+fxch %st(1)
+fstpl 32(%ecx)
+fldl 40(%esp)
+fsubl curve25519_athlon_in5offset
+faddp %st(0),%st(1)
+fldl curve25519_athlon_alpha153
+fadd %st(1),%st(0)
+fsubl curve25519_athlon_alpha153
+fsubr %st(0),%st(1)
+fxch %st(1)
+fstpl 40(%ecx)
+fldl 48(%esp)
+fsubl curve25519_athlon_in6offset
+faddp %st(0),%st(1)
+fldl curve25519_athlon_alpha179
+fadd %st(1),%st(0)
+fsubl curve25519_athlon_alpha179
+fsubr %st(0),%st(1)
+fxch %st(1)
+fstpl 48(%ecx)
+fldl 56(%esp)
+fsubl curve25519_athlon_in7offset
+faddp %st(0),%st(1)
+fldl curve25519_athlon_alpha204
+fadd %st(1),%st(0)
+fsubl curve25519_athlon_alpha204
+fsubr %st(0),%st(1)
+fxch %st(1)
+fstpl 56(%ecx)
+fldl 64(%esp)
+fsubl curve25519_athlon_in8offset
+faddp %st(0),%st(1)
+fldl curve25519_athlon_alpha230
+fadd %st(1),%st(0)
+fsubl curve25519_athlon_alpha230
+fsubr %st(0),%st(1)
+fxch %st(1)
+fstpl 64(%ecx)
+faddp %st(0),%st(1)
+fstpl 72(%ecx)
+add %eax,%esp
+ret
diff --git a/curve25519/curvedh.c b/curve25519/curvedh.c
new file mode 100644
index 0000000..0f2e477
--- /dev/null
+++ b/curve25519/curvedh.c
@@ -0,0 +1,83 @@
+/* Copyright 2011 Ian Goldberg
+ *
+ * This file is part of Sphinx.
+ *
+ * Sphinx is free software: you can redistribute it and/or modify
+ * it under the terms of version 3 of the GNU Lesser General Public
+ * License as published by the Free Software Foundation.
+ *
+ * Sphinx is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with Sphinx. If not, see
+ * <http://www.gnu.org/licenses/>.
+ */
+
+#include <stdio.h>
+#include <string.h>
+#include "curvedh.h"
+#include "curve25519.h"
+
+#ifdef DEBUG
+static void dump(char *label, unsigned char *data)
+{
+ int i;
+ printf("%6s: ", label);
+ for(i=0;i<32;++i) {
+ printf("%02x", data[i]);
+ }
+ printf("\n");
+}
+#endif
+
+void curvedh(unsigned char *curve_out, unsigned char *exp_data, int exp_len,
+ unsigned char *base_data, int base_len)
+{
+ int i;
+
+ /* In case we error out early, wipe to 0 */
+ for(i=0;i<32;++i) { curve_out[i] = '\0'; }
+
+ /* It would be nice to throw a Python exception here; not sure how */
+ if (base_len != 32 || exp_len != 32) return;
+
+#ifdef DEBUG
+dump("base", base_data);
+dump("exp", exp_data);
+#endif
+ curve25519(curve_out, exp_data, base_data);
+#ifdef DEBUG
+dump("out", curve_out);
+#endif
+}
+
+/* Make the base point for the curve */
+void basepoint(unsigned char *curve_out)
+{
+
+ int i;
+
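+ /* The standard Curve25519 base point is u = 9, encoded as a 32-byte
+  * little-endian string. */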
+ curve_out[0] = 9;
+ for(i=1;i<32;++i) { curve_out[i] = '\0'; }
+}
+
+/* Make a secret key given 32 random bytes */
+void makesecret(unsigned char *curve_out, unsigned char *exp_data, int exp_len)
+{
+ int i;
+
+ /* In case we error out early, wipe to 0 */
+ for(i=0;i<32;++i) { curve_out[i] = '\0'; }
+
+ /* It would be nice to throw a Python exception here; not sure how */
+ if (exp_len != 32) return;
+
+ memmove(curve_out, exp_data, 32);
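+ /* Clamp the scalar as specified for Curve25519: clear the low three
+  * bits so it is a multiple of the cofactor 8, clear the top bit, and
+  * set bit 254. */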
+ curve_out[0] &= 248;
+ curve_out[31] &= 127;
+ curve_out[31] |= 64;
+}
+
diff --git a/curve25519/curvedh.h b/curve25519/curvedh.h
new file mode 100644
index 0000000..bc69989
--- /dev/null
+++ b/curve25519/curvedh.h
@@ -0,0 +1,29 @@
+/* Copyright 2011 Ian Goldberg
+ *
+ * This file is part of Sphinx.
+ *
+ * Sphinx is free software: you can redistribute it and/or modify
+ * it under the terms of version 3 of the GNU Lesser General Public
+ * License as published by the Free Software Foundation.
+ *
+ * Sphinx is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with Sphinx. If not, see
+ * <http://www.gnu.org/licenses/>.
+ */
+
+#ifndef __CURVEDH_H__
+#define __CURVEDH_H__
+
+void curvedh(unsigned char *curve_out, unsigned char *exp_data, int exp_len,
+ unsigned char *base_data, int base_len);
+
+void basepoint(unsigned char *curve_out);
+
+void makesecret(unsigned char *curve_out, unsigned char *exp_data, int exp_len);
+
+#endif
diff --git a/curve25519/curvedh.i b/curve25519/curvedh.i
new file mode 100644
index 0000000..0fb9813
--- /dev/null
+++ b/curve25519/curvedh.i
@@ -0,0 +1,34 @@
+/* Copyright 2011 Ian Goldberg
+ *
+ * This file is part of Sphinx.
+ *
+ * Sphinx is free software: you can redistribute it and/or modify
+ * it under the terms of version 3 of the GNU Lesser General Public
+ * License as published by the Free Software Foundation.
+ *
+ * Sphinx is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with Sphinx. If not, see
+ * <http://www.gnu.org/licenses/>.
+ */
+
+%module curvedh
+%{
+#include "curvedh.h"
+%}
+
+%include "cstring.i"
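+/* curve_out is returned to the caller as a 32-byte Python string; each
+   (data, len) pointer/length pair is filled in from a single Python
+   string argument. */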
+%cstring_chunk_output(unsigned char *curve_out, 32);
+%apply (char *STRING, int LENGTH) { (unsigned char *base_data, int base_len) };
+%apply (char *STRING, int LENGTH) { (unsigned char *exp_data, int exp_len) };
+
+void curvedh(unsigned char *curve_out, unsigned char *exp_data, int exp_len,
+ unsigned char *base_data, int base_len);
+
+void basepoint(unsigned char *curve_out);
+
+void makesecret(unsigned char *curve_out, unsigned char *exp_data, int exp_len);
diff --git a/curve25519/testcurvedh.py b/curve25519/testcurvedh.py
new file mode 100755
index 0000000..2a88870
--- /dev/null
+++ b/curve25519/testcurvedh.py
@@ -0,0 +1,49 @@
+#!/usr/bin/env python
+
+# Copyright 2011 Ian Goldberg
+#
+# This file is part of Sphinx.
+#
+# Sphinx is free software: you can redistribute it and/or modify
+# it under the terms of version 3 of the GNU Lesser General Public
+# License as published by the Free Software Foundation.
+#
+# Sphinx is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with Sphinx. If not, see
+# <http://www.gnu.org/licenses/>.
+
+import os
+from curvedh import *
+
+g = basepoint()
+
+print "g:", g.encode("hex")
+
+# Make a secret key from 32 random bytes
+x = makesecret(os.urandom(32))
+y = makesecret(os.urandom(32))
+
+print "x:", x.encode("hex")
+print "y:", y.encode("hex")
+
+# Do the exponentiation operation. Note: first exponent, then base
+X = curvedh(x,g)
+Y = curvedh(y,g)
+
+print "X:", X.encode("hex")
+print "Y:", Y.encode("hex")
+
+s1 = curvedh(x,Y)
+s2 = curvedh(y,X)
+
+print "s:", s1.encode("hex")
+print "s:", s2.encode("hex")
+
+assert s1 == s2
+
+print "DH Success!"
diff --git a/curve25519/x86cpuid.c b/curve25519/x86cpuid.c
new file mode 100644
index 0000000..98e37db
--- /dev/null
+++ b/curve25519/x86cpuid.c
@@ -0,0 +1,38 @@
+#include <stdio.h>
+#include <stdlib.h>
+#include <signal.h>
+
+void nope()
+{
+ exit(1);
+}
+
+int main()
+{
+ unsigned long x[4];
+ unsigned long y[4];
+ int i;
+ int j;
+ char c;
+
+ signal(SIGILL,nope);
+
+ x[0] = 0;
+ x[1] = 0;
+ x[2] = 0;
+ x[3] = 0;
+
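+ /* ".byte 15;.byte 162" is the CPUID instruction (opcode 0x0f 0xa2)
+  * written as raw bytes.  Leaf 0 returns the highest supported leaf
+  * in eax and the vendor string in ebx, edx, ecx; leaf 1 returns the
+  * processor signature in eax and feature flags in edx. */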
+ asm volatile(".byte 15;.byte 162" : "=a"(x[0]),"=b"(x[1]),"=c"(x[3]),"=d"(x[2]) : "0"(0) );
+ if (!x[0]) return 0;
+ asm volatile(".byte 15;.byte 162" : "=a"(y[0]),"=b"(y[1]),"=c"(y[2]),"=d"(y[3]) : "0"(1) );
+
+ for (i = 1;i < 4;++i)
+ for (j = 0;j < 4;++j) {
+ c = x[i] >> (8 * j);
+ if (c < 32) c = 32;
+ if (c > 126) c = 126;
+ putchar(c);
+ }
+
+ printf("-%08x-%08x\n",y[0],y[3]);
+
+ return 0;
+}