From: Ramakrishnan Muthukrishnan Date: Thu, 21 Jan 2016 15:54:29 +0000 (+0530) Subject: zfec: rearrange files X-Git-Url: https://git.rkrishnan.org/components/com_hotproperty/frontends/flags/something?a=commitdiff_plain;h=307b550ab0df85341b6fb662f4e1523d611d238b;p=tahoe-lafs%2Fzfec.git zfec: rearrange files --- diff --git a/COPYING.GPL b/COPYING.GPL new file mode 100644 index 0000000..5075166 --- /dev/null +++ b/COPYING.GPL @@ -0,0 +1,451 @@ +This work also comes with the added permission that you may combine it with a +work licensed under the OpenSSL license (any version) and distribute the +resulting combined work, as long as you follow the requirements of the +licences of this work in regard to all of the resulting combined work +aside from the work licensed under the OpenSSL license. + +This work also comes with the added permission that you may combine it with a +work licensed under the Eclipse Public Licence (any version) and distribute +the resulting combined work, as long as you follow the requirements of the +licences of this work in regard to all of the resulting combined work +aside from the work licensed under the Eclipse Public Licence. + +This work also comes with the added permission that you may combine it with a +work licensed under the Q Public Licence (any version) and distribute the +resulting combined work, as long as you follow the requirements of the +licences of this work in regard to all of the resulting combined work +aside from the work licensed under the Q Public Licence. + +This work also comes with the added permission that you may combine it with a +work licensed under the Apache Licence (any version) and distribute the +resulting combined work, as long as you follow the requirements of the +licences of this work in regard to all of the resulting combined work +aside from the work licensed under the Apache Licence. + +This work also comes with the added permission that you may combine it with a +work licensed under the GNU Lesser General Public License (any version) and +distribute the resulting combined work, as long as you follow the +requirements of the licences of this work in regard to all of the +resulting combined work aside from the work licensed under the GNU Lesser +General Public License. + +This work also comes with the added permission that you may combine it with a +work licensed under the Zope Public License (any version) and distribute the +resulting combined work, as long as you follow the requirements of the +licences of this work in regard to all of the resulting combined work +aside from the work licensed under the Zope Public License. + +This work also comes with the added permission that you may combine it with a +work licensed under the Python Software Foundation License (any version) and +distribute the resulting combined work, as long as you follow the +requirements of the licences of this work in regard to all of the +resulting combined work aside from the work licensed under the Python +Software Foundation License. + +This work also comes with the added permission that you may combine it with a +work licensed under the Academic Free License (any version) and distribute +the resulting combined work, as long as you follow the requirements of the +licences of this work in regard to all of the resulting combined work +aside from the work licensed under the Academic Free License. 
+ +This work also comes with the added permission that you may combine it with a +work licensed under the Apple Public Source License (any version) and +distribute the resulting combined work, as long as you follow the +requirements of the licences of this work in regard to all of the +resulting combined work aside from the work licensed under the Apple Public +Source License. + +This work also comes with the added permission that you may combine it with a +work licensed under the BitTorrent Open Source License (any version) and +distribute the resulting combined work, as long as you follow the +requirements of the licences of this work in regard to all of the +resulting combined work aside from the work licensed under the BitTorrent +Open Source License. + +This work also comes with the added permission that you may combine it with a +work licensed under the Lucent Public License (any version) and distribute +the resulting combined work, as long as you follow the requirements of the +licences of this work in regard to all of the resulting combined work +aside from the work licensed under the Lucent Public License. + +This work also comes with the added permission that you may combine it with a +work licensed under the Jabber Open Source License (any version) and +distribute the resulting combined work, as long as you follow the +requirements of the licences of this work in regard to all of the +resulting combined work aside from the work licensed under the Jabber Open +Source License. + +This work also comes with the added permission that you may combine it with a +work licensed under the Common Development and Distribution License (any +version) and distribute the resulting combined work, as long as you follow +the requirements of the licences of this work in regard to all of the +resulting combined work aside from the work licensed under the Common +Development and Distribution License. + +This work also comes with the added permission that you may combine it with a +work licensed under the Microsoft Public License (any version) and distribute +the resulting combined work, as long as you follow the requirements of the +licences of this work in regard to all of the resulting combined work +aside from the work licensed under the Microsoft Public License. + +This work also comes with the added permission that you may combine it with a +work licensed under the Microsoft Reciprocal License (any version) and +distribute the resulting combined work, as long as you follow the +requirements of the licences of this work in regard to all of the +resulting combined work aside from the work licensed under the Microsoft +Reciprocal License. + +This work also comes with the added permission that you may combine it with a +work licensed under the Sun Industry Standards Source License (any version) +and distribute the resulting combined work, as long as you follow the +requirements of the licences of this work in regard to all of the +resulting combined work aside from the work licensed under the Sun Industry +Standards Source License. + +This work also comes with the added permission that you may combine it with a +work licensed under the Open Software License (any version) and distribute +the resulting combined work, as long as you follow the requirements of the +licences of this work in regard to all of the resulting combined work +aside from the work licensed under the Open Software License. 
+ + + + GNU GENERAL PUBLIC LICENSE + Version 2, June 1991 + + Copyright (C) 1989, 1991 Free Software Foundation, Inc., + 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The licenses for most software are designed to take away your +freedom to share and change it. By contrast, the GNU General Public +License is intended to guarantee your freedom to share and change free +software--to make sure the software is free for all its users. This +General Public License applies to most of the Free Software +Foundation's software and to any other program whose authors commit to +using it. (Some other Free Software Foundation software is covered by +the GNU Lesser General Public License instead.) You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +this service if you wish), that you receive source code or can get it +if you want it, that you can change the software or use pieces of it +in new free programs; and that you know you can do these things. + + To protect your rights, we need to make restrictions that forbid +anyone to deny you these rights or to ask you to surrender the rights. +These restrictions translate to certain responsibilities for you if you +distribute copies of the software, or if you modify it. + + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must give the recipients all the rights that +you have. You must make sure that they, too, receive or can get the +source code. And you must show them these terms so they know their +rights. + + We protect your rights with two steps: (1) copyright the software, and +(2) offer you this license which gives you legal permission to copy, +distribute and/or modify the software. + + Also, for each author's protection and ours, we want to make certain +that everyone understands that there is no warranty for this free +software. If the software is modified by someone else and passed on, we +want its recipients to know that what they have is not the original, so +that any problems introduced by others will not reflect on the original +authors' reputations. + + Finally, any free program is threatened constantly by software +patents. We wish to avoid the danger that redistributors of a free +program will individually obtain patent licenses, in effect making the +program proprietary. To prevent this, we have made it clear that any +patent must be licensed for everyone's free use or not licensed at all. + + The precise terms and conditions for copying, distribution and +modification follow. + + GNU GENERAL PUBLIC LICENSE + TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION + + 0. This License applies to any program or other work which contains +a notice placed by the copyright holder saying it may be distributed +under the terms of this General Public License. The "Program", below, +refers to any such program or work, and a "work based on the Program" +means either the Program or any derivative work under copyright law: +that is to say, a work containing the Program or a portion of it, +either verbatim or with modifications and/or translated into another +language. (Hereinafter, translation is included without limitation in +the term "modification".) 
Each licensee is addressed as "you". + +Activities other than copying, distribution and modification are not +covered by this License; they are outside its scope. The act of +running the Program is not restricted, and the output from the Program +is covered only if its contents constitute a work based on the +Program (independent of having been made by running the Program). +Whether that is true depends on what the Program does. + + 1. You may copy and distribute verbatim copies of the Program's +source code as you receive it, in any medium, provided that you +conspicuously and appropriately publish on each copy an appropriate +copyright notice and disclaimer of warranty; keep intact all the +notices that refer to this License and to the absence of any warranty; +and give any other recipients of the Program a copy of this License +along with the Program. + +You may charge a fee for the physical act of transferring a copy, and +you may at your option offer warranty protection in exchange for a fee. + + 2. You may modify your copy or copies of the Program or any portion +of it, thus forming a work based on the Program, and copy and +distribute such modifications or work under the terms of Section 1 +above, provided that you also meet all of these conditions: + + a) You must cause the modified files to carry prominent notices + stating that you changed the files and the date of any change. + + b) You must cause any work that you distribute or publish, that in + whole or in part contains or is derived from the Program or any + part thereof, to be licensed as a whole at no charge to all third + parties under the terms of this License. + + c) If the modified program normally reads commands interactively + when run, you must cause it, when started running for such + interactive use in the most ordinary way, to print or display an + announcement including an appropriate copyright notice and a + notice that there is no warranty (or else, saying that you provide + a warranty) and that users may redistribute the program under + these conditions, and telling the user how to view a copy of this + License. (Exception: if the Program itself is interactive but + does not normally print such an announcement, your work based on + the Program is not required to print an announcement.) + +These requirements apply to the modified work as a whole. If +identifiable sections of that work are not derived from the Program, +and can be reasonably considered independent and separate works in +themselves, then this License, and its terms, do not apply to those +sections when you distribute them as separate works. But when you +distribute the same sections as part of a whole which is a work based +on the Program, the distribution of the whole must be on the terms of +this License, whose permissions for other licensees extend to the +entire whole, and thus to each and every part regardless of who wrote it. + +Thus, it is not the intent of this section to claim rights or contest +your rights to work written entirely by you; rather, the intent is to +exercise the right to control the distribution of derivative or +collective works based on the Program. + +In addition, mere aggregation of another work not based on the Program +with the Program (or with a work based on the Program) on a volume of +a storage or distribution medium does not bring the other work under +the scope of this License. + + 3. 
You may copy and distribute the Program (or a work based on it, +under Section 2) in object code or executable form under the terms of +Sections 1 and 2 above provided that you also do one of the following: + + a) Accompany it with the complete corresponding machine-readable + source code, which must be distributed under the terms of Sections + 1 and 2 above on a medium customarily used for software interchange; or, + + b) Accompany it with a written offer, valid for at least three + years, to give any third party, for a charge no more than your + cost of physically performing source distribution, a complete + machine-readable copy of the corresponding source code, to be + distributed under the terms of Sections 1 and 2 above on a medium + customarily used for software interchange; or, + + c) Accompany it with the information you received as to the offer + to distribute corresponding source code. (This alternative is + allowed only for noncommercial distribution and only if you + received the program in object code or executable form with such + an offer, in accord with Subsection b above.) + +The source code for a work means the preferred form of the work for +making modifications to it. For an executable work, complete source +code means all the source code for all modules it contains, plus any +associated interface definition files, plus the scripts used to +control compilation and installation of the executable. However, as a +special exception, the source code distributed need not include +anything that is normally distributed (in either source or binary +form) with the major components (compiler, kernel, and so on) of the +operating system on which the executable runs, unless that component +itself accompanies the executable. + +If distribution of executable or object code is made by offering +access to copy from a designated place, then offering equivalent +access to copy the source code from the same place counts as +distribution of the source code, even though third parties are not +compelled to copy the source along with the object code. + + 4. You may not copy, modify, sublicense, or distribute the Program +except as expressly provided under this License. Any attempt +otherwise to copy, modify, sublicense or distribute the Program is +void, and will automatically terminate your rights under this License. +However, parties who have received copies, or rights, from you under +this License will not have their licenses terminated so long as such +parties remain in full compliance. + + 5. You are not required to accept this License, since you have not +signed it. However, nothing else grants you permission to modify or +distribute the Program or its derivative works. These actions are +prohibited by law if you do not accept this License. Therefore, by +modifying or distributing the Program (or any work based on the +Program), you indicate your acceptance of this License to do so, and +all its terms and conditions for copying, distributing or modifying +the Program or works based on it. + + 6. Each time you redistribute the Program (or any work based on the +Program), the recipient automatically receives a license from the +original licensor to copy, distribute or modify the Program subject to +these terms and conditions. You may not impose any further +restrictions on the recipients' exercise of the rights granted herein. +You are not responsible for enforcing compliance by third parties to +this License. + + 7. 
If, as a consequence of a court judgment or allegation of patent +infringement or for any other reason (not limited to patent issues), +conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot +distribute so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you +may not distribute the Program at all. For example, if a patent +license would not permit royalty-free redistribution of the Program by +all those who receive copies directly or indirectly through you, then +the only way you could satisfy both it and this License would be to +refrain entirely from distribution of the Program. + +If any portion of this section is held invalid or unenforceable under +any particular circumstance, the balance of the section is intended to +apply and the section as a whole is intended to apply in other +circumstances. + +It is not the purpose of this section to induce you to infringe any +patents or other property right claims or to contest validity of any +such claims; this section has the sole purpose of protecting the +integrity of the free software distribution system, which is +implemented by public license practices. Many people have made +generous contributions to the wide range of software distributed +through that system in reliance on consistent application of that +system; it is up to the author/donor to decide if he or she is willing +to distribute software through any other system and a licensee cannot +impose that choice. + +This section is intended to make thoroughly clear what is believed to +be a consequence of the rest of this License. + + 8. If the distribution and/or use of the Program is restricted in +certain countries either by patents or by copyrighted interfaces, the +original copyright holder who places the Program under this License +may add an explicit geographical distribution limitation excluding +those countries, so that distribution is permitted only in or among +countries not thus excluded. In such case, this License incorporates +the limitation as if written in the body of this License. + + 9. The Free Software Foundation may publish revised and/or new versions +of the General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + +Each version is given a distinguishing version number. If the Program +specifies a version number of this License which applies to it and "any +later version", you have the option of following the terms and conditions +either of that version or of any later version published by the Free +Software Foundation. If the Program does not specify a version number of +this License, you may choose any version ever published by the Free Software +Foundation. + + 10. If you wish to incorporate parts of the Program into other free +programs whose distribution conditions are different, write to the author +to ask for permission. For software which is copyrighted by the Free +Software Foundation, write to the Free Software Foundation; we sometimes +make exceptions for this. Our decision will be guided by the two goals +of preserving the free status of all derivatives of our free software and +of promoting the sharing and reuse of software generally. + + NO WARRANTY + + 11. 
BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY +FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN +OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES +PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED +OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS +TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE +PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, +REPAIR OR CORRECTION. + + 12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR +REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, +INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING +OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED +TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY +YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER +PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE +POSSIBILITY OF SUCH DAMAGES. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +convey the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. + + + Copyright (C) + + This program is free software; you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation; either version 2 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License along + with this program; if not, write to the Free Software Foundation, Inc., + 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +Also add information on how to contact you by electronic and paper mail. + +If the program is interactive, make it output a short notice like this +when it starts in an interactive mode: + + Gnomovision version 69, Copyright (C) year name of author + Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'. + This is free software, and you are welcome to redistribute it + under certain conditions; type `show c' for details. + +The hypothetical commands `show w' and `show c' should show the appropriate +parts of the General Public License. Of course, the commands you use may +be called something other than `show w' and `show c'; they could even be +mouse-clicks or menu items--whatever suits your program. + +You should also get your employer (if you work as a programmer) or your +school, if any, to sign a "copyright disclaimer" for the program, if +necessary. 
Here is a sample; alter the names: + + Yoyodyne, Inc., hereby disclaims all copyright interest in the program + `Gnomovision' (which makes passes at compilers) written by James Hacker. + + , 1 April 1989 + Ty Coon, President of Vice + +This General Public License does not permit incorporating your program into +proprietary programs. If your program is a subroutine library, you may +consider it more useful to permit linking proprietary applications with the +library. If this is what you want to do, use the GNU Lesser General +Public License instead of this License. diff --git a/COPYING.TGPPL.rst b/COPYING.TGPPL.rst new file mode 100644 index 0000000..e89b079 --- /dev/null +++ b/COPYING.TGPPL.rst @@ -0,0 +1,291 @@ +This work also comes with the added permission that you may combine it with a +work licensed under the OpenSSL license (any version) and distribute the +resulting combined work, as long as you follow the requirements of the +licences of this work in regard to all of the resulting combined work +aside from the work licensed under the OpenSSL license. + +This work also comes with the added permission that you may combine it with a +work licensed under the Eclipse Public Licence (any version) and distribute +the resulting combined work, as long as you follow the requirements of the +licences of this work in regard to all of the resulting combined work +aside from the work licensed under the Eclipse Public Licence. + +This work also comes with the added permission that you may combine it with a +work licensed under the Q Public Licence (any version) and distribute the +resulting combined work, as long as you follow the requirements of the +licences of this work in regard to all of the resulting combined work +aside from the work licensed under the Q Public Licence. + +This work also comes with the added permission that you may combine it with a +work licensed under the Apache Licence (any version) and distribute the +resulting combined work, as long as you follow the requirements of the +licences of this work in regard to all of the resulting combined work +aside from the work licensed under the Apache Licence. + +This work also comes with the added permission that you may combine it with a +work licensed under the GNU Lesser General Public License (any version) and +distribute the resulting combined work, as long as you follow the +requirements of the licences of this work in regard to all of the +resulting combined work aside from the work licensed under the GNU Lesser +General Public License. + +This work also comes with the added permission that you may combine it with a +work licensed under the Zope Public License (any version) and distribute the +resulting combined work, as long as you follow the requirements of the +licences of this work in regard to all of the resulting combined work +aside from the work licensed under the Zope Public License. + +This work also comes with the added permission that you may combine it with a +work licensed under the Python Software Foundation License (any version) and +distribute the resulting combined work, as long as you follow the +requirements of the licences of this work in regard to all of the +resulting combined work aside from the work licensed under the Python +Software Foundation License. 
+ +This work also comes with the added permission that you may combine it with a +work licensed under the Academic Free License (any version) and distribute +the resulting combined work, as long as you follow the requirements of the +licences of this work in regard to all of the resulting combined work +aside from the work licensed under the Academic Free License. + +This work also comes with the added permission that you may combine it with a +work licensed under the Apple Public Source License (any version) and +distribute the resulting combined work, as long as you follow the +requirements of the licences of this work in regard to all of the +resulting combined work aside from the work licensed under the Apple Public +Source License. + +This work also comes with the added permission that you may combine it with a +work licensed under the BitTorrent Open Source License (any version) and +distribute the resulting combined work, as long as you follow the +requirements of the licences of this work in regard to all of the +resulting combined work aside from the work licensed under the BitTorrent +Open Source License. + +This work also comes with the added permission that you may combine it with a +work licensed under the Lucent Public License (any version) and distribute +the resulting combined work, as long as you follow the requirements of the +licences of this work in regard to all of the resulting combined work +aside from the work licensed under the Lucent Public License. + +This work also comes with the added permission that you may combine it with a +work licensed under the Jabber Open Source License (any version) and +distribute the resulting combined work, as long as you follow the +requirements of the licences of this work in regard to all of the +resulting combined work aside from the work licensed under the Jabber Open +Source License. + +This work also comes with the added permission that you may combine it with a +work licensed under the Common Development and Distribution License (any +version) and distribute the resulting combined work, as long as you follow +the requirements of the licences of this work in regard to all of the +resulting combined work aside from the work licensed under the Common +Development and Distribution License. + +This work also comes with the added permission that you may combine it with a +work licensed under the Microsoft Public License (any version) and distribute +the resulting combined work, as long as you follow the requirements of the +licences of this work in regard to all of the resulting combined work +aside from the work licensed under the Microsoft Public License. + +This work also comes with the added permission that you may combine it with a +work licensed under the Microsoft Reciprocal License (any version) and +distribute the resulting combined work, as long as you follow the +requirements of the licences of this work in regard to all of the +resulting combined work aside from the work licensed under the Microsoft +Reciprocal License. + +This work also comes with the added permission that you may combine it with a +work licensed under the Sun Industry Standards Source License (any version) +and distribute the resulting combined work, as long as you follow the +requirements of the licences of this work in regard to all of the +resulting combined work aside from the work licensed under the Sun Industry +Standards Source License. 
+ +This work also comes with the added permission that you may combine it with a +work licensed under the Open Software License (any version) and distribute +the resulting combined work, as long as you follow the requirements of the +licences of this work in regard to all of the resulting combined work +aside from the work licensed under the Open Software License. + +======================================================= +Transitive Grace Period Public Licence ("TGPPL") v. 1.0 +======================================================= + +This Transitive Grace Period Public Licence (the "License") applies to any +original work of authorship (the "Original Work") whose owner (the +"Licensor") has placed the following licensing notice adjacent to the +copyright notice for the Original Work: + + *Licensed under the Transitive Grace Period Public Licence version 1.0* + +1. **Grant of Copyright License.** Licensor grants You a worldwide, + royalty-free, non-exclusive, sublicensable license, for the duration of + the copyright, to do the following: + + a. to reproduce the Original Work in copies, either alone or as part of a + collective work; + + b. to translate, adapt, alter, transform, modify, or arrange the Original + Work, thereby creating derivative works ("Derivative Works") based upon + the Original Work; + + c. to distribute or communicate copies of the Original Work and Derivative + Works to the public, with the proviso that copies of Original Work or + Derivative Works that You distribute or communicate shall be licensed + under this Transitive Grace Period Public Licence no later than 12 + months after You distributed or communicated said copies; + + d. to perform the Original Work publicly; and + + e. to display the Original Work publicly. + +2. **Grant of Patent License.** Licensor grants You a worldwide, + royalty-free, non-exclusive, sublicensable license, under patent claims + owned or controlled by the Licensor that are embodied in the Original + Work as furnished by the Licensor, for the duration of the patents, to + make, use, sell, offer for sale, have made, and import the Original Work + and Derivative Works. + +3. **Grant of Source Code License.** The term "Source Code" means the + preferred form of the Original Work for making modifications to it and + all available documentation describing how to modify the Original + Work. Licensor agrees to provide a machine-readable copy of the Source + Code of the Original Work along with each copy of the Original Work that + Licensor distributes. Licensor reserves the right to satisfy this + obligation by placing a machine-readable copy of the Source Code in an + information repository reasonably calculated to permit inexpensive and + convenient access by You for as long as Licensor continues to distribute + the Original Work. + +4. **Exclusions From License Grant.** Neither the names of Licensor, nor the + names of any contributors to the Original Work, nor any of their + trademarks or service marks, may be used to endorse or promote products + derived from this Original Work without express prior permission of the + Licensor. Except as expressly stated herein, nothing in this License + grants any license to Licensor's trademarks, copyrights, patents, trade + secrets or any other intellectual property. No patent license is granted + to make, use, sell, offer for sale, have made, or import embodiments of + any patent claims other than the licensed claims defined in Section 2. 
No + license is granted to the trademarks of Licensor even if such marks are + included in the Original Work. Nothing in this License shall be + interpreted to prohibit Licensor from licensing under terms different + from this License any Original Work that Licensor otherwise would have a + right to license. + +5. **External Deployment.** The term "External Deployment" means the use, + distribution, or communication of the Original Work or Derivative Works + in any way such that the Original Work or Derivative Works may be used by + anyone other than You, whether those works are distributed or + communicated to those persons or made available as an application + intended for use over a network. As an express condition for the grants + of license hereunder, You must treat any External Deployment by You of + the Original Work or a Derivative Work as a distribution under section + 1(c). + +6. **Attribution Rights.** You must retain, in the Source Code of any + Derivative Works that You create, all copyright, patent, or trademark + notices from the Source Code of the Original Work, as well as any notices + of licensing and any descriptive text identified therein as an + "Attribution Notice." You must cause the Source Code for any Derivative + Works that You create to carry a prominent Attribution Notice reasonably + calculated to inform recipients that You have modified the Original Work. + +7. **Warranty of Provenance and Disclaimer of Warranty.** Licensor warrants + that the copyright in and to the Original Work and the patent rights + granted herein by Licensor are owned by the Licensor or are sublicensed + to You under the terms of this License with the permission of the + contributor(s) of those copyrights and patent rights. Except as expressly + stated in the immediately preceding sentence, the Original Work is + provided under this License on an "AS IS" BASIS and WITHOUT WARRANTY, + either express or implied, including, without limitation, the warranties + of non-infringement, merchantability or fitness for a particular + purpose. THE ENTIRE RISK AS TO THE QUALITY OF THE ORIGINAL WORK IS WITH + YOU. This DISCLAIMER OF WARRANTY constitutes an essential part of this + License. No license to the Original Work is granted by this License + except under this disclaimer. + +8. **Limitation of Liability.** Under no circumstances and under no legal + theory, whether in tort (including negligence), contract, or otherwise, + shall the Licensor be liable to anyone for any indirect, special, + incidental, or consequential damages of any character arising as a result + of this License or the use of the Original Work including, without + limitation, damages for loss of goodwill, work stoppage, computer failure + or malfunction, or any and all other commercial damages or losses. This + limitation of liability shall not apply to the extent applicable law + prohibits such limitation. + +9. **Acceptance and Termination.** If, at any time, You expressly assented + to this License, that assent indicates your clear and irrevocable + acceptance of this License and all of its terms and conditions. If You + distribute or communicate copies of the Original Work or a Derivative + Work, You must make a reasonable effort under the circumstances to obtain + the express assent of recipients to the terms of this License. 
This + License conditions your rights to undertake the activities listed in + Section 1, including your right to create Derivative Works based upon the + Original Work, and doing so without honoring these terms and conditions + is prohibited by copyright law and international treaty. Nothing in this + License is intended to affect copyright exceptions and limitations + (including 'fair use' or 'fair dealing'). This License shall terminate + immediately and You may no longer exercise any of the rights granted to + You by this License upon your failure to honor the conditions in Section + 1(c). + +10. **Termination for Patent Action.** This License shall terminate + automatically and You may no longer exercise any of the rights granted to + You by this License as of the date You commence an action, including a + cross-claim or counterclaim, against Licensor or any licensee alleging + that the Original Work infringes a patent. This termination provision + shall not apply for an action alleging patent infringement by + combinations of the Original Work with other software or hardware. + +11. **Jurisdiction, Venue and Governing Law.** Any action or suit relating to + this License may be brought only in the courts of a jurisdiction wherein + the Licensor resides or in which Licensor conducts its primary business, + and under the laws of that jurisdiction excluding its conflict-of-law + provisions. The application of the United Nations Convention on Contracts + for the International Sale of Goods is expressly excluded. Any use of the + Original Work outside the scope of this License or after its termination + shall be subject to the requirements and penalties of copyright or patent + law in the appropriate jurisdiction. This section shall survive the + termination of this License. + +12. **Attorneys' Fees.** In any action to enforce the terms of this License + or seeking damages relating thereto, the prevailing party shall be + entitled to recover its costs and expenses, including, without + limitation, reasonable attorneys' fees and costs incurred in connection + with such action, including any appeal of such action. This section shall + survive the termination of this License. + +13. **Miscellaneous.** If any provision of this License is held to be + unenforceable, such provision shall be reformed only to the extent + necessary to make it enforceable. + +14. **Definition of "You" in This License.** "You" throughout this License, + whether in upper or lower case, means an individual or a legal entity + exercising rights under, and complying with all of the terms of, this + License. For legal entities, "You" includes any entity that controls, is + controlled by, or is under common control with you. For purposes of this + definition, "control" means (i) the power, direct or indirect, to cause + the direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + +15. **Right to Use.** You may use the Original Work in all ways not otherwise + restricted or conditioned by this License or by law, and Licensor + promises not to interfere with or be responsible for such uses by You. + +16. **Modification of This License.** This License is Copyright © 2007 Zooko + Wilcox-O'Hearn. Permission is granted to copy, distribute, or communicate + this License without modification. 
Nothing in this License permits You to
+    modify this License as applied to the Original Work or to Derivative
+    Works. However, You may modify the text of this License and copy,
+    distribute or communicate your modified version (the "Modified License")
+    and apply it to other original works of authorship subject to the
+    following conditions: (i) You may not indicate in any way that your
+    Modified License is the "Transitive Grace Period Public Licence" or
+    "TGPPL" and you may not use those names in the name of your Modified
+    License; and (ii) You must replace the notice specified in the first
+    paragraph above with the notice "Licensed under " or with a notice of
+    your own that is not confusingly similar to the notice in this License.
diff --git a/NEWS.txt b/NEWS.txt
new file mode 100644
index 0000000..15c442d
--- /dev/null
+++ b/NEWS.txt
@@ -0,0 +1,10 @@
+User visible changes in zfec.  -*- outline -*-
+
+* Release 1.4.5 (2009-06-17)
+
+** Bug fixes
+
+Fix a segfault when the Python classes Encoder or Decoder are constructed with k or m less than 1, greater than 256, or with k greater than m.
+Fix several compiler warnings, add unit tests, improve Python packaging, set up more buildbots to run the unit tests on more platforms.
+
+For details about older releases, see the version control history.
diff --git a/README.rst b/README.rst
new file mode 100644
index 0000000..0db69fc
--- /dev/null
+++ b/README.rst
@@ -0,0 +1,325 @@
+
+
+zfec -- efficient, portable erasure coding tool
+===============================================
+
+Generate redundant blocks of information such that if some of the blocks are
+lost then the original data can be recovered from the remaining blocks. This
+package includes command-line tools, a C API, a Python API, and a Haskell API.
+
+
+Intro and Licence
+-----------------
+
+This package implements an "erasure code", or "forward error correction
+code".
+
+You may use this package under the GNU General Public License, version 2 or,
+at your option, any later version. You may use this package under the
+Transitive Grace Period Public Licence, version 1.0 or, at your option, any
+later version. (You may choose to use this package under the terms of either
+licence, at your option.) See the file COPYING.GPL for the terms of the GNU
+General Public License, version 2. See the file COPYING.TGPPL.rst for the
+terms of the Transitive Grace Period Public Licence, version 1.0.
+
+The most widely known example of an erasure code is the RAID-5 algorithm,
+which makes it so that in the event of the loss of any one hard drive, the
+stored data can be completely recovered. The algorithm in the zfec package
+has a similar effect, but instead of recovering from the loss of only a
+single element, it can be parameterized to choose in advance the number of
+elements whose loss it can tolerate.
+
+This package is largely based on the old "fec" library by Luigi Rizzo et al.,
+which is a mature and optimized implementation of erasure coding. The zfec
+package makes several changes from the original "fec" package, including
+addition of the Python API, refactoring of the C API to support zero-copy
+operation, a few clean-ups and optimizations of the core code itself, and the
+addition of a command-line tool named "zfec".
+
+
+Installation
+------------
+
+This package is managed with the "setuptools" package management tool. To
+build and install the package directly into your system, just run ``python
+./setup.py install``.
+If you prefer to keep the package limited to a
+specific directory so that you can manage it yourself (perhaps by using the
+"GNU stow" tool), then give it these arguments: ``python ./setup.py install
+--single-version-externally-managed
+--record=${specificdirectory}/zfec-install.log
+--prefix=${specificdirectory}``
+
+To run the self-tests, execute ``python ./setup.py test`` (or if you have
+Twisted Python installed, you can run ``trial zfec`` for nicer output and
+test options). This will run the tests of the C API, the Python API, and the
+command-line tools.
+
+To run the tests of the Haskell API: ``runhaskell haskell/test/FECTest.hs``
+
+Note that you must install the library before running the Haskell API tests,
+because the interpreter cannot process FEC.hs on its own: it takes a
+reference to an FFI function.
+
+
+Community
+---------
+
+The source is currently available via darcs on the web with the command:
+
+darcs get https://tahoe-lafs.org/source/zfec/trunk
+
+More information on darcs is available at http://darcs.net
+
+Please post about zfec to the Tahoe-LAFS mailing list and contribute patches:
+
+
+
+
+Overview
+--------
+
+This package performs two operations, encoding and decoding. Encoding takes
+some input data and expands its size by producing extra "check blocks", also
+called "secondary blocks". Decoding takes some data -- any combination of
+blocks of the original data (called "primary blocks") and "secondary
+blocks" -- and produces the original data.
+
+The encoding is parameterized by two integers, k and m. m is the total
+number of blocks produced, and k is how many of those blocks are necessary to
+reconstruct the original data. m is required to be at least 1 and at most
+256, and k is required to be at least 1 and at most m.
+
+(Note that when k == m there is no point in doing erasure coding -- it
+degenerates to the equivalent of the Unix "split" utility, which simply
+splits the input into successive segments. Similarly, when k == 1 it
+degenerates to the equivalent of the Unix "cp" utility -- each block is a
+complete copy of the input data.)
+
+Note that each "primary block" is a segment of the original data, so its size
+is 1/k'th of the size of the original data, and each "secondary block" is of
+the same size, so the total space used by all the blocks is m/k times the
+size of the original data (plus some padding to fill out the last primary
+block to be the same size as all the others). In addition to the data
+contained in the blocks themselves there are also a few pieces of metadata
+which are necessary for later reconstruction. Those pieces are: 1. the value
+of K, 2. the value of M, 3. the sharenum of each block, 4. the number of
+bytes of padding that were used. The "zfec" command-line tool compresses
+these pieces of data and prepends them to the beginning of each share, so
+each sharefile produced by the "zfec" command-line tool is between one and
+four bytes larger than the share data alone.
+
+The decoding step requires as input k of the blocks which were produced by
+the encoding step. The decoding step produces as output the data that was
+earlier input to the encoding step.
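To make the k-of-m scheme concrete, here is a minimal round trip through the
Python classes described in the API section below. This is an illustrative
sketch rather than part of the package documentation: it assumes zfec is
installed, picks k=3 and m=8 arbitrarily, and uses an input whose length is a
multiple of k so that no padding is needed (Python 2 syntax, matching the rest
of the package)::

    import zfec

    k, m = 3, 8                        # any 3 of the 8 blocks suffice
    data = "substrate" * 3             # 27 bytes: a multiple of k, no padding
    blocksize = len(data) // k
    primary = [data[i * blocksize:(i + 1) * blocksize] for i in range(k)]

    enc = zfec.Encoder(k, m)
    secondary = enc.encode(primary)    # default: secondary blocknums k..m-1

    # Simulate losing all but the last k blocks (all secondary here).
    survivors = (primary + secondary)[-k:]
    survivornums = range(m)[-k:]       # their blocknums: 5, 6, 7

    dec = zfec.Decoder(k, m)
    recovered = dec.decode(survivors, survivornums)
    assert "".join(recovered) == data  # primary blocks come back in order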
+
+
+Command-Line Tool
+-----------------
+
+The bin/ directory contains two Unix-style, command-line tools, "zfec" and
+"zunfec". Execute ``zfec --help`` or ``zunfec --help`` for usage
+instructions.
+
+
+Performance
+-----------
+
+To run the benchmarks, execute the included bench/bench_zfec.py script with
+optional --k= and --m= arguments.
+
+On my Athlon 64 2.4 GHz workstation (running Linux), the "zfec" command-line
+tool encoded a 160 MB file with m=100, k=94 (about 6% redundancy) in 3.9
+seconds, whereas the "par2" tool encoded the file with about 6% redundancy in
+27 seconds. zfec encoded the same file with m=12, k=6 (100% redundancy) in
+4.1 seconds, whereas par2 encoded it with about 100% redundancy in 7 minutes
+and 56 seconds.
+
+The underlying C library in benchmark mode encoded from a file at about 4.9
+million bytes per second and decoded at about 5.8 million bytes per second.
+
+On Peter's fancy Intel Mac laptop (2.16 GHz Core Duo), it encoded from a file
+at about 6.2 million bytes per second.
+
+On my even fancier Intel Mac laptop (2.33 GHz Core Duo), it encoded from a
+file at about 6.8 million bytes per second.
+
+On my old PowerPC G4 867 MHz Mac laptop, it encoded from a file at about 1.3
+million bytes per second.
+
+Here is a paper analyzing the performance of various erasure codes and their
+implementations, including zfec:
+
+http://www.usenix.org/events/fast09/tech/full_papers/plank/plank.pdf
+
+Zfec shows good performance on different machines and with different values
+of K and M. It also has a nice small memory footprint.
+
+
+API
+---
+
+Each block is associated with a "blocknum". The blocknum of each primary
+block is its index (starting from zero), so the 0'th block is the first
+primary block, which is the first few bytes of the file, the 1'st block is
+the next primary block, which is the next few bytes of the file, and so on.
+The last primary block has blocknum k-1. The blocknum of each secondary
+block is an arbitrary integer between k and 255 inclusive. (When using the
+Python API, if you don't specify which secondary blocks you want when
+invoking encode(), then it will by default provide the blocks with ids from
+k to m-1 inclusive.)
+
+- C API
+
+  fec_encode() takes as input an array of k pointers, where each pointer
+  points to a memory buffer containing the input data (i.e., the i'th buffer
+  contains the i'th primary block). There is also a second parameter which
+  is an array of the blocknums of the secondary blocks which are to be
+  produced. (Each element in that array is required to be the blocknum of a
+  secondary block, i.e. it is required to be >= k and < m.)
+
+  The output from fec_encode() is the requested set of secondary blocks,
+  which are written into output buffers provided by the caller.
+
+  Note that this fec_encode() is a "low-level" API in that it requires the
+  input data to be provided in a set of memory buffers of exactly the right
+  sizes. If you are starting instead with a single buffer containing all of
+  the data then please see easyfec.py's "class Encoder" as an example of how
+  to split a single large buffer into the appropriate set of input buffers
+  for fec_encode(). If you are starting with a file on disk, then please see
+  filefec.py's encode_file_stringy_easyfec() for an example of how to read
+  the data from a file and pass it to "class Encoder". The Python interface
+  provides these higher-level operations, as does the Haskell interface. If
+  you implement functions to do these higher-level tasks in other languages,
+  please send a patch to tahoe-dev@tahoe-lafs.org so that your API can be
+  included in future releases of zfec.
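For illustration, the buffer preparation that the preceding paragraph
delegates to easyfec.py amounts to NUL-padding the data out to a multiple of
k and slicing it, roughly as in the following sketch (the helper name here is
ours, not part of the zfec API; recording the pad length separately matches
the metadata scheme described in the Overview)::

    def split_for_fec_encode(data, k):
        # fec_encode() wants exactly k equal-sized input buffers, so pad
        # the data to a multiple of k with NUL bytes and cut it into k pieces.
        blocksize = (len(data) + k - 1) // k
        padlen = k * blocksize - len(data)
        data += "\x00" * padlen
        blocks = [data[i * blocksize:(i + 1) * blocksize] for i in range(k)]
        return blocks, padlen      # padlen must be kept to undo the padding

    blocks, padlen = split_for_fec_encode("hello, erasure coding", 4)
    assert len(blocks) == 4
    assert len(set(len(b) for b in blocks)) == 1   # all the same size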
+
+  fec_decode() takes as input an array of k pointers, where each pointer
+  points to a buffer containing a block. There is also a separate input
+  parameter which is an array of blocknums, indicating the blocknum of each
+  of the blocks which is being passed in.
+
+  The output from fec_decode() is the set of primary blocks which were
+  missing from the input and had to be reconstructed. These reconstructed
+  blocks are written into output buffers provided by the caller.
+
+
+- Python API
+
+  encode() and decode() take as input a sequence of k buffers, where a
+  "sequence" is any object that implements the Python sequence protocol (such
+  as a list or tuple) and a "buffer" is any object that implements the Python
+  buffer protocol (such as a string or array). The contents that are
+  required to be present in these buffers are the same as for the C API.
+
+  encode() also takes a list of desired blocknums. Unlike the C API, the
+  Python API accepts blocknums of primary blocks as well as secondary blocks
+  in its list of desired blocknums. encode() returns a list of buffer
+  objects which contain the blocks requested. For each requested block which
+  is a primary block, the resulting list contains a reference to the
+  appropriate primary block from the input list. For each requested block
+  which is a secondary block, the list contains a newly created string object
+  containing that block.
+
+  decode() also takes a list of integers indicating the blocknums of the
+  blocks being passed in. decode() returns a list of buffer objects which
+  contain all of the primary blocks of the original data (in order). For
+  each primary block which was present in the input list, the result list
+  simply contains a reference to the object that was passed in the input
+  list. For each primary block which was not present in the input, the
+  result list contains a newly created string object containing that primary
+  block.
+
+  Beware of a "gotcha" that can result from the combination of mutable data
+  and the fact that the Python API returns references to inputs when
+  possible.
+
+  Returning references to its inputs is efficient since it avoids making an
+  unnecessary copy of the data, but if the object which was passed as input
+  is mutable and if that object is mutated after the call to zfec returns,
+  then the result from zfec -- which is just a reference to that same object
+  -- will also be mutated. This subtlety is the price you pay for avoiding
+  data copying. If you don't want to have to worry about this then you can
+  simply use immutable objects (e.g. Python strings) to hold the data that
+  you pass to zfec.
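This gotcha is easiest to see with arrays, which implement the buffer
protocol but, unlike strings, are mutable. The following sketch (assuming
zfec is installed; Python 2 syntax) relies on the reference-returning
behavior described above::

    import array
    import zfec

    k, m = 2, 3
    blocks = [array.array('c', "ab"), array.array('c', "cd")]

    dec = zfec.Decoder(k, m)
    out = dec.decode(blocks, [0, 1])  # all primary blocks are present...
    assert out[0] is blocks[0]        # ...so they come back by reference

    blocks[0][0] = 'X'                # mutating an input after the call...
    print out[0].tostring()           # ...changes the "result" too: prints Xb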
+
+- Haskell API
+
+  The Haskell code is fully Haddocked; to generate the documentation, run
+  ``runhaskell Setup.lhs haddock``.
+
+
+Utilities
+---------
+
+The filefec.py module has a utility function for efficiently reading a file
+and encoding it piece by piece. This module is used by the "zfec" and
+"zunfec" command-line tools from the bin/ directory.
+
+
+Dependencies
+------------
+
+A C compiler is required. To use the Python API or the command-line tools, a
+Python interpreter is also required. We have tested it with Python v2.4,
+v2.5, v2.6, and v2.7. For the Haskell interface, GHC >= 6.8.1 is required.
+
+
+Acknowledgements
+----------------
+
+Thanks to the author of the original fec lib, Luigi Rizzo, and the folks that
+contributed to it: Phil Karn, Robert Morelos-Zaragoza, Hari Thirumoorthy, and
+Dan Rubenstein. Thanks to the Mnet hackers who wrote an earlier Python
+wrapper, especially Myers Carpenter and Hauke Johannknecht. Thanks to Brian
+Warner and Amber O'Whielacronx for help with the API, documentation,
+debugging, compression, and unit tests. Thanks to Adam Langley for improving
+the C API and contributing the Haskell API. Thanks to the creators of GCC
+(starting with Richard M. Stallman) and Valgrind (starting with Julian
+Seward) for a pair of excellent tools. Thanks to my coworkers at Allmydata
+-- http://allmydata.com -- Fabrice Grinda, Peter Secor, Rob Kinninmont, Brian
+Warner, Zandr Milewski, Justin Boreta, and Mark Meras for sponsoring this
+work and releasing it under a Free Software licence. Thanks to Jack Lloyd,
+Samuel Neves, and David-Sarah Hopwood.
+
+
+Related Works
+-------------
+
+Note: a Unix-style tool like "zfec" does only one thing -- in this case
+erasure coding -- and leaves other tasks to other tools. Other Unix-style
+tools that go well with zfec include `GNU tar`_ for archiving multiple files
+and directories into one file, `lzip`_ for compression, and `GNU Privacy
+Guard`_ for encryption or `b2sum`_ for integrity. It is important to do
+things in order: first archive, then compress, then either encrypt or
+integrity-check, then erasure code. Note that if GNU Privacy Guard is used
+for privacy, then it will also ensure integrity, so the use of b2sum is
+unnecessary in that case. Note also that you need to do integrity checking
+(such as with b2sum) on the blocks that result from the erasure coding in
+*addition* to doing it on the file contents! (There are two different subtle
+failure modes -- see "more than one file can match an immutable file cap" on
+the `Hack Tahoe-LAFS!`_ Hall of Fame.)
+
+The `Tahoe-LAFS`_ project uses zfec as part of a complete distributed
+filesystem with integrated encryption, integrity, remote distribution of the
+blocks, directory structure, backup of changed files or directories, access
+control, immutable files and directories, proof-of-retrievability, and repair
+of damaged files and directories.
+
+`fecpp`_ is an alternative to zfec. It implements an algorithm
+bitwise-compatible with zfec and is BSD-licensed.
+
+.. _GNU tar: http://directory.fsf.org/project/tar/
+.. _lzip: http://www.nongnu.org/lzip/lzip.html
+.. _GNU Privacy Guard: http://gnupg.org/
+.. _b2sum: https://blake2.net/
+.. _Tahoe-LAFS: https://tahoe-lafs.org
+.. _Hack Tahoe-LAFS!: https://tahoe-lafs.org/hacktahoelafs/
+.. _fecpp: http://www.randombit.net/code/fecpp/
+
+
+Enjoy!
+
+Zooko Wilcox-O'Hearn
+
+2013-05-15
+
+Boulder, Colorado
diff --git a/Setup.lhs b/Setup.lhs
new file mode 100755
index 0000000..5bde0de
--- /dev/null
+++ b/Setup.lhs
@@ -0,0 +1,3 @@
+#!/usr/bin/env runhaskell
+> import Distribution.Simple
+> main = defaultMain
diff --git a/TODO b/TODO
new file mode 100644
index 0000000..b76b8b1
--- /dev/null
+++ b/TODO
@@ -0,0 +1,15 @@
+ * INSTALL doc
+
+ * catch EnvironmentError when writing sharefiles and clean up
+ * try Duff's device in _addmul1()?
+ * memory usage analysis
+ * What Every Programmer Should Know About Memory by Ulrich Drepper
+ * test handling of filesystem exceptional conditions (tricky)
+ * Jerasure
+ ** try multiplication without a lookup table? (to save cache pressure)
+ * conditional compilation to handle a printf that doesn't understand %Zu
+ * try malloc() instead of alloca() (more portable, possibly better for keeping stacks aligned?)
+
+Dependencies
+------------
+
+A C compiler is required. To use the Python API or the command-line tools, a
+Python interpreter is also required. We have tested it with Python v2.4,
+v2.5, v2.6, and v2.7. For the Haskell interface, GHC >= 6.8.1 is required.
+
+
+Acknowledgements
+----------------
+
+Thanks to the author of the original fec lib, Luigi Rizzo, and the folks that
+contributed to it: Phil Karn, Robert Morelos-Zaragoza, Hari Thirumoorthy, and
+Dan Rubenstein. Thanks to the Mnet hackers who wrote an earlier Python
+wrapper, especially Myers Carpenter and Hauke Johannknecht. Thanks to Brian
+Warner and Amber O'Whielacronx for help with the API, documentation,
+debugging, compression, and unit tests. Thanks to Adam Langley for improving
+the C API and contributing the Haskell API. Thanks to the creators of GCC
+(starting with Richard M. Stallman) and Valgrind (starting with Julian
+Seward) for a pair of excellent tools. Thanks to my coworkers at Allmydata
+-- http://allmydata.com -- Fabrice Grinda, Peter Secor, Rob Kinninmont, Brian
+Warner, Zandr Milewski, Justin Boreta, and Mark Meras for sponsoring this
+work and releasing it under a Free Software licence. Thanks to Jack Lloyd,
+Samuel Neves, and David-Sarah Hopwood.
+
+
+Related Works
+-------------
+
+Note: a Unix-style tool like "zfec" does only one thing -- in this case
+erasure coding -- and leaves other tasks to other tools. Other Unix-style
+tools that go well with zfec include `GNU tar`_ for archiving multiple files
+and directories into one file, `lzip`_ for compression, and `GNU Privacy
+Guard`_ for encryption or `b2sum`_ for integrity. It is important to do
+things in order: first archive, then compress, then either encrypt or
+integrity-check, then erasure code. Note that if GNU Privacy Guard is used
+for privacy, then it will also ensure integrity, so the use of b2sum is
+unnecessary in that case. Note also that you need to do integrity
+checking (such as with b2sum) on the blocks that result from the erasure
+coding in *addition* to doing it on the file contents! (There are two
+different subtle failure modes -- see "more than one file can match an
+immutable file cap" on the `Hack Tahoe-LAFS!`_ Hall of Fame.)
+
+The `Tahoe-LAFS`_ project uses zfec as part of a complete distributed
+filesystem with integrated encryption, integrity, remote distribution of the
+blocks, directory structure, backup of changed files or directories, access
+control, immutable files and directories, proof-of-retrievability, and repair
+of damaged files and directories.
+
+`fecpp`_ is an alternative to zfec. It implements an algorithm that is
+bitwise-compatible with zfec and is BSD-licensed.
+
+.. _GNU tar: http://directory.fsf.org/project/tar/
+.. _lzip: http://www.nongnu.org/lzip/lzip.html
+.. _GNU Privacy Guard: http://gnupg.org/
+.. _b2sum: https://blake2.net/
+.. _Tahoe-LAFS: https://tahoe-lafs.org
+.. _Hack Tahoe-LAFS!: https://tahoe-lafs.org/hacktahoelafs/
+.. _fecpp: http://www.randombit.net/code/fecpp/
+
+
+Enjoy!
+
+Zooko Wilcox-O'Hearn
+
+2013-05-15
+
+Boulder, Colorado
diff --git a/Setup.lhs b/Setup.lhs
new file mode 100755
index 0000000..5bde0de
--- /dev/null
+++ b/Setup.lhs
@@ -0,0 +1,3 @@
+#!/usr/bin/env runhaskell
+> import Distribution.Simple
+> main = defaultMain
diff --git a/TODO b/TODO
new file mode 100644
index 0000000..b76b8b1
--- /dev/null
+++ b/TODO
@@ -0,0 +1,15 @@
+ * INSTALL doc
+
+ * catch EnvironmentError when writing sharefiles and clean up
+ * try Duff's device in _addmul1()?
+ * memory usage analysis
+ * What Every Programmer Should Know About Memory by Ulrich Drepper
+ * test handling of filesystem exceptional conditions (tricky)
+ * Jerasure
+ ** try multiplication without a lookup table? (to save cache pressure)
+ * conditional compilation to handle a printf that doesn't understand %Zu
+ * try malloc() instead of alloca() (more portable, possibly better for keeping stacks aligned?)
+
+ * announce on lwn, p2p-hackers
+
+ * streaming mode
diff --git a/bench/bench_zfec.py b/bench/bench_zfec.py
new file mode 100644
index 0000000..9a9b8e2
--- /dev/null
+++ b/bench/bench_zfec.py
@@ -0,0 +1,109 @@
+from zfec import easyfec, Encoder, filefec
+from pyutil import mathutil
+
+import os, sys
+
+from pyutil import benchutil
+
+FNAME="benchrandom.data"
+
+def _make_new_rand_file(size):
+ open(FNAME, "wb").write(os.urandom(size))
+
+def donothing(results, reslenthing):
+ pass
+
+K=3
+M=10
+
+d = ""
+ds = []
+easyfecenc = None
+fecenc = None
+def _make_new_rand_data(size, k, m):
+ global d, easyfecenc, fecenc, K, M
+ K = k
+ M = m
+ d = os.urandom(size)
+ del ds[:]
+ ds.extend([None]*k)
+ blocksize = mathutil.div_ceil(size, k)
+ for i in range(k):
+ ds[i] = d[i*blocksize:(i+1)*blocksize]
+ ds[-1] = ds[-1] + "\x00" * (len(ds[-2]) - len(ds[-1]))
+ easyfecenc = easyfec.Encoder(k, m)
+ fecenc = Encoder(k, m)
+
+import sha
+hashers = [ sha.new() for i in range(M) ]
+def hashem(results, reslenthing):
+ for i, result in enumerate(results):
+ hashers[i].update(result)
+
+def _encode_file(N):
+ filefec.encode_file(open(FNAME, "rb"), donothing, K, M)
+
+def _encode_file_stringy(N):
+ filefec.encode_file_stringy(open(FNAME, "rb"), donothing, K, M)
+
+def _encode_file_stringy_easyfec(N):
+ filefec.encode_file_stringy_easyfec(open(FNAME, "rb"), donothing, K, M)
+
+def _encode_file_not_really(N):
+ filefec.encode_file_not_really(open(FNAME, "rb"), donothing, K, M)
+
+def _encode_file_not_really_and_hash(N):
+ filefec.encode_file_not_really_and_hash(open(FNAME, "rb"), donothing, K, M)
+
+def _encode_file_and_hash(N):
+ filefec.encode_file(open(FNAME, "rb"), hashem, K, M)
+
+def _encode_data_not_really(N):
+ # This function is to see how long it takes to run the Python code
+ # that does this benchmarking and accounting and so on but not
+ # actually do any erasure-coding, in order to get an idea of how
+ # much overhead there is in using Python. This exercises the
+ # basic behavior of allocating buffers to hold the secondary
+ # shares.
+ sz = N // K
+ for i in range(M-K):
+ x = '\x00' * sz
+
+def _encode_data_easyfec(N):
+ easyfecenc.encode(d)
+
+def _encode_data_fec(N):
+ fecenc.encode(ds)
+
+def bench(k, m):
+ SIZE = 10**6
+ MAXREPS = 64
+ # for f in [_encode_file_stringy_easyfec, _encode_file_stringy, _encode_file, _encode_file_not_really,]:
+ # for f in [_encode_file,]:
+ # for f in [_encode_file_not_really, _encode_file_not_really_and_hash, _encode_file, _encode_file_and_hash,]:
+ # for f in [_encode_data_not_really, _encode_data_easyfec, _encode_data_fec,]:
+ print "measuring encoding of data with K=%d, M=%d, reporting results in nanoseconds per byte after encoding %d bytes %d times in a row..." % (k, m, SIZE, MAXREPS)
+ # for f in [_encode_data_fec, _encode_data_not_really]:
+ for f in [_encode_data_fec]:
+ def _init_func(size):
+ return _make_new_rand_data(size, k, m)
+ for BSIZE in [SIZE]:
+ results = benchutil.rep_bench(f, n=BSIZE, initfunc=_init_func, MAXREPS=MAXREPS, MAXTIME=None, UNITS_PER_SECOND=1000000000)
+ print "and now represented in MB/s..."
+ print + best = results['best'] + mean = results['mean'] + worst = results['worst'] + print "best: % 4.3f MB/sec" % (10**3 / best) + print "mean: % 4.3f MB/sec" % (10**3 / mean) + print "worst: % 4.3f MB/sec" % (10**3 / worst) + +k = K +m = M +for arg in sys.argv: + if arg.startswith('--k='): + k = int(arg[len('--k='):]) + if arg.startswith('--m='): + m = int(arg[len('--m='):]) + +bench(k, m) diff --git a/changelog b/changelog new file mode 100644 index 0000000..d4b56a4 --- /dev/null +++ b/changelog @@ -0,0 +1,47 @@ +Thu Dec 20 13:55:55 MST 2007 zooko@zooko.com + * zfec: silence a warning when compiling on Mac OS X with gcc, and refactor a complicated #define stanza into the shared header file + +Thu Dec 20 13:55:32 MST 2007 zooko@zooko.com + * zfec: setup: include _version.py so that the zfec package has a version number again + +Thu Dec 20 09:33:55 MST 2007 zooko@zooko.com + tagged zfec-1.3.1 + +Thu Dec 20 09:31:13 MST 2007 zooko@zooko.com + * zfec: dual-license under GPL and TGPPL + +Thu Dec 20 09:26:16 MST 2007 zooko@zooko.com + tagged zfec-1.3.0 + +Thu Dec 20 09:25:31 MST 2007 zooko@zooko.com + * zfec: add "changelog" file, which contains descriptions of the darcs patches since the last release that I think are interesting to users + +Thu Dec 20 09:23:41 MST 2007 zooko@zooko.com + * zfec: setup: require setuptools_darcs >= 1.1.0 (fixes problem with building incomplete packages) + +Wed Nov 14 09:44:26 MST 2007 zooko@zooko.com + * zfec: set STRIDE to 8192 after extensive experimentation on my PowerPC G4 867 MHz (256 KB L2 cache) + + +Mon Nov 12 07:58:19 MST 2007 zooko@zooko.com + * zfec: reorder the inner loop to be more cache-friendly + + Loop over this stride of each input block before looping over all strides of + this input block. In theory, this should allow the strides of the input blocks + to remain in cache while we produce all of the output blocks. + + +Sun Nov 11 10:04:44 MST 2007 zooko@zooko.com + * zfec: do encoding within a fixed window of memory in order to be cache friendly + +Tue Nov 13 13:13:52 MST 2007 zooko@zooko.com + * zfec: conditionally-compile the right magic to use alloca() with gcc -mno-cygwin + +Tue Nov 13 13:11:33 MST 2007 zooko@zooko.com + * zfec: setup: fix the spelling of "zfec.fec" package name + +Sun Nov 11 08:50:54 MST 2007 zooko@zooko.com + * zfec: add a TODO note + +Fri Nov 9 11:17:04 MST 2007 zooko@zooko.com + tagged zfec-1.2.0 diff --git a/copyright b/copyright new file mode 100644 index 0000000..b6a4020 --- /dev/null +++ b/copyright @@ -0,0 +1,246 @@ +This package was debianized by Zooko O'Whielacronx zooko@zooko.com on +Mon, 22 June 2009 23:30:00 +0000. + +It was originally downloaded from http://allmydata.org/trac/zfec + +Upstream Author: Zooko O'Whielacronx + +Copyright: + +You may use this package under the GNU General Public License, version +2 or, at your option, any later version. You may use this package +under the Transitive Grace Period Public Licence, version 1.0 or, at +your option, any later version. (You may choose to use this package +under the terms of either licence, at your option.) See the file +COPYING.GPL for the terms of the GNU General Public License, version 2. +See the file COPYING.TGPPL.rst or the text of the TGPPL in this file, +below, for the terms of the Transitive Grace Period Public Licence, +version 1.0. + +------- +The Debian packaging is © 2009 Zooko O'Whielacronx - it is +licensed under the same terms as the zfec source code itself. 
+ +On Debian systems, the complete text of the GNU General Public License +can be found in `/usr/share/common-licenses/GPL'. + +------- +Portions derived from the "fec" software by Luigi Rizzo, et +al., the copyright notice and licence terms of which are included below +for reference. + +fec/fec.[ch] -- forward error correction based on Vandermonde matrices +980614 +(C) 1997-98 Luigi Rizzo (luigi@iet.unipi.it) + +Portions derived from code by Phil Karn (karn@ka9q.ampr.org), +Robert Morelos-Zaragoza (robert@spectra.eng.hawaii.edu) and Hari +Thirumoorthy (harit@spectra.eng.hawaii.edu), Aug 1995 + +Modifications by Dan Rubenstein (see Modifications.txt for +their description. +Modifications (C) 1998 Dan Rubenstein (drubenst@cs.umass.edu) + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions +are met: + +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. +2. Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials + provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS +BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, +OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, +OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR +TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT +OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY +OF SUCH DAMAGE. + +------- + Transitive Grace Period Public Licence ("TGPPL") v. 1.0 + + This Transitive Grace Period Public Licence (the "License") applies to any + original work of authorship (the "Original Work") whose owner (the + "Licensor") has placed the following licensing notice adjacent to the + copyright notice for the Original Work: + + Licensed under the Transitive Grace Period Public Licence version 1.0 + + 1. Grant of Copyright License. Licensor grants You a worldwide, + royalty-free, non-exclusive, sublicensable license, for the duration + of the copyright, to do the following: + a. to reproduce the Original Work in copies, either alone or as part + of a collective work; + b. to translate, adapt, alter, transform, modify, or arrange the + Original Work, thereby creating derivative works ("Derivative + Works") based upon the Original Work; + c. to distribute or communicate copies of the Original Work and + Derivative Works to the public, with the proviso that copies of + Original Work or Derivative Works that You distribute or + communicate shall be licensed under this Transitive Grace Period + Public Licence no later than 12 months after You distributed or + communicated said copies; + d. to perform the Original Work publicly; and + e. to display the Original Work publicly. + 2. Grant of Patent License. 
Licensor grants You a worldwide, + royalty-free, non-exclusive, sublicensable license, under patent + claims owned or controlled by the Licensor that are embodied in the + Original Work as furnished by the Licensor, for the duration of the + patents, to make, use, sell, offer for sale, have made, and import the + Original Work and Derivative Works. + 3. Grant of Source Code License. The term "Source Code" means the + preferred form of the Original Work for making modifications to it and + all available documentation describing how to modify the Original + Work. Licensor agrees to provide a machine-readable copy of the Source + Code of the Original Work along with each copy of the Original Work + that Licensor distributes. Licensor reserves the right to satisfy this + obligation by placing a machine-readable copy of the Source Code in an + information repository reasonably calculated to permit inexpensive and + convenient access by You for as long as Licensor continues to + distribute the Original Work. + 4. Exclusions From License Grant. Neither the names of Licensor, nor the + names of any contributors to the Original Work, nor any of their + trademarks or service marks, may be used to endorse or promote + products derived from this Original Work without express prior + permission of the Licensor. Except as expressly stated herein, nothing + in this License grants any license to Licensor's trademarks, + copyrights, patents, trade secrets or any other intellectual property. + No patent license is granted to make, use, sell, offer for sale, have + made, or import embodiments of any patent claims other than the + licensed claims defined in Section 2. No license is granted to the + trademarks of Licensor even if such marks are included in the Original + Work. Nothing in this License shall be interpreted to prohibit + Licensor from licensing under terms different from this License any + Original Work that Licensor otherwise would have a right to license. + 5. External Deployment. The term "External Deployment" means the use, + distribution, or communication of the Original Work or Derivative + Works in any way such that the Original Work or Derivative Works may + be used by anyone other than You, whether those works are distributed + or communicated to those persons or made available as an application + intended for use over a network. As an express condition for the + grants of license hereunder, You must treat any External Deployment by + You of the Original Work or a Derivative Work as a distribution under + section 1(c). + 6. Attribution Rights. You must retain, in the Source Code of any + Derivative Works that You create, all copyright, patent, or trademark + notices from the Source Code of the Original Work, as well as any + notices of licensing and any descriptive text identified therein as an + "Attribution Notice." You must cause the Source Code for any + Derivative Works that You create to carry a prominent Attribution + Notice reasonably calculated to inform recipients that You have + modified the Original Work. + 7. Warranty of Provenance and Disclaimer of Warranty. Licensor warrants + that the copyright in and to the Original Work and the patent rights + granted herein by Licensor are owned by the Licensor or are + sublicensed to You under the terms of this License with the permission + of the contributor(s) of those copyrights and patent rights. 
Except as + expressly stated in the immediately preceding sentence, the Original + Work is provided under this License on an "AS IS" BASIS and WITHOUT + WARRANTY, either express or implied, including, without limitation, + the warranties of non-infringement, merchantability or fitness for a + particular purpose. THE ENTIRE RISK AS TO THE QUALITY OF THE ORIGINAL + WORK IS WITH YOU. This DISCLAIMER OF WARRANTY constitutes an essential + part of this License. No license to the Original Work is granted by + this License except under this disclaimer. + 8. Limitation of Liability. Under no circumstances and under no legal + theory, whether in tort (including negligence), contract, or + otherwise, shall the Licensor be liable to anyone for any indirect, + special, incidental, or consequential damages of any character arising + as a result of this License or the use of the Original Work including, + without limitation, damages for loss of goodwill, work stoppage, + computer failure or malfunction, or any and all other commercial + damages or losses. This limitation of liability shall not apply to the + extent applicable law prohibits such limitation. + 9. Acceptance and Termination. If, at any time, You expressly assented to + this License, that assent indicates your clear and irrevocable + acceptance of this License and all of its terms and conditions. If You + distribute or communicate copies of the Original Work or a Derivative + Work, You must make a reasonable effort under the circumstances to + obtain the express assent of recipients to the terms of this License. + This License conditions your rights to undertake the activities listed + in Section 1, including your right to create Derivative Works based + upon the Original Work, and doing so without honoring these terms and + conditions is prohibited by copyright law and international treaty. + Nothing in this License is intended to affect copyright exceptions and + limitations (including 'fair use' or 'fair dealing'). This License + shall terminate immediately and You may no longer exercise any of the + rights granted to You by this License upon your failure to honor the + conditions in Section 1(c). + 10. Termination for Patent Action. This License shall terminate + automatically and You may no longer exercise any of the rights granted + to You by this License as of the date You commence an action, + including a cross-claim or counterclaim, against Licensor or any + licensee alleging that the Original Work infringes a patent. This + termination provision shall not apply for an action alleging patent + infringement by combinations of the Original Work with other software + or hardware. + 11. Jurisdiction, Venue and Governing Law. Any action or suit relating to + this License may be brought only in the courts of a jurisdiction + wherein the Licensor resides or in which Licensor conducts its primary + business, and under the laws of that jurisdiction excluding its + conflict-of-law provisions. The application of the United Nations + Convention on Contracts for the International Sale of Goods is + expressly excluded. Any use of the Original Work outside the scope of + this License or after its termination shall be subject to the + requirements and penalties of copyright or patent law in the + appropriate jurisdiction. This section shall survive the termination + of this License. + 12. Attorneys' Fees. 
In any action to enforce the terms of this License or + seeking damages relating thereto, the prevailing party shall be + entitled to recover its costs and expenses, including, without + limitation, reasonable attorneys' fees and costs incurred in + connection with such action, including any appeal of such action. This + section shall survive the termination of this License. + 13. Miscellaneous. If any provision of this License is held to be + unenforceable, such provision shall be reformed only to the extent + necessary to make it enforceable. + 14. Definition of "You" in This License. "You" throughout this License, + whether in upper or lower case, means an individual or a legal entity + exercising rights under, and complying with all of the terms of, this + License. For legal entities, "You" includes any entity that controls, + is controlled by, or is under common control with you. For purposes of + this definition, "control" means (i) the power, direct or indirect, to + cause the direction or management of such entity, whether by contract + or otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + 15. Right to Use. You may use the Original Work in all ways not otherwise + restricted or conditioned by this License or by law, and Licensor + promises not to interfere with or be responsible for such uses by You. + 16. Modification of This License. This License is Copyright (c) 2007 Zooko + Wilcox-O'Hearn. Permission is granted to copy, distribute, or + communicate this License without modification. Nothing in this License + permits You to modify this License as applied to the Original Work or + to Derivative Works. However, You may modify the text of this License + and copy, distribute or communicate your modified version (the + "Modified License") and apply it to other original works of authorship + subject to the following conditions: (i) You may not indicate in any + way that your Modified License is the "Transitive Grace Period Public + Licence" or "TGPPL" and you may not use those names in the name of + your Modified License; and (ii) You must replace the notice specified + in the first paragraph above with the notice "Licensed under " or with + a notice of your own that is not confusingly similar to the notice in + this License. + +------- +Regarding the text "License: BSD3" which appears in +haskell/Codec/FEC.hs: + +On 2008-01-20, Zooko O'Whielacronx wrote: "Will you please give me +rights to use your patches however I like? If you do so, I will +immediately publish your patches under the GPL and under the TGPPL, but +I will also reserve the right to use your patches however I like, or +more probably to delegate that right to allmydata.com." + +Adam Langley replied: "Certainly. I would generally say that GPL is a +little strict for this sort of thing (LGPL would be better), but I put +zfec on Hackage (the Haskell CPAN) with a GPL tag. You may do with the +Haskell code as you wish, including relicensing etc." diff --git a/fec.cabal b/fec.cabal new file mode 100644 index 0000000..a0668eb --- /dev/null +++ b/fec.cabal @@ -0,0 +1,30 @@ +name: fec +version: 0.1.1 +license: GPL +license-file: README.rst +author: Adam Langley +maintainer: Adam Langley +description: This code, based on zfec by Zooko, based on code by Luigi + Rizzo implements an erasure code, or forward error + correction code. 
The most widely known example of an erasure + code is the RAID-5 algorithm which makes it so that in the + event of the loss of any one hard drive, the stored data can + be completely recovered. The algorithm in the zfec package + has a similar effect, but instead of recovering from the loss + of only a single element, it can be parameterized to choose in + advance the number of elements whose loss it can tolerate. +build-type: Simple +homepage: http://allmydata.org/source/zfec +synopsis: Forward error correction of ByteStrings +category: Codec +build-depends: base, bytestring>=0.9 +stability: provisional +tested-with: GHC == 6.8.2 +exposed-modules: Codec.FEC +extensions: ForeignFunctionInterface +hs-source-dirs: haskell +ghc-options: -Wall +c-sources: zfec/fec.c +cc-options: -std=c99 +include-dirs: zfec +extra-source-files: zfec/fec.h, COPYING.GPL, COPYING.TGPPL.rst diff --git a/haskell/Codec/FEC.hs b/haskell/Codec/FEC.hs new file mode 100644 index 0000000..a389eff --- /dev/null +++ b/haskell/Codec/FEC.hs @@ -0,0 +1,246 @@ +{-# LANGUAGE ForeignFunctionInterface, EmptyDataDecls #-} +-- | +-- Module: Codec.FEC +-- Copyright: Adam Langley +-- License: GPLv2+|TGPPLv1+ (see README.rst for details) +-- +-- Stability: experimental +-- +-- The module provides k of n encoding - a way to generate (n - k) secondary +-- blocks of data from k primary blocks such that any k blocks (primary or +-- secondary) are sufficient to regenerate all blocks. +-- +-- All blocks must be the same length and you need to keep track of which +-- blocks you have in order to tell decode. By convention, the blocks are +-- numbered 0..(n - 1) and blocks numbered < k are the primary blocks. + +module Codec.FEC ( + FECParams + , fec + , encode + , decode + + -- * Utility functions + , secureDivide + , secureCombine + , enFEC + , deFEC + ) where + +import qualified Data.ByteString as B +import qualified Data.ByteString.Unsafe as BU +import qualified Data.ByteString.Internal as BI +import Data.Word (Word8) +import Data.Bits (xor) +import Data.List (sortBy, partition, (\\), nub) +import Foreign.Ptr +import Foreign.Storable (sizeOf, poke) +import Foreign.ForeignPtr +import Foreign.C.Types +import Foreign.Marshal.Alloc +import Foreign.Marshal.Array (withArray, advancePtr) +import System.IO (withFile, IOMode(..)) +import System.IO.Unsafe (unsafePerformIO) + +data CFEC +data FECParams = FECParams (ForeignPtr CFEC) Int Int + +instance Show FECParams where + show (FECParams _ k n) = "FEC (" ++ show k ++ ", " ++ show n ++ ")" + +foreign import ccall unsafe "fec_new" _new :: CUInt -- ^ k + -> CUInt -- ^ n + -> IO (Ptr CFEC) +foreign import ccall unsafe "&fec_free" _free :: FunPtr (Ptr CFEC -> IO ()) +foreign import ccall unsafe "fec_encode" _encode :: Ptr CFEC + -> Ptr (Ptr Word8) -- ^ primary blocks + -> Ptr (Ptr Word8) -- ^ (output) secondary blocks + -> Ptr CUInt -- ^ array of secondary block ids + -> CSize -- ^ length of previous + -> CSize -- ^ block length + -> IO () +foreign import ccall unsafe "fec_decode" _decode :: Ptr CFEC + -> Ptr (Ptr Word8) -- ^ input blocks + -> Ptr (Ptr Word8) -- ^ output blocks + -> Ptr CUInt -- ^ array of input indexes + -> CSize -- ^ block length + -> IO () + +-- | Return true if the given @k@ and @n@ values are valid +isValidConfig :: Int -> Int -> Bool +isValidConfig k n + | k >= n = False + | k < 1 = False + | n < 1 = False + | n > 255 = False + | otherwise = True + +-- | Return a FEC with the given parameters. 
+fec :: Int -- ^ the number of primary blocks
+ -> Int -- ^ the total number of blocks, must be < 256
+ -> FECParams
+fec k n =
+ if not (isValidConfig k n)
+ then error $ "Invalid FEC parameters: " ++ show k ++ " " ++ show n
+ else unsafePerformIO (do
+ cfec <- _new (fromIntegral k) (fromIntegral n)
+ params <- newForeignPtr _free cfec
+ return $ FECParams params k n)
+
+-- | Create a C array of unsigned from an input array
+uintCArray :: [Int] -> ((Ptr CUInt) -> IO a) -> IO a
+uintCArray xs f = withArray (map fromIntegral xs) f
+
+-- | Convert a list of ByteStrings to an array of pointers to their data
+byteStringsToArray :: [B.ByteString] -> ((Ptr (Ptr Word8)) -> IO a) -> IO a
+byteStringsToArray inputs f = do
+ let l = length inputs
+ allocaBytes (l * sizeOf (undefined :: Ptr Word8)) (\array -> do
+ let inner _ [] = f array
+ inner array' (bs : bss) = BU.unsafeUseAsCString bs (\ptr -> do
+ poke array' $ castPtr ptr
+ inner (advancePtr array' 1) bss)
+ inner array inputs)
+
+-- | Return True iff all the given ByteStrings are the same length
+allByteStringsSameLength :: [B.ByteString] -> Bool
+allByteStringsSameLength [] = True
+allByteStringsSameLength (bs : bss) = all ((==) (B.length bs)) $ map B.length bss
+
+-- | Run the given function with a pointer to an array of @n@ pointers to
+-- buffers of size @size@. Return these buffers as a list of ByteStrings
+createByteStringArray :: Int -- ^ the number of buffers requested
+ -> Int -- ^ the size of each buffer
+ -> ((Ptr (Ptr Word8)) -> IO ())
+ -> IO [B.ByteString]
+createByteStringArray n size f = do
+ allocaBytes (n * sizeOf (undefined :: Ptr Word8)) (\array -> do
+ allocaBytes (n * size) (\ptr -> do
+ mapM_ (\i -> poke (advancePtr array i) (advancePtr ptr (size * i))) [0..(n - 1)]
+ f array
+ mapM (\i -> B.packCStringLen (castPtr $ advancePtr ptr (i * size), size)) [0..(n - 1)]))
+
+-- | Generate the secondary blocks from a list of the primary blocks. The
+-- primary blocks must be in order and all of the same size. There must be
+-- @k@ primary blocks.
+encode :: FECParams
+ -> [B.ByteString] -- ^ a list of @k@ input blocks
+ -> [B.ByteString] -- ^ (n - k) output blocks
+encode (FECParams params k n) inblocks
+ | length inblocks /= k = error "Wrong number of blocks to FEC encode"
+ | not (allByteStringsSameLength inblocks) = error "Not all inputs to FEC encode are the same length"
+ | otherwise = unsafePerformIO (do
+ let sz = B.length $ head inblocks
+ withForeignPtr params (\cfec -> do
+ byteStringsToArray inblocks (\src -> do
+ createByteStringArray (n - k) sz (\fecs -> do
+ uintCArray [k..(n - 1)] (\block_nums -> do
+ _encode cfec src fecs block_nums (fromIntegral (n - k)) $ fromIntegral sz)))))
+
+-- | A sort function for tagged assoc lists
+sortTagged :: [(Int, a)] -> [(Int, a)]
+sortTagged = sortBy (\a b -> compare (fst a) (fst b))
+
+-- | Reorder the given list so that elements with tag numbers < the first
+-- argument have an index equal to their tag number (if possible)
+reorderPrimaryBlocks :: Int -> [(Int, a)] -> [(Int, a)]
+reorderPrimaryBlocks n blocks = inner (sortTagged pBlocks) sBlocks [] where
+ (pBlocks, sBlocks) = partition (\(tag, _) -> tag < n) blocks
+ inner [] sBlocks acc = acc ++ sBlocks
+ inner pBlocks [] acc = acc ++ pBlocks
+ inner pBlocks@((tag, a) : ps) sBlocks@(s : ss) acc =
+ if length acc == tag
+ then inner ps sBlocks (acc ++ [(tag, a)])
+ else inner pBlocks ss (acc ++ [s])
+
+-- | Recover the primary blocks from a list of @k@ blocks. Each block must be
+-- tagged with its number (see the module comments about block numbering)
+decode :: FECParams
+ -> [(Int, B.ByteString)] -- ^ a list of @k@ blocks and their index
+ -> [B.ByteString] -- ^ a list of the @k@ primary blocks
+decode (FECParams params k n) inblocks
+ | length (nub $ map fst inblocks) /= length (inblocks) = error "Duplicate input blocks in FEC decode"
+ | any (\f -> f < 0 || f >= n) $ map fst inblocks = error "Invalid block numbers in FEC decode"
+ | length inblocks /= k = error "Wrong number of blocks to FEC decode"
+ | not (allByteStringsSameLength $ map snd inblocks) = error "Not all inputs to FEC decode are same length"
+ | otherwise = unsafePerformIO (do
+ let sz = B.length $ snd $ head inblocks
+ inblocks' = reorderPrimaryBlocks k inblocks
+ presentBlocks = map fst inblocks'
+ withForeignPtr params (\cfec -> do
+ byteStringsToArray (map snd inblocks') (\src -> do
+ b <- createByteStringArray (n - k) sz (\out -> do
+ uintCArray presentBlocks (\block_nums -> do
+ _decode cfec src out block_nums $ fromIntegral sz))
+ let blocks = [0..(n - 1)] \\ presentBlocks
+ tagged = zip blocks b
+ allBlocks = sortTagged $ tagged ++ inblocks'
+ return $ take k $ map snd allBlocks)))
+
+-- | Break a ByteString into @n@ parts, equal in length to the original, such
+-- that all @n@ are required to reconstruct the original, but having less
+-- than @n@ parts reveals no information about the original.
+--
+-- This code works in IO monad because it needs a source of random bytes,
+-- which it gets from /dev/urandom. If this file doesn't exist, an
+-- exception results
+--
+-- Not terribly fast - probably best to do it with short inputs (e.g. an
+-- encryption key)
+secureDivide :: Int -- ^ the number of parts requested
+ -> B.ByteString -- ^ the data to be split
+ -> IO [B.ByteString]
+secureDivide n input
+ | n < 0 = error "secureDivide called with negative number of parts"
+ | otherwise = withFile "/dev/urandom" ReadMode (\handle -> do
+ let inner 1 bs = return [bs]
+ inner n bs = do
+ mask <- B.hGet handle (B.length bs)
+ let masked = B.pack $ B.zipWith xor bs mask
+ rest <- inner (n - 1) masked
+ return (mask : rest)
+ inner n input)
+
+-- | Reverse the operation of secureDivide. The order of the inputs doesn't
+-- matter, but they must all be the same length
+secureCombine :: [B.ByteString] -> B.ByteString
+secureCombine [] = error "Passed empty list of inputs to secureCombine"
+secureCombine [a] = a
+secureCombine [a, b] = B.pack $ B.zipWith xor a b
+secureCombine (a : rest) = B.pack $ B.zipWith xor a $ secureCombine rest
+
+-- | A utility function which takes an arbitrary input and FEC encodes it into a
+-- number of blocks. The order of the resulting blocks doesn't matter so long
+-- as you have enough to present to @deFEC@.
+enFEC :: Int -- ^ the number of blocks required to reconstruct
+ -> Int -- ^ the total number of blocks
+ -> B.ByteString -- ^ the data to divide
+ -> [B.ByteString] -- ^ the resulting blocks
+enFEC k n input = taggedPrimaryBlocks ++ taggedSecondaryBlocks where
+ taggedPrimaryBlocks = map (uncurry B.cons) $ zip [0..] primaryBlocks
+ taggedSecondaryBlocks = map (uncurry B.cons) $ zip [(fromIntegral k)..]
secondaryBlocks + remainder = B.length input `mod` k + paddingLength = if remainder >= 1 then (k - remainder) else k + paddingBytes = (B.replicate (paddingLength - 1) 0) `B.append` (B.singleton $ fromIntegral paddingLength) + divide a bs + | B.null bs = [] + | otherwise = (B.take a bs) : (divide a $ B.drop a bs) + input' = input `B.append` paddingBytes + blockSize = B.length input' `div` k + primaryBlocks = divide blockSize input' + secondaryBlocks = encode params primaryBlocks + params = fec k n + +-- | Reverses the operation of @enFEC@. +deFEC :: Int -- ^ the number of blocks required (matches call to @enFEC@) + -> Int -- ^ the total number of blocks (matches call to @enFEC@) + -> [B.ByteString] -- ^ a list of k, or more, blocks from @enFEC@ + -> B.ByteString +deFEC k n inputs + | length inputs < k = error "Too few inputs to deFEC" + | otherwise = B.take (B.length fecOutput - paddingLength) fecOutput where + paddingLength = fromIntegral $ B.last fecOutput + inputs' = take k inputs + taggedInputs = map (\bs -> (fromIntegral $ B.head bs, B.tail bs)) inputs' + fecOutput = B.concat $ decode params taggedInputs + params = fec k n diff --git a/haskell/test/FECTest.hs b/haskell/test/FECTest.hs new file mode 100644 index 0000000..0f5a353 --- /dev/null +++ b/haskell/test/FECTest.hs @@ -0,0 +1,57 @@ +module Main where + +import qualified Data.ByteString as B +import qualified Codec.FEC as FEC +import System.IO (withFile, IOMode(..)) +import System.Random +import Data.List (sortBy) + +import Test.QuickCheck + +-- | Return true if the given @k@ and @n@ values are valid +isValidConfig :: Int -> Int -> Bool +isValidConfig k n + | k >= n = False + | k < 1 = False + | n < 1 = False + | otherwise = True + +randomTake :: Int -> Int -> [a] -> [a] +randomTake seed n values = map snd $ take n sortedValues where + sortedValues = sortBy (\a b -> compare (fst a) (fst b)) taggedValues + taggedValues = zip rnds values + rnds :: [Float] + rnds = randoms gen + gen = mkStdGen seed + +testFEC k n len seed = FEC.decode fec someTaggedBlocks == origBlocks where + origBlocks = map (\i -> B.replicate len $ fromIntegral i) [0..(k - 1)] + fec = FEC.fec k n + secondaryBlocks = FEC.encode fec origBlocks + taggedBlocks = zip [0..] 
(origBlocks ++ secondaryBlocks) + someTaggedBlocks = randomTake seed k taggedBlocks + +prop_FEC :: Int -> Int -> Int -> Int -> Property +prop_FEC k n len seed = + isValidConfig k n && n < 256 && len < 1024 ==> testFEC k n len seed + +checkDivide :: Int -> IO () +checkDivide n = do + let input = B.replicate 1024 65 + parts <- FEC.secureDivide n input + if FEC.secureCombine parts == input + then return () + else fail "checkDivide failed" + +checkEnFEC :: Int -> IO () +checkEnFEC len = do + testdata <- withFile "/dev/urandom" ReadMode (\handle -> B.hGet handle len) + let [a, b, c, d, e] = FEC.enFEC 3 5 testdata + if FEC.deFEC 3 5 [b, e, d] == testdata + then return () + else fail "deFEC failure" + +main = do + mapM_ (check (defaultConfig { configMaxTest = 1000, configMaxFail = 10000 })) [prop_FEC] + mapM_ checkDivide [1, 2, 3, 4, 10] + mapM_ checkEnFEC [1, 2, 3, 4, 5, 1024 * 1024] diff --git a/setup.cfg b/setup.cfg new file mode 100644 index 0000000..620b349 --- /dev/null +++ b/setup.cfg @@ -0,0 +1,7 @@ +[easy_install] +# zfec actually does work at least as well as any package +# works when zipped, but zipping eggs causes various problems +# (http://bugs.python.org/setuptools/issue33 ), and generally makes it +# harder for people to get at the source code, and doesn't actually +# provide any benefits that I am aware of. +zip_ok=False diff --git a/setup.py b/setup.py new file mode 100755 index 0000000..8a1a5b3 --- /dev/null +++ b/setup.py @@ -0,0 +1,199 @@ + +# zfec -- fast forward error correction library with Python interface +# +# copyright © 2007-2013 Zooko Wilcox-O'Hearn +# +# This file is part of zfec. +# +# See README.rst for licensing information. + +import glob, os, re, sys + +egg = os.path.realpath(glob.glob('setuptools-*.egg')[0]) +sys.path.insert(0, egg) +import setuptools; setuptools.bootstrap_install_from = egg + +from setuptools import Extension, find_packages, setup + +if "--debug" in sys.argv: + DEBUGMODE=True + sys.argv.remove("--debug") +else: + DEBUGMODE=False + +extra_compile_args=[] +extra_link_args=[] + +extra_compile_args.append("-std=c99") + +define_macros=[] +undef_macros=[] + +for arg in sys.argv: + if arg.startswith("--stride="): + stride = int(arg[len("--stride="):]) + define_macros.append(('STRIDE', stride)) + sys.argv.remove(arg) + break + +if DEBUGMODE: + extra_compile_args.append("-O0") + extra_compile_args.append("-g") + extra_compile_args.append("-Wall") + extra_link_args.append("-g") + undef_macros.append('NDEBUG') + +trove_classifiers=[ + "Development Status :: 5 - Production/Stable", + "Environment :: Console", + "License :: OSI Approved :: GNU General Public License (GPL)", # See README.rst for alternative licensing. 
+ "License :: DFSG approved", + "License :: Other/Proprietary License", + "Intended Audience :: Developers", + "Intended Audience :: End Users/Desktop", + "Intended Audience :: System Administrators", + "Operating System :: Microsoft", + "Operating System :: Microsoft :: Windows", + "Operating System :: Unix", + "Operating System :: POSIX :: Linux", + "Operating System :: POSIX", + "Operating System :: MacOS :: MacOS X", + "Operating System :: Microsoft :: Windows :: Windows NT/2000", + "Operating System :: OS Independent", + "Natural Language :: English", + "Programming Language :: C", + "Programming Language :: Python", + "Programming Language :: Python :: 2", + "Programming Language :: Python :: 2.4", + "Programming Language :: Python :: 2.5", + "Programming Language :: Python :: 2.6", + "Programming Language :: Python :: 2.7", + "Topic :: Utilities", + "Topic :: System :: Systems Administration", + "Topic :: System :: Filesystems", + "Topic :: System :: Distributed Computing", + "Topic :: Software Development :: Libraries", + "Topic :: Communications :: Usenet News", + "Topic :: System :: Archiving :: Backup", + "Topic :: System :: Archiving :: Mirroring", + "Topic :: System :: Archiving", + ] + +PKG = "zfec" +VERSIONFILE = os.path.join(PKG, "_version.py") +verstr = "unknown" +try: + verstrline = open(VERSIONFILE, "rt").read() +except EnvironmentError: + pass # Okay, there is no version file. +else: + VSRE = r"^verstr = ['\"]([^'\"]*)['\"]" + mo = re.search(VSRE, verstrline, re.M) + if mo: + verstr = mo.group(1) + else: + print "unable to find version in %s" % (VERSIONFILE,) + raise RuntimeError("if %s.py exists, it is required to be well-formed" % (VERSIONFILE,)) + +setup_requires = [] +tests_require = [] + +tests_require.append("pyutil >= 1.3.19") + +# darcsver is needed only if you want "./setup.py darcsver" to write a new +# version stamp in pyutil/_version.py, with a version number derived from +# darcs history. http://pypi.python.org/pypi/darcsver +if 'darcsver' in sys.argv[1:]: + setup_requires.append('darcsver >= 1.0.0') + +# setuptools_darcs is required to produce complete distributions (such +# as with "sdist" or "bdist_egg"), unless there is a +# zfec.egg-info/SOURCE.txt file present which contains a complete +# list of files that should be included. +# http://pypi.python.org/pypi/setuptools_darcs + +# However, requiring it runs afoul of a bug in Distribute, which was +# shipped in Ubuntu Lucid, so for now you have to manually install it +# before building sdists or eggs: +# http://bitbucket.org/tarek/distribute/issue/55/revision-control-plugin-automatically-installed-as-a-build-dependency-is-not-present-when-another-build-dependency-is-being +if False: + setup_requires.append('setuptools_darcs >= 1.1.0') + + +# setuptools_trial is needed if you want "./setup.py trial" or +# "./setup.py test" to execute the tests. +# http://pypi.python.org/pypi/setuptools_trial +if 'trial' in sys.argv[1:]: + setup_requires.extend(['setuptools_trial >= 0.5']) + +# trialcoverage is required if you want the "trial" unit test runner to have a +# "--reporter=bwverbose-coverage" option which produces code-coverage results. +if "--reporter=bwverbose-coverage" in sys.argv: + tests_require.append('trialcoverage >= 0.3.3') + tests_require.append('twisted >= 2.4.0') + tests_require.append('setuptools_trial >= 0.5') + +# stdeb is required to build Debian dsc files. 
+if "sdist_dsc" in sys.argv: + setup_requires.append('stdeb') + +data_fnames=[ 'COPYING.GPL', 'changelog', 'COPYING.TGPPL.rst', 'TODO', 'README.rst' ] + +# In case we are building for a .deb with stdeb's sdist_dsc command, we put the +# docs in "share/doc/$PKG". +doc_loc = "share/doc/" + PKG +data_files = [(doc_loc, data_fnames)] + +readmetext = open('README.rst').read() +if readmetext[:3] == '\xef\xbb\xbf': + # utf-8 "BOM" + readmetext = readmetext[3:] + +try: + readmetext = readmetext.decode('utf-8') +except UnicodeDecodeError: + pass + +install_requires=["pyutil >= 1.3.19"] + +# argparse comes built into Python >= 2.7, and is provided by the "argparse" +# distribution for earlier versions of Python. +try: + import argparse + argparse # hush pyflakes +except ImportError: + install_requires.append("argparse >= 0.8") + +# distutils in Python 2.4 has a bug in that it tries to encode the long +# description into ascii. We detect the resulting exception and try again +# after squashing the long description (lossily) into ascii. + +def _setup(longdescription): + setup(name=PKG, + version=verstr, + description='a fast erasure codec which can be used with the command-line, C, Python, or Haskell', + long_description=longdescription, + author='Zooko O\'Whielacronx', + author_email='zooko@zooko.com', + url='https://tahoe-lafs.org/trac/'+PKG, + license='GNU GPL', # See README.rst for alternative licensing. + install_requires=install_requires, + tests_require=tests_require, + packages=find_packages(), + include_package_data=True, + data_files=data_files, + setup_requires=setup_requires, + classifiers=trove_classifiers, + entry_points = { 'console_scripts': [ 'zfec = %s.cmdline_zfec:main' % PKG, 'zunfec = %s.cmdline_zunfec:main' % PKG ] }, + ext_modules=[Extension(PKG+'._fec', [PKG+'/fec.c', PKG+'/_fecmodule.c',], extra_link_args=extra_link_args, extra_compile_args=extra_compile_args, undef_macros=undef_macros, define_macros=define_macros),], + test_suite=PKG+".test", + zip_safe=False, # I prefer unzipped for easier access. + extras_require={ + 'ed25519=ba95497adf4db8e17f688c0979003c48c76897d60e2d2193f938b9ab62115f59':[], + }, + ) + +try: + _setup(readmetext) +except UnicodeEncodeError: + _setup(repr(readmetext)) diff --git a/setuptools-0.6c16dev3.egg/EGG-INFO/PKG-INFO b/setuptools-0.6c16dev3.egg/EGG-INFO/PKG-INFO new file mode 100644 index 0000000..c9df489 --- /dev/null +++ b/setuptools-0.6c16dev3.egg/EGG-INFO/PKG-INFO @@ -0,0 +1,100 @@ +Metadata-Version: 1.0 +Name: setuptools +Version: 0.6c16dev3 +Summary: Download, build, install, upgrade, and uninstall Python packages -- easily! (zetuptoolz fork) +Home-page: http://pypi.python.org/pypi/setuptools +Author: Phillip J. Eby +Author-email: distutils-sig@python.org +License: PSF or ZPL +Description: ====================== + This is not Setuptools + ====================== + + This is the ``zetuptoolz`` fork of setuptools, which is used to install + `Tahoe-LAFS`_. It has a `darcs source repository`_ and `issue tracker`_. + + For a list of differences between this fork and setuptools, see zetuptoolz.txt. + + Note that, to avoid interfering with any setuptools installation, zetuptoolz + does not install a script called ``easy_install``. There is an ``easy_install_z`` + script, but that is intended only for developers to test differences between + setuptools and zetuptoolz. + + .. _Tahoe-LAFS: http://tahoe-lafs.org/ + .. _darcs source repository: http://tahoe-lafs.org/source/zetuptoolz/trunk + .. 
_issue tracker: http://tahoe-lafs.org/trac/zetuptoolz + + + -------------------------------- + Using Setuptools and EasyInstall + -------------------------------- + + Here are some of the available manuals, tutorials, and other resources for + learning about Setuptools, Python Eggs, and EasyInstall: + + * `The EasyInstall user's guide and reference manual`_ + * `The setuptools Developer's Guide`_ + * `The pkg_resources API reference`_ + * `Package Compatibility Notes`_ (user-maintained) + * `The Internal Structure of Python Eggs`_ + + Questions, comments, and bug reports should be directed to the `distutils-sig + mailing list`_. If you have written (or know of) any tutorials, documentation, + plug-ins, or other resources for setuptools users, please let us know about + them there, so this reference list can be updated. If you have working, + *tested* patches to correct problems or add features, you may submit them to + the `setuptools bug tracker`_. + + .. _setuptools bug tracker: http://bugs.python.org/setuptools/ + .. _Package Compatibility Notes: http://peak.telecommunity.com/DevCenter/PackageNotes + .. _The Internal Structure of Python Eggs: http://peak.telecommunity.com/DevCenter/EggFormats + .. _The setuptools Developer's Guide: http://peak.telecommunity.com/DevCenter/setuptools + .. _The pkg_resources API reference: http://peak.telecommunity.com/DevCenter/PkgResources + .. _The EasyInstall user's guide and reference manual: http://peak.telecommunity.com/DevCenter/EasyInstall + .. _distutils-sig mailing list: http://mail.python.org/pipermail/distutils-sig/ + + + ------- + Credits + ------- + + * The original design for the ``.egg`` format and the ``pkg_resources`` API was + co-created by Phillip Eby and Bob Ippolito. Bob also implemented the first + version of ``pkg_resources``, and supplied the OS X operating system version + compatibility algorithm. + + * Ian Bicking implemented many early "creature comfort" features of + easy_install, including support for downloading via Sourceforge and + Subversion repositories. Ian's comments on the Web-SIG about WSGI + application deployment also inspired the concept of "entry points" in eggs, + and he has given talks at PyCon and elsewhere to inform and educate the + community about eggs and setuptools. + + * Jim Fulton contributed time and effort to build automated tests of various + aspects of ``easy_install``, and supplied the doctests for the command-line + ``.exe`` wrappers on Windows. + + * Phillip J. Eby is the principal author and maintainer of setuptools, and + first proposed the idea of an importable binary distribution format for + Python application plug-ins. + + * Significant parts of the implementation of setuptools were funded by the Open + Source Applications Foundation, to provide a plug-in infrastructure for the + Chandler PIM application. In addition, many OSAF staffers (such as Mike + "Code Bear" Taylor) contributed their time and stress as guinea pigs for the + use of eggs and setuptools, even before eggs were "cool". (Thanks, guys!) + + .. 
_files: + +Keywords: CPAN PyPI distutils eggs package management +Platform: UNKNOWN +Classifier: Development Status :: 3 - Alpha +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Python Software Foundation License +Classifier: License :: OSI Approved :: Zope Public License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Topic :: System :: Archiving :: Packaging +Classifier: Topic :: System :: Systems Administration +Classifier: Topic :: Utilities diff --git a/setuptools-0.6c16dev3.egg/EGG-INFO/SOURCES.txt b/setuptools-0.6c16dev3.egg/EGG-INFO/SOURCES.txt new file mode 100644 index 0000000..07bd4ff --- /dev/null +++ b/setuptools-0.6c16dev3.egg/EGG-INFO/SOURCES.txt @@ -0,0 +1,44 @@ +README.txt +easy_install.py +pkg_resources.py +setup.cfg +setup.py +setuptools/__init__.py +setuptools/archive_util.py +setuptools/depends.py +setuptools/dist.py +setuptools/extension.py +setuptools/package_index.py +setuptools/sandbox.py +setuptools/site-patch.py +setuptools.egg-info/PKG-INFO +setuptools.egg-info/SOURCES.txt +setuptools.egg-info/dependency_links.txt +setuptools.egg-info/entry_points.txt +setuptools.egg-info/top_level.txt +setuptools.egg-info/zip-safe +setuptools/command/__init__.py +setuptools/command/alias.py +setuptools/command/bdist_egg.py +setuptools/command/bdist_rpm.py +setuptools/command/bdist_wininst.py +setuptools/command/build_ext.py +setuptools/command/build_py.py +setuptools/command/develop.py +setuptools/command/easy_install.py +setuptools/command/egg_info.py +setuptools/command/install.py +setuptools/command/install_egg_info.py +setuptools/command/install_lib.py +setuptools/command/install_scripts.py +setuptools/command/register.py +setuptools/command/rotate.py +setuptools/command/saveopts.py +setuptools/command/scriptsetup.py +setuptools/command/sdist.py +setuptools/command/setopt.py +setuptools/command/test.py +setuptools/command/upload.py +setuptools/tests/__init__.py +setuptools/tests/test_packageindex.py +setuptools/tests/test_resources.py \ No newline at end of file diff --git a/setuptools-0.6c16dev3.egg/EGG-INFO/dependency_links.txt b/setuptools-0.6c16dev3.egg/EGG-INFO/dependency_links.txt new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/setuptools-0.6c16dev3.egg/EGG-INFO/dependency_links.txt @@ -0,0 +1 @@ + diff --git a/setuptools-0.6c16dev3.egg/EGG-INFO/entry_points.txt b/setuptools-0.6c16dev3.egg/EGG-INFO/entry_points.txt new file mode 100644 index 0000000..0a31ba0 --- /dev/null +++ b/setuptools-0.6c16dev3.egg/EGG-INFO/entry_points.txt @@ -0,0 +1,59 @@ +[distutils.commands] +bdist_rpm = setuptools.command.bdist_rpm:bdist_rpm +rotate = setuptools.command.rotate:rotate +develop = setuptools.command.develop:develop +setopt = setuptools.command.setopt:setopt +build_py = setuptools.command.build_py:build_py +scriptsetup = setuptools.command.scriptsetup:scriptsetup +saveopts = setuptools.command.saveopts:saveopts +egg_info = setuptools.command.egg_info:egg_info +register = setuptools.command.register:register +install_egg_info = setuptools.command.install_egg_info:install_egg_info +alias = setuptools.command.alias:alias +easy_install = setuptools.command.easy_install:easy_install +install_scripts = setuptools.command.install_scripts:install_scripts +bdist_wininst = setuptools.command.bdist_wininst:bdist_wininst +bdist_egg = setuptools.command.bdist_egg:bdist_egg +install = 
setuptools.command.install:install +test = setuptools.command.test:test +install_lib = setuptools.command.install_lib:install_lib +build_ext = setuptools.command.build_ext:build_ext +sdist = setuptools.command.sdist:sdist + +[egg_info.writers] +dependency_links.txt = setuptools.command.egg_info:overwrite_arg +requires.txt = setuptools.command.egg_info:write_requirements +PKG-INFO = setuptools.command.egg_info:write_pkg_info +eager_resources.txt = setuptools.command.egg_info:overwrite_arg +top_level.txt = setuptools.command.egg_info:write_toplevel_names +namespace_packages.txt = setuptools.command.egg_info:overwrite_arg +entry_points.txt = setuptools.command.egg_info:write_entries +depends.txt = setuptools.command.egg_info:warn_depends_obsolete + +[console_scripts] +easy_install_z-2.6 = setuptools.command.easy_install:main +easy_install_z = setuptools.command.easy_install:main + +[setuptools.file_finders] +svn_cvs = setuptools.command.sdist:_default_revctrl + +[distutils.setup_keywords] +dependency_links = setuptools.dist:assert_string_list +entry_points = setuptools.dist:check_entry_points +extras_require = setuptools.dist:check_extras +test_runner = setuptools.dist:check_importable +package_data = setuptools.dist:check_package_data +install_requires = setuptools.dist:check_requirements +include_package_data = setuptools.dist:assert_bool +exclude_package_data = setuptools.dist:check_package_data +namespace_packages = setuptools.dist:check_nsp +test_suite = setuptools.dist:check_test_suite +eager_resources = setuptools.dist:assert_string_list +zip_safe = setuptools.dist:assert_bool +test_loader = setuptools.dist:check_importable +packages = setuptools.dist:check_packages +tests_require = setuptools.dist:check_requirements + +[setuptools.installation] +eggsecutable = setuptools.command.easy_install:bootstrap + diff --git a/setuptools-0.6c16dev3.egg/EGG-INFO/top_level.txt b/setuptools-0.6c16dev3.egg/EGG-INFO/top_level.txt new file mode 100644 index 0000000..4577c6a --- /dev/null +++ b/setuptools-0.6c16dev3.egg/EGG-INFO/top_level.txt @@ -0,0 +1,3 @@ +easy_install +pkg_resources +setuptools diff --git a/setuptools-0.6c16dev3.egg/EGG-INFO/zip-safe b/setuptools-0.6c16dev3.egg/EGG-INFO/zip-safe new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/setuptools-0.6c16dev3.egg/EGG-INFO/zip-safe @@ -0,0 +1 @@ + diff --git a/setuptools-0.6c16dev3.egg/easy_install.py b/setuptools-0.6c16dev3.egg/easy_install.py new file mode 100644 index 0000000..d87e984 --- /dev/null +++ b/setuptools-0.6c16dev3.egg/easy_install.py @@ -0,0 +1,5 @@ +"""Run the EasyInstall command""" + +if __name__ == '__main__': + from setuptools.command.easy_install import main + main() diff --git a/setuptools-0.6c16dev3.egg/pkg_resources.py b/setuptools-0.6c16dev3.egg/pkg_resources.py new file mode 100644 index 0000000..00d9722 --- /dev/null +++ b/setuptools-0.6c16dev3.egg/pkg_resources.py @@ -0,0 +1,2653 @@ +"""Package resource API +-------------------- + +A resource is a logical file contained within a package, or a logical +subdirectory thereof. The package resource API expects resource names +to have their path parts separated with ``/``, *not* whatever the local +path separator is. Do not use os.path operations to manipulate resource +names being passed into the API. + +The package resource API is designed to work with normal filesystem packages, +.egg files, and unpacked .egg files. It can also work in a limited way with +.zip files and with custom PEP 302 loaders that support the ``get_data()`` +method. 
+""" + +import sys, os, zipimport, time, re, imp + +try: + frozenset +except NameError: + from sets import ImmutableSet as frozenset + +# capture these to bypass sandboxing +from os import utime, rename, unlink, mkdir +from os import open as os_open +from os.path import isdir, split + +def _bypass_ensure_directory(name, mode=0777): + # Sandbox-bypassing version of ensure_directory() + dirname, filename = split(name) + if dirname and filename and not isdir(dirname): + _bypass_ensure_directory(dirname) + mkdir(dirname, mode) + + + + + + + + +_state_vars = {} + +def _declare_state(vartype, **kw): + g = globals() + for name, val in kw.iteritems(): + g[name] = val + _state_vars[name] = vartype + +def __getstate__(): + state = {} + g = globals() + for k, v in _state_vars.iteritems(): + state[k] = g['_sget_'+v](g[k]) + return state + +def __setstate__(state): + g = globals() + for k, v in state.iteritems(): + g['_sset_'+_state_vars[k]](k, g[k], v) + return state + +def _sget_dict(val): + return val.copy() + +def _sset_dict(key, ob, state): + ob.clear() + ob.update(state) + +def _sget_object(val): + return val.__getstate__() + +def _sset_object(key, ob, state): + ob.__setstate__(state) + +_sget_none = _sset_none = lambda *args: None + + + + + + +def get_supported_platform(): + """Return this platform's maximum compatible version. + + distutils.util.get_platform() normally reports the minimum version + of Mac OS X that would be required to *use* extensions produced by + distutils. But what we want when checking compatibility is to know the + version of Mac OS X that we are *running*. To allow usage of packages that + explicitly require a newer version of Mac OS X, we must also know the + current version of the OS. + + If this condition occurs for any other platform with a version in its + platform strings, this function should be extended accordingly. 
+ """ + plat = get_build_platform(); m = macosVersionString.match(plat) + if m is not None and sys.platform == "darwin": + try: + plat = 'macosx-%s-%s' % ('.'.join(_macosx_vers()[:2]), m.group(3)) + except ValueError: + pass # not Mac OS X + return plat + + + + + + + + + + + + + + + + + + + + + +__all__ = [ + # Basic resource access and distribution/entry point discovery + 'require', 'run_script', 'get_provider', 'get_distribution', + 'load_entry_point', 'get_entry_map', 'get_entry_info', 'iter_entry_points', + 'resource_string', 'resource_stream', 'resource_filename', + 'resource_listdir', 'resource_exists', 'resource_isdir', + + # Environmental control + 'declare_namespace', 'working_set', 'add_activation_listener', + 'find_distributions', 'set_extraction_path', 'cleanup_resources', + 'get_default_cache', + + # Primary implementation classes + 'Environment', 'WorkingSet', 'ResourceManager', + 'Distribution', 'Requirement', 'EntryPoint', + + # Exceptions + 'ResolutionError','VersionConflict','DistributionNotFound','UnknownExtra', + 'ExtractionError', + + # Parsing functions and string utilities + 'parse_requirements', 'parse_version', 'safe_name', 'safe_version', + 'get_platform', 'compatible_platforms', 'yield_lines', 'split_sections', + 'safe_extra', 'to_filename', + + # filesystem utilities + 'ensure_directory', 'normalize_path', + + # Distribution "precedence" constants + 'EGG_DIST', 'BINARY_DIST', 'SOURCE_DIST', 'CHECKOUT_DIST', 'DEVELOP_DIST', + + # "Provider" interfaces, implementations, and registration/lookup APIs + 'IMetadataProvider', 'IResourceProvider', 'FileMetadata', + 'PathMetadata', 'EggMetadata', 'EmptyProvider', 'empty_provider', + 'NullProvider', 'EggProvider', 'DefaultProvider', 'ZipProvider', + 'register_finder', 'register_namespace_handler', 'register_loader_type', + 'fixup_namespace_packages', 'get_importer', + + # Deprecated/backward compatibility only + 'run_main', 'AvailableDistributions', +] +class ResolutionError(Exception): + """Abstract base for dependency resolution errors""" + def __repr__(self): + return self.__class__.__name__+repr(self.args) + +class VersionConflict(ResolutionError): + """An already-installed version conflicts with the requested version""" + +class DistributionNotFound(ResolutionError): + """A requested distribution was not found""" + +class UnknownExtra(ResolutionError): + """Distribution doesn't have an "extra feature" of the given name""" + +_provider_factories = {} +PY_MAJOR = sys.version[:3] +EGG_DIST = 3 +BINARY_DIST = 2 +SOURCE_DIST = 1 +CHECKOUT_DIST = 0 +DEVELOP_DIST = -1 + +def register_loader_type(loader_type, provider_factory): + """Register `provider_factory` to make providers for `loader_type` + + `loader_type` is the type or class of a PEP 302 ``module.__loader__``, + and `provider_factory` is a function that, passed a *module* object, + returns an ``IResourceProvider`` for that module. 
+ """ + _provider_factories[loader_type] = provider_factory + +def get_provider(moduleOrReq): + """Return an IResourceProvider for the named module or requirement""" + if isinstance(moduleOrReq,Requirement): + return working_set.find(moduleOrReq) or require(str(moduleOrReq))[0] + try: + module = sys.modules[moduleOrReq] + except KeyError: + __import__(moduleOrReq) + module = sys.modules[moduleOrReq] + loader = getattr(module, '__loader__', None) + return _find_adapter(_provider_factories, loader)(module) + +def _macosx_vers(_cache=[]): + if not _cache: + from platform import mac_ver + _cache.append(mac_ver()[0].split('.')) + return _cache[0] + +def _macosx_arch(machine): + return {'PowerPC':'ppc', 'Power_Macintosh':'ppc'}.get(machine,machine) + +def get_build_platform(): + """Return this platform's string for platform-specific distributions + + XXX Currently this is the same as ``distutils.util.get_platform()``, but it + needs some hacks for Linux and Mac OS X. + """ + from distutils.util import get_platform + plat = get_platform() + if sys.platform == "darwin" and not plat.startswith('macosx-'): + try: + version = _macosx_vers() + machine = os.uname()[4].replace(" ", "_") + return "macosx-%d.%d-%s" % (int(version[0]), int(version[1]), + _macosx_arch(machine)) + except ValueError: + # if someone is running a non-Mac darwin system, this will fall + # through to the default implementation + pass + return plat + +macosVersionString = re.compile(r"macosx-(\d+)\.(\d+)-(.*)") +darwinVersionString = re.compile(r"darwin-(\d+)\.(\d+)\.(\d+)-(.*)") +get_platform = get_build_platform # XXX backward compat + + + + + + + +def compatible_platforms(provided,required): + """Can code for the `provided` platform run on the `required` platform? + + Returns true if either platform is ``None``, or the platforms are equal. + + XXX Needs compatibility checks for Linux and other unixy OSes. + """ + if provided is None or required is None or provided==required: + return True # easy case + + # Mac OS X special cases + reqMac = macosVersionString.match(required) + if reqMac: + provMac = macosVersionString.match(provided) + + # is this a Mac package? + if not provMac: + # this is backwards compatibility for packages built before + # setuptools 0.6. All packages built after this point will + # use the new macosx designation. + provDarwin = darwinVersionString.match(provided) + if provDarwin: + dversion = int(provDarwin.group(1)) + macosversion = "%s.%s" % (reqMac.group(1), reqMac.group(2)) + if dversion == 7 and macosversion >= "10.3" or \ + dversion == 8 and macosversion >= "10.4": + + #import warnings + #warnings.warn("Mac eggs should be rebuilt to " + # "use the macosx designation instead of darwin.", + # category=DeprecationWarning) + return True + return False # egg isn't macosx or legacy darwin + + # are they the same major version and machine type? + if provMac.group(1) != reqMac.group(1) or \ + provMac.group(3) != reqMac.group(3): + return False + + + + # is the required OS major update >= the provided one? 
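+    # (illustrative note) at this point e.g. provided='macosx-10.3-ppc'
+    # and required='macosx-10.5-ppc' have matching major version and
+    # machine type; the check below then accepts 10.3 <= 10.5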
+ if int(provMac.group(2)) > int(reqMac.group(2)): + return False + + return True + + # XXX Linux and other platforms' special cases should go here + return False + + +def run_script(dist_spec, script_name): + """Locate distribution `dist_spec` and run its `script_name` script""" + ns = sys._getframe(1).f_globals + name = ns['__name__'] + ns.clear() + ns['__name__'] = name + require(dist_spec)[0].run_script(script_name, ns) + +run_main = run_script # backward compatibility + +def get_distribution(dist): + """Return a current distribution object for a Requirement or string""" + if isinstance(dist,basestring): dist = Requirement.parse(dist) + if isinstance(dist,Requirement): dist = get_provider(dist) + if not isinstance(dist,Distribution): + raise TypeError("Expected string, Requirement, or Distribution", dist) + return dist + +def load_entry_point(dist, group, name): + """Return `name` entry point of `group` for `dist` or raise ImportError""" + return get_distribution(dist).load_entry_point(group, name) + +def get_entry_map(dist, group=None): + """Return the entry point map for `group`, or the full entry map""" + return get_distribution(dist).get_entry_map(group) + +def get_entry_info(dist, group, name): + """Return the EntryPoint object for `group`+`name`, or ``None``""" + return get_distribution(dist).get_entry_info(group, name) + + +class IMetadataProvider: + + def has_metadata(name): + """Does the package's distribution contain the named metadata?""" + + def get_metadata(name): + """The named metadata resource as a string""" + + def get_metadata_lines(name): + """Yield named metadata resource as list of non-blank non-comment lines + + Leading and trailing whitespace is stripped from each line, and lines + with ``#`` as the first non-blank character are omitted.""" + + def metadata_isdir(name): + """Is the named metadata a directory? (like ``os.path.isdir()``)""" + + def metadata_listdir(name): + """List of metadata names in the directory (like ``os.listdir()``)""" + + def run_script(script_name, namespace): + """Execute the named script in the supplied namespace dictionary""" + + + + + + + + + + + + + + + + + + + +class IResourceProvider(IMetadataProvider): + """An object that provides access to package resources""" + + def get_resource_filename(manager, resource_name): + """Return a true filesystem path for `resource_name` + + `manager` must be an ``IResourceManager``""" + + def get_resource_stream(manager, resource_name): + """Return a readable file-like object for `resource_name` + + `manager` must be an ``IResourceManager``""" + + def get_resource_string(manager, resource_name): + """Return a string containing the contents of `resource_name` + + `manager` must be an ``IResourceManager``""" + + def has_resource(resource_name): + """Does the package contain the named resource?""" + + def resource_isdir(resource_name): + """Is the named resource a directory? 
(like ``os.path.isdir()``)""" + + def resource_listdir(resource_name): + """List of resource names in the directory (like ``os.listdir()``)""" + + + + + + + + + + + + + + + +class WorkingSet(object): + """A collection of active distributions on sys.path (or a similar list)""" + + def __init__(self, entries=None): + """Create working set from list of path entries (default=sys.path)""" + self.entries = [] + self.entry_keys = {} + self.by_key = {} + self.callbacks = [] + + if entries is None: + entries = sys.path + + for entry in entries: + self.add_entry(entry) + + + def add_entry(self, entry): + """Add a path item to ``.entries``, finding any distributions on it + + ``find_distributions(entry, True)`` is used to find distributions + corresponding to the path entry, and they are added. `entry` is + always appended to ``.entries``, even if it is already present. + (This is because ``sys.path`` can contain the same value more than + once, and the ``.entries`` of the ``sys.path`` WorkingSet should always + equal ``sys.path``.) + """ + self.entry_keys.setdefault(entry, []) + self.entries.append(entry) + for dist in find_distributions(entry, True): + self.add(dist, entry, False) + + + def __contains__(self,dist): + """True if `dist` is the active distribution for its project""" + return self.by_key.get(dist.key) == dist + + + + + + def find(self, req): + """Find a distribution matching requirement `req` + + If there is an active distribution for the requested project, this + returns it as long as it meets the version requirement specified by + `req`. But, if there is an active distribution for the project and it + does *not* meet the `req` requirement, ``VersionConflict`` is raised. + If there is no active distribution for the requested project, ``None`` + is returned. + """ + dist = self.by_key.get(req.key) + if dist is not None and dist not in req: + raise VersionConflict(dist,req) # XXX add more info + else: + return dist + + def iter_entry_points(self, group, name=None): + """Yield entry point objects from `group` matching `name` + + If `name` is None, yields all entry points in `group` from all + distributions in the working set, otherwise only ones matching + both `group` and `name` are yielded (in distribution order). + """ + for dist in self: + entries = dist.get_entry_map(group) + if name is None: + for ep in entries.values(): + yield ep + elif name in entries: + yield entries[name] + + def run_script(self, requires, script_name): + """Locate distribution for `requires` and run `script_name` script""" + ns = sys._getframe(1).f_globals + name = ns['__name__'] + ns.clear() + ns['__name__'] = name + self.require(requires)[0].run_script(script_name, ns) + + + + def __iter__(self): + """Yield distributions for non-duplicate projects in the working set + + The yield order is the order in which the items' path entries were + added to the working set. + """ + seen = {} + for item in self.entries: + for key in self.entry_keys[item]: + if key not in seen: + seen[key]=1 + yield self.by_key[key] + + def add(self, dist, entry=None, insert=True): + """Add `dist` to working set, associated with `entry` + + If `entry` is unspecified, it defaults to the ``.location`` of `dist`. + On exit from this routine, `entry` is added to the end of the working + set's ``.entries`` (if it wasn't already present). + + `dist` is only added to the working set if it's for a project that + doesn't already have a distribution in the set. 
If it's added, any
+        callbacks registered with the ``subscribe()`` method will be called.
+        """
+        if insert:
+            dist.insert_on(self.entries, entry)
+
+        if entry is None:
+            entry = dist.location
+        keys = self.entry_keys.setdefault(entry,[])
+        keys2 = self.entry_keys.setdefault(dist.location,[])
+        if dist.key in self.by_key:
+            return      # ignore hidden distros
+
+        # If we have a __requires__ then we can already tell if this
+        # dist is unsatisfactory, in which case we won't add it.
+        if __requires__ is not None:
+            for thisreqstr in __requires__:
+                for thisreq in parse_requirements(thisreqstr):
+                    if thisreq.key == dist.key:
+                        if dist not in thisreq:
+                            return
+
+
+        self.by_key[dist.key] = dist
+        if dist.key not in keys:
+            keys.append(dist.key)
+        if dist.key not in keys2:
+            keys2.append(dist.key)
+        self._added_new(dist)
+
+    def resolve(self, requirements, env=None, installer=None):
+        """List all distributions needed to (recursively) meet `requirements`
+
+        `requirements` must be a sequence of ``Requirement`` objects.  `env`,
+        if supplied, should be an ``Environment`` instance.  If
+        not supplied, it defaults to all distributions available within any
+        entry or distribution in the working set.  `installer`, if supplied,
+        will be invoked with each requirement that cannot be met by an
+        already-installed distribution; it should return a ``Distribution`` or
+        ``None``.
+        """
+
+        requirements = list(requirements)[::-1]  # set up the stack
+        processed = {}  # set of processed requirements
+        best = {}  # key -> dist
+        to_activate = []
+
+        while requirements:
+            req = requirements.pop(0)   # process dependencies breadth-first
+            if req in processed:
+                # Ignore cyclic or redundant dependencies
+                continue
+            dist = best.get(req.key)
+            if dist is None:
+                # Find the best distribution and add it to the map
+                dist = self.by_key.get(req.key)
+                if dist is None:
+                    if env is None:
+                        env = Environment(self.entries)
+                    dist = best[req.key] = env.best_match(req, self, installer)
+                    if dist is None:
+                        raise DistributionNotFound(req)  # XXX put more info here
+                to_activate.append(dist)
+            if dist not in req:
+                # Oops, the "best" so far conflicts with a dependency
+                raise VersionConflict(dist,req)  # XXX put more info here
+            requirements.extend(dist.requires(req.extras)[::-1])
+            processed[req] = True
+
+        return to_activate  # return list of distros to activate
+
+    def find_plugins(self,
+        plugin_env, full_env=None, installer=None, fallback=True
+    ):
+        """Find all activatable distributions in `plugin_env`
+
+        Example usage::
+
+            distributions, errors = working_set.find_plugins(
+                Environment(plugin_dirlist)
+            )
+            map(working_set.add, distributions)  # add plugins+libs to sys.path
+            print "Couldn't load", errors        # display errors
+
+        The `plugin_env` should be an ``Environment`` instance that contains
+        only distributions that are in the project's "plugin directory" or
+        directories.  The `full_env`, if supplied, should be an ``Environment``
+        that contains all currently-available distributions.  If `full_env` is
+        not supplied, one is created automatically from the ``WorkingSet`` this
+        method is called on, which will typically mean that every directory on
+        ``sys.path`` will be scanned for distributions.
+
+        `installer` is a standard installer callback as used by the
+        ``resolve()`` method.  The `fallback` flag indicates whether we should
+        attempt to resolve older versions of a plugin if the newest version
+        cannot be resolved.
+ + This method returns a 2-tuple: (`distributions`, `error_info`), where + `distributions` is a list of the distributions found in `plugin_env` + that were loadable, along with any other distributions that are needed + to resolve their dependencies. `error_info` is a dictionary mapping + unloadable plugin distributions to an exception instance describing the + error that occurred. Usually this will be a ``DistributionNotFound`` or + ``VersionConflict`` instance. + """ + + plugin_projects = list(plugin_env) + plugin_projects.sort() # scan project names in alphabetic order + + error_info = {} + distributions = {} + + if full_env is None: + env = Environment(self.entries) + env += plugin_env + else: + env = full_env + plugin_env + + shadow_set = self.__class__([]) + map(shadow_set.add, self) # put all our entries in shadow_set + + for project_name in plugin_projects: + + for dist in plugin_env[project_name]: + + req = [dist.as_requirement()] + + try: + resolvees = shadow_set.resolve(req, env, installer) + + except ResolutionError,v: + error_info[dist] = v # save error info + if fallback: + continue # try the next older version of project + else: + break # give up on this project, keep going + + else: + map(shadow_set.add, resolvees) + distributions.update(dict.fromkeys(resolvees)) + + # success, no need to try any more versions of this project + break + + distributions = list(distributions) + distributions.sort() + + return distributions, error_info + + + + + + def require(self, *requirements): + """Ensure that distributions matching `requirements` are activated + + `requirements` must be a string or a (possibly-nested) sequence + thereof, specifying the distributions and versions required. The + return value is a sequence of the distributions that needed to be + activated to fulfill the requirements; all relevant distributions are + included, even if they were already activated in this working set. + """ + needed = self.resolve(parse_requirements(requirements)) + + for dist in needed: + self.add(dist) + + return needed + + def subscribe(self, callback): + """Invoke `callback` for all distributions (including existing ones)""" + if callback in self.callbacks: + return + self.callbacks.append(callback) + for dist in self: + callback(dist) + + def _added_new(self, dist): + for callback in self.callbacks: + callback(dist) + + def __getstate__(self): + return ( + self.entries[:], self.entry_keys.copy(), self.by_key.copy(), + self.callbacks[:] + ) + + def __setstate__(self, (entries, keys, by_key, callbacks)): + self.entries = entries[:] + self.entry_keys = keys.copy() + self.by_key = by_key.copy() + self.callbacks = callbacks[:] + + +class Environment(object): + """Searchable snapshot of distributions on a search path""" + + def __init__(self, search_path=None, platform=get_supported_platform(), python=PY_MAJOR): + """Snapshot distributions available on a search path + + Any distributions found on `search_path` are added to the environment. + `search_path` should be a sequence of ``sys.path`` items. If not + supplied, ``sys.path`` is used. + + `platform` is an optional string specifying the name of the platform + that platform-specific distributions must be compatible with. If + unspecified, it defaults to the current platform. `python` is an + optional string naming the desired version of Python (e.g. ``'2.4'``); + it defaults to the current version. 
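+
+        A minimal usage sketch (``'plugins'`` is a hypothetical search
+        path entry; ``env[project_name]`` lists distributions newest
+        first)::
+
+            env = Environment(['plugins'])
+            for project_name in env:
+                newest_dist = env[project_name][0]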
+ + You may explicitly set `platform` (and/or `python`) to ``None`` if you + wish to map *all* distributions, not just those compatible with the + running platform or Python version. + """ + self._distmap = {} + self._cache = {} + self.platform = platform + self.python = python + self.scan(search_path) + + def can_add(self, dist): + """Is distribution `dist` acceptable for this environment? + + The distribution must match the platform and python version + requirements specified when this environment was created, or False + is returned. + """ + return (self.python is None or dist.py_version is None + or dist.py_version==self.python) \ + and compatible_platforms(dist.platform,self.platform) + + def remove(self, dist): + """Remove `dist` from the environment""" + self._distmap[dist.key].remove(dist) + + def scan(self, search_path=None): + """Scan `search_path` for distributions usable in this environment + + Any distributions found are added to the environment. + `search_path` should be a sequence of ``sys.path`` items. If not + supplied, ``sys.path`` is used. Only distributions conforming to + the platform/python version defined at initialization are added. + """ + if search_path is None: + search_path = sys.path + + for item in search_path: + for dist in find_distributions(item): + self.add(dist) + + def __getitem__(self,project_name): + """Return a newest-to-oldest list of distributions for `project_name` + """ + try: + return self._cache[project_name] + except KeyError: + project_name = project_name.lower() + if project_name not in self._distmap: + return [] + + if project_name not in self._cache: + dists = self._cache[project_name] = self._distmap[project_name] + _sort_dists(dists) + + return self._cache[project_name] + + def add(self,dist): + """Add `dist` if we ``can_add()`` it and it isn't already added""" + if self.can_add(dist) and dist.has_version(): + dists = self._distmap.setdefault(dist.key,[]) + if dist not in dists: + dists.append(dist) + if dist.key in self._cache: + _sort_dists(self._cache[dist.key]) + + + def best_match(self, req, working_set, installer=None): + """Find distribution best matching `req` and usable on `working_set` + + This calls the ``find(req)`` method of the `working_set` to see if a + suitable distribution is already active. (This may raise + ``VersionConflict`` if an unsuitable version of the project is already + active in the specified `working_set`.) + + If a suitable distribution isn't active, this method returns the + newest platform-dependent distribution in the environment that meets + the ``Requirement`` in `req`. If no suitable platform-dependent + distribution is found, then the newest platform-independent + distribution that meets the requirement is returned. (A platform- + dependent distribution will typically have code compiled or + specialized for that platform.) + + Otherwise, if `installer` is supplied, then the result of calling the + environment's ``obtain(req, installer)`` method will be returned. + """ + dist = working_set.find(req) + if dist is not None: + return dist + + # first try to find a platform-dependent dist + for dist in self[req.key]: + if dist in req and dist.platform is not None: + return dist + + # then try any other dist + for dist in self[req.key]: + if dist in req: + return dist + + return self.obtain(req, installer) # try and download/install + + def obtain(self, requirement, installer=None): + """Obtain a distribution matching `requirement` (e.g. via download) + + Obtain a distro that matches requirement (e.g. 
via download). In the + base ``Environment`` class, this routine just returns + ``installer(requirement)``, unless `installer` is None, in which case + None is returned instead. This method is a hook that allows subclasses + to attempt other ways of obtaining a distribution before falling back + to the `installer` argument.""" + if installer is not None: + return installer(requirement) + + def __iter__(self): + """Yield the unique project names of the available distributions""" + for key in self._distmap.keys(): + if self[key]: yield key + + + + + def __iadd__(self, other): + """In-place addition of a distribution or environment""" + if isinstance(other,Distribution): + self.add(other) + elif isinstance(other,Environment): + for project in other: + for dist in other[project]: + self.add(dist) + else: + raise TypeError("Can't add %r to environment" % (other,)) + return self + + def __add__(self, other): + """Add an environment or distribution to an environment""" + new = self.__class__([], platform=None, python=None) + for env in self, other: + new += env + return new + + +AvailableDistributions = Environment # XXX backward compatibility + + +class ExtractionError(RuntimeError): + """An error occurred extracting a resource + + The following attributes are available from instances of this exception: + + manager + The resource manager that raised this exception + + cache_path + The base directory for resource extraction + + original_error + The exception instance that caused extraction to fail + """ + + + + +class ResourceManager: + """Manage resource extraction and packages""" + extraction_path = None + + def __init__(self): + self.cached_files = {} + + def resource_exists(self, package_or_requirement, resource_name): + """Does the named resource exist?""" + return get_provider(package_or_requirement).has_resource(resource_name) + + def resource_isdir(self, package_or_requirement, resource_name): + """Is the named resource an existing directory?""" + return get_provider(package_or_requirement).resource_isdir( + resource_name + ) + + def resource_filename(self, package_or_requirement, resource_name): + """Return a true filesystem path for specified resource""" + return get_provider(package_or_requirement).get_resource_filename( + self, resource_name + ) + + def resource_stream(self, package_or_requirement, resource_name): + """Return a readable file-like object for specified resource""" + return get_provider(package_or_requirement).get_resource_stream( + self, resource_name + ) + + def resource_string(self, package_or_requirement, resource_name): + """Return specified resource as a string""" + return get_provider(package_or_requirement).get_resource_string( + self, resource_name + ) + + def resource_listdir(self, package_or_requirement, resource_name): + """List the contents of the named resource directory""" + return get_provider(package_or_requirement).resource_listdir( + resource_name + ) + + def extraction_error(self): + """Give an error message for problems extracting file(s)""" + + old_exc = sys.exc_info()[1] + cache_path = self.extraction_path or get_default_cache() + + err = ExtractionError("""Can't extract file(s) to egg cache + +The following error occurred while trying to extract file(s) to the Python egg +cache: + + %s + +The Python egg cache directory is currently set to: + + %s + +Perhaps your account does not have write access to this directory? You can +change the cache directory by setting the PYTHON_EGG_CACHE environment +variable to point to an accessible directory. 
+""" % (old_exc, cache_path) + ) + err.manager = self + err.cache_path = cache_path + err.original_error = old_exc + raise err + + + + + + + + + + + + + + + + def get_cache_path(self, archive_name, names=()): + """Return absolute location in cache for `archive_name` and `names` + + The parent directory of the resulting path will be created if it does + not already exist. `archive_name` should be the base filename of the + enclosing egg (which may not be the name of the enclosing zipfile!), + including its ".egg" extension. `names`, if provided, should be a + sequence of path name parts "under" the egg's extraction location. + + This method should only be called by resource providers that need to + obtain an extraction location, and only for names they intend to + extract, as it tracks the generated names for possible cleanup later. + """ + extract_path = self.extraction_path or get_default_cache() + target_path = os.path.join(extract_path, archive_name+'-tmp', *names) + try: + _bypass_ensure_directory(target_path) + except: + self.extraction_error() + + self.cached_files[target_path] = 1 + return target_path + + + + + + + + + + + + + + + + + + + + def postprocess(self, tempname, filename): + """Perform any platform-specific postprocessing of `tempname` + + This is where Mac header rewrites should be done; other platforms don't + have anything special they should do. + + Resource providers should call this method ONLY after successfully + extracting a compressed resource. They must NOT call it on resources + that are already in the filesystem. + + `tempname` is the current (temporary) name of the file, and `filename` + is the name it will be renamed to by the caller after this routine + returns. + """ + + if os.name == 'posix': + # Make the resource executable + mode = ((os.stat(tempname).st_mode) | 0555) & 07777 + os.chmod(tempname, mode) + + + + + + + + + + + + + + + + + + + + + + + def set_extraction_path(self, path): + """Set the base path where resources will be extracted to, if needed. + + If you do not call this routine before any extractions take place, the + path defaults to the return value of ``get_default_cache()``. (Which + is based on the ``PYTHON_EGG_CACHE`` environment variable, with various + platform-specific fallbacks. See that routine's documentation for more + details.) + + Resources are extracted to subdirectories of this path based upon + information given by the ``IResourceProvider``. You may set this to a + temporary directory, but then you must call ``cleanup_resources()`` to + delete the extracted files when done. There is no guarantee that + ``cleanup_resources()`` will be able to remove all extracted files. + + (Note: you may not change the extraction path for a given resource + manager once resources have been extracted, unless you first call + ``cleanup_resources()``.) + """ + if self.cached_files: + raise ValueError( + "Can't change extraction path, files already extracted" + ) + + self.extraction_path = path + + def cleanup_resources(self, force=False): + """ + Delete all extracted resource files and directories, returning a list + of the file and directory names that could not be successfully removed. + This function does not have any concurrency protection, so it should + generally only be called when the extraction path is a temporary + directory exclusive to a single process. 
This method is not
+        automatically called; you must call it explicitly or register it as an
+        ``atexit`` function if you wish to ensure cleanup of a temporary
+        directory used for extractions.
+        """
+        # XXX
+
+
+
+def get_default_cache():
+    """Determine the default cache location
+
+    This returns the ``PYTHON_EGG_CACHE`` environment variable, if set.
+    Otherwise, on Windows, it returns a "Python-Eggs" subdirectory of the
+    "Application Data" directory.  On all other systems, it's "~/.python-eggs".
+    """
+    try:
+        return os.environ['PYTHON_EGG_CACHE']
+    except KeyError:
+        pass
+
+    if os.name!='nt':
+        return os.path.expanduser('~/.python-eggs')
+
+    app_data = 'Application Data'   # XXX this may be locale-specific!
+    app_homes = [
+        (('APPDATA',), None),       # best option, should be locale-safe
+        (('USERPROFILE',), app_data),
+        (('HOMEDRIVE','HOMEPATH'), app_data),
+        (('HOMEPATH',), app_data),
+        (('HOME',), None),
+        (('WINDIR',), app_data),    # 95/98/ME
+    ]
+
+    for keys, subdir in app_homes:
+        dirname = ''
+        for key in keys:
+            if key in os.environ:
+                dirname = os.path.join(dirname, os.environ[key])
+            else:
+                break
+        else:
+            if subdir:
+                dirname = os.path.join(dirname,subdir)
+            return os.path.join(dirname, 'Python-Eggs')
+    else:
+        raise RuntimeError(
+            "Please set the PYTHON_EGG_CACHE environment variable"
+        )
+
+def safe_name(name):
+    """Convert an arbitrary string to a standard distribution name
+
+    Any runs of characters other than alphanumerics and dots are replaced
+    with a single '-'.
+    """
+    return re.sub('[^A-Za-z0-9.]+', '-', name)
+
+
+def safe_version(version):
+    """Convert an arbitrary string to a standard version string
+
+    Spaces become dots, and runs of other characters that are not
+    alphanumerics or dots become a single dash.
+    """
+    version = version.replace(' ','.')
+    return re.sub('[^A-Za-z0-9.]+', '-', version)
+
+
+def safe_extra(extra):
+    """Convert an arbitrary string to a standard 'extra' name
+
+    Any runs of characters other than alphanumerics and dots are replaced
+    with a single '_', and the result is always lowercased.
+    """
+    return re.sub('[^A-Za-z0-9.]+', '_', extra).lower()
+
+
+def to_filename(name):
+    """Convert a project or version name to its filename-escaped form
+
+    Any '-' characters are currently replaced with '_'.
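+
+    For example::
+
+        >>> to_filename('zfec-1.0')
+        'zfec_1.0'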
+ """ + return name.replace('-','_') + + + + + + + + +class NullProvider: + """Try to implement resources and metadata for arbitrary PEP 302 loaders""" + + egg_name = None + egg_info = None + loader = None + + def __init__(self, module): + self.loader = getattr(module, '__loader__', None) + self.module_path = os.path.dirname(getattr(module, '__file__', '')) + + def get_resource_filename(self, manager, resource_name): + return self._fn(self.module_path, resource_name) + + def get_resource_stream(self, manager, resource_name): + return StringIO(self.get_resource_string(manager, resource_name)) + + def get_resource_string(self, manager, resource_name): + return self._get(self._fn(self.module_path, resource_name)) + + def has_resource(self, resource_name): + return self._has(self._fn(self.module_path, resource_name)) + + def has_metadata(self, name): + return self.egg_info and self._has(self._fn(self.egg_info,name)) + + def get_metadata(self, name): + if not self.egg_info: + return "" + return self._get(self._fn(self.egg_info,name)) + + def get_metadata_lines(self, name): + return yield_lines(self.get_metadata(name)) + + def resource_isdir(self,resource_name): + return self._isdir(self._fn(self.module_path, resource_name)) + + def metadata_isdir(self,name): + return self.egg_info and self._isdir(self._fn(self.egg_info,name)) + + + def resource_listdir(self,resource_name): + return self._listdir(self._fn(self.module_path,resource_name)) + + def metadata_listdir(self,name): + if self.egg_info: + return self._listdir(self._fn(self.egg_info,name)) + return [] + + def run_script(self,script_name,namespace): + script = 'scripts/'+script_name + if not self.has_metadata(script): + raise ResolutionError("No script named %r" % script_name) + script_text = self.get_metadata(script).replace('\r\n','\n') + script_text = script_text.replace('\r','\n') + script_filename = self._fn(self.egg_info,script) + namespace['__file__'] = script_filename + if os.path.exists(script_filename): + execfile(script_filename, namespace, namespace) + else: + from linecache import cache + cache[script_filename] = ( + len(script_text), 0, script_text.split('\n'), script_filename + ) + script_code = compile(script_text,script_filename,'exec') + exec script_code in namespace, namespace + + def _has(self, path): + raise NotImplementedError( + "Can't perform this operation for unregistered loader type" + ) + + def _isdir(self, path): + raise NotImplementedError( + "Can't perform this operation for unregistered loader type" + ) + + def _listdir(self, path): + raise NotImplementedError( + "Can't perform this operation for unregistered loader type" + ) + + def _fn(self, base, resource_name): + if resource_name: + return os.path.join(base, *resource_name.split('/')) + return base + + def _get(self, path): + if hasattr(self.loader, 'get_data'): + return self.loader.get_data(path) + raise NotImplementedError( + "Can't perform this operation for loaders without 'get_data()'" + ) + +register_loader_type(object, NullProvider) + + +class EggProvider(NullProvider): + """Provider based on a virtual filesystem""" + + def __init__(self,module): + NullProvider.__init__(self,module) + self._setup_prefix() + + def _setup_prefix(self): + # we assume here that our metadata may be nested inside a "basket" + # of multiple eggs; that's why we use module_path instead of .archive + path = self.module_path + old = None + while path!=old: + if path.lower().endswith('.egg'): + self.egg_name = os.path.basename(path) + self.egg_info = os.path.join(path, 
'EGG-INFO') + self.egg_root = path + break + old = path + path, base = os.path.split(path) + + + + + + +class DefaultProvider(EggProvider): + """Provides access to package resources in the filesystem""" + + def _has(self, path): + return os.path.exists(path) + + def _isdir(self,path): + return os.path.isdir(path) + + def _listdir(self,path): + return os.listdir(path) + + def get_resource_stream(self, manager, resource_name): + return open(self._fn(self.module_path, resource_name), 'rb') + + def _get(self, path): + stream = open(path, 'rb') + try: + return stream.read() + finally: + stream.close() + +register_loader_type(type(None), DefaultProvider) + + +class EmptyProvider(NullProvider): + """Provider that returns nothing for all requests""" + + _isdir = _has = lambda self,path: False + _get = lambda self,path: '' + _listdir = lambda self,path: [] + module_path = None + + def __init__(self): + pass + +empty_provider = EmptyProvider() + + + + +class ZipProvider(EggProvider): + """Resource support for zips and eggs""" + + eagers = None + + def __init__(self, module): + EggProvider.__init__(self,module) + self.zipinfo = zipimport._zip_directory_cache[self.loader.archive] + self.zip_pre = self.loader.archive+os.sep + + def _zipinfo_name(self, fspath): + # Convert a virtual filename (full path to file) into a zipfile subpath + # usable with the zipimport directory cache for our target archive + if fspath.startswith(self.zip_pre): + return fspath[len(self.zip_pre):] + raise AssertionError( + "%s is not a subpath of %s" % (fspath,self.zip_pre) + ) + + def _parts(self,zip_path): + # Convert a zipfile subpath into an egg-relative path part list + fspath = self.zip_pre+zip_path # pseudo-fs path + if fspath.startswith(self.egg_root+os.sep): + return fspath[len(self.egg_root)+1:].split(os.sep) + raise AssertionError( + "%s is not a subpath of %s" % (fspath,self.egg_root) + ) + + def get_resource_filename(self, manager, resource_name): + if not self.egg_name: + raise NotImplementedError( + "resource_filename() only supported for .egg, not .zip" + ) + # no need to lock for extraction, since we use temp names + zip_path = self._resource_to_zip(resource_name) + eagers = self._get_eager_resources() + if '/'.join(self._parts(zip_path)) in eagers: + for name in eagers: + self._extract_resource(manager, self._eager_to_zip(name)) + return self._extract_resource(manager, zip_path) + + def _extract_resource(self, manager, zip_path): + + if zip_path in self._index(): + for name in self._index()[zip_path]: + last = self._extract_resource( + manager, os.path.join(zip_path, name) + ) + return os.path.dirname(last) # return the extracted directory name + + zip_stat = self.zipinfo[zip_path] + t,d,size = zip_stat[5], zip_stat[6], zip_stat[3] + date_time = ( + (d>>9)+1980, (d>>5)&0xF, d&0x1F, # ymd + (t&0xFFFF)>>11, (t>>5)&0x3F, (t&0x1F) * 2, 0, 0, -1 # hms, etc. 
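+            # (editor's note) zip directory entries store MS-DOS packed
+            # values: the date word holds year-since-1980/month/day and
+            # the time word holds hour/minute/second-in-2s-units, which
+            # the shifts and masks above unpack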
+ ) + timestamp = time.mktime(date_time) + + try: + real_path = manager.get_cache_path( + self.egg_name, self._parts(zip_path) + ) + + if os.path.isfile(real_path): + stat = os.stat(real_path) + if stat.st_size==size and stat.st_mtime==timestamp: + # size and stamp match, don't bother extracting + return real_path + + outf, tmpnam = _mkstemp(".$extract", dir=os.path.dirname(real_path)) + os.write(outf, self.loader.get_data(zip_path)) + os.close(outf) + utime(tmpnam, (timestamp,timestamp)) + manager.postprocess(tmpnam, real_path) + + try: + rename(tmpnam, real_path) + + except os.error: + if os.path.isfile(real_path): + stat = os.stat(real_path) + + if stat.st_size==size and stat.st_mtime==timestamp: + # size and stamp match, somebody did it just ahead of + # us, so we're done + return real_path + elif os.name=='nt': # Windows, del old file and retry + unlink(real_path) + rename(tmpnam, real_path) + return real_path + raise + + except os.error: + manager.extraction_error() # report a user-friendly error + + return real_path + + def _get_eager_resources(self): + if self.eagers is None: + eagers = [] + for name in ('native_libs.txt', 'eager_resources.txt'): + if self.has_metadata(name): + eagers.extend(self.get_metadata_lines(name)) + self.eagers = eagers + return self.eagers + + def _index(self): + try: + return self._dirindex + except AttributeError: + ind = {} + for path in self.zipinfo: + parts = path.split(os.sep) + while parts: + parent = os.sep.join(parts[:-1]) + if parent in ind: + ind[parent].append(parts[-1]) + break + else: + ind[parent] = [parts.pop()] + self._dirindex = ind + return ind + + def _has(self, fspath): + zip_path = self._zipinfo_name(fspath) + return zip_path in self.zipinfo or zip_path in self._index() + + def _isdir(self,fspath): + return self._zipinfo_name(fspath) in self._index() + + def _listdir(self,fspath): + return list(self._index().get(self._zipinfo_name(fspath), ())) + + def _eager_to_zip(self,resource_name): + return self._zipinfo_name(self._fn(self.egg_root,resource_name)) + + def _resource_to_zip(self,resource_name): + return self._zipinfo_name(self._fn(self.module_path,resource_name)) + +register_loader_type(zipimport.zipimporter, ZipProvider) + + + + + + + + + + + + + + + + + + + + + + + + +class FileMetadata(EmptyProvider): + """Metadata handler for standalone PKG-INFO files + + Usage:: + + metadata = FileMetadata("/path/to/PKG-INFO") + + This provider rejects all data and metadata requests except for PKG-INFO, + which is treated as existing, and will be the contents of the file at + the provided location. 
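+
+    A typical follow-on step (editor's sketch; the paths are
+    hypothetical, and this mirrors how ``find_on_path()`` below pairs a
+    ``FileMetadata`` with a distribution)::
+
+        dist = Distribution.from_location(
+            "/path/to", "PackageName.egg-info", metadata
+        )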
+    """
+
+    def __init__(self,path):
+        self.path = path
+
+    def has_metadata(self,name):
+        return name=='PKG-INFO'
+
+    def get_metadata(self,name):
+        if name=='PKG-INFO':
+            return open(self.path,'rU').read()
+        raise KeyError("No metadata except PKG-INFO is available")
+
+    def get_metadata_lines(self,name):
+        return yield_lines(self.get_metadata(name))
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+class PathMetadata(DefaultProvider):
+    """Metadata provider for egg directories
+
+    Usage::
+
+        # Development eggs:
+
+        egg_info = "/path/to/PackageName.egg-info"
+        base_dir = os.path.dirname(egg_info)
+        metadata = PathMetadata(base_dir, egg_info)
+        dist_name = os.path.splitext(os.path.basename(egg_info))[0]
+        dist = Distribution(base_dir, project_name=dist_name, metadata=metadata)
+
+        # Unpacked egg directories:
+
+        egg_path = "/path/to/PackageName-ver-pyver-etc.egg"
+        metadata = PathMetadata(egg_path, os.path.join(egg_path,'EGG-INFO'))
+        dist = Distribution.from_filename(egg_path, metadata=metadata)
+    """
+
+    def __init__(self, path, egg_info):
+        self.module_path = path
+        self.egg_info = egg_info
+
+
+class EggMetadata(ZipProvider):
+    """Metadata provider for .egg files"""
+
+    def __init__(self, importer):
+        """Create a metadata provider from a zipimporter"""
+
+        self.zipinfo = zipimport._zip_directory_cache[importer.archive]
+        self.zip_pre = importer.archive+os.sep
+        self.loader = importer
+        if importer.prefix:
+            self.module_path = os.path.join(importer.archive, importer.prefix)
+        else:
+            self.module_path = importer.archive
+        self._setup_prefix()
+
+
+class ImpWrapper:
+    """PEP 302 Importer that wraps Python's "normal" import algorithm"""
+
+    def __init__(self, path=None):
+        self.path = path
+
+    def find_module(self, fullname, path=None):
+        subname = fullname.split(".")[-1]
+        if subname != fullname and self.path is None:
+            return None
+        if self.path is None:
+            path = None
+        else:
+            path = [self.path]
+        try:
+            file, filename, etc = imp.find_module(subname, path)
+        except ImportError:
+            return None
+        return ImpLoader(file, filename, etc)
+
+
+class ImpLoader:
+    """PEP 302 Loader that wraps Python's "normal" import algorithm"""
+
+    def __init__(self, file, filename, etc):
+        self.file = file
+        self.filename = filename
+        self.etc = etc
+
+    def load_module(self, fullname):
+        try:
+            mod = imp.load_module(fullname, self.file, self.filename, self.etc)
+        finally:
+            if self.file: self.file.close()
+        # Note: we don't set __loader__ because we want the module to look
+        # normal; i.e. this is just a wrapper for standard import machinery
+        return mod
+
+
+
+
+def get_importer(path_item):
+    """Retrieve a PEP 302 "importer" for the given path item
+
+    If there is no importer, this returns a wrapper around the builtin import
+    machinery.  The returned importer is only cached if it was created by a
+    path hook.
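+
+    Usage sketch (any ``sys.path`` entry may be passed)::
+
+        importer = get_importer(sys.path[0])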
+ """ + try: + importer = sys.path_importer_cache[path_item] + except KeyError: + for hook in sys.path_hooks: + try: + importer = hook(path_item) + except ImportError: + pass + else: + break + else: + importer = None + + sys.path_importer_cache.setdefault(path_item,importer) + if importer is None: + try: + importer = ImpWrapper(path_item) + except ImportError: + pass + return importer + + + + + + + + + + + + + + +_declare_state('dict', _distribution_finders = {}) + +def register_finder(importer_type, distribution_finder): + """Register `distribution_finder` to find distributions in sys.path items + + `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item + handler), and `distribution_finder` is a callable that, passed a path + item and the importer instance, yields ``Distribution`` instances found on + that path item. See ``pkg_resources.find_on_path`` for an example.""" + _distribution_finders[importer_type] = distribution_finder + + +def find_distributions(path_item, only=False): + """Yield distributions accessible via `path_item`""" + importer = get_importer(path_item) + finder = _find_adapter(_distribution_finders, importer) + return finder(importer, path_item, only) + +def find_in_zip(importer, path_item, only=False): + metadata = EggMetadata(importer) + if metadata.has_metadata('PKG-INFO'): + yield Distribution.from_filename(path_item, metadata=metadata) + if only: + return # don't yield nested distros + for subitem in metadata.resource_listdir('/'): + if subitem.endswith('.egg'): + subpath = os.path.join(path_item, subitem) + for dist in find_in_zip(zipimport.zipimporter(subpath), subpath): + yield dist + +register_finder(zipimport.zipimporter, find_in_zip) + +def StringIO(*args, **kw): + """Thunk to load the real StringIO on demand""" + global StringIO + try: + from cStringIO import StringIO + except ImportError: + from StringIO import StringIO + return StringIO(*args,**kw) + +def find_nothing(importer, path_item, only=False): + return () +register_finder(object,find_nothing) + +def find_on_path(importer, path_item, only=False): + """Yield distributions accessible on a sys.path directory""" + path_item = _normalize_cached(path_item) + + if os.path.isdir(path_item) and os.access(path_item, os.R_OK): + if path_item.lower().endswith('.egg'): + # unpacked egg + yield Distribution.from_filename( + path_item, metadata=PathMetadata( + path_item, os.path.join(path_item,'EGG-INFO') + ) + ) + else: + # scan for .egg and .egg-info in directory + for entry in os.listdir(path_item): + lower = entry.lower() + if lower.endswith('.egg-info'): + fullpath = os.path.join(path_item, entry) + if os.path.isdir(fullpath): + # egg-info directory, allow getting metadata + metadata = PathMetadata(path_item, fullpath) + else: + metadata = FileMetadata(fullpath) + yield Distribution.from_location( + path_item,entry,metadata,precedence=DEVELOP_DIST + ) + elif not only and lower.endswith('.egg'): + for dist in find_distributions(os.path.join(path_item, entry)): + yield dist + elif not only and lower.endswith('.egg-link'): + for line in file(os.path.join(path_item, entry)): + if not line.strip(): continue + for item in find_distributions(os.path.join(path_item,line.rstrip())): + yield item + break +register_finder(ImpWrapper, find_on_path) + +_declare_state('dict', _namespace_handlers = {}) +_declare_state('dict', _namespace_packages = {}) + +def register_namespace_handler(importer_type, namespace_handler): + """Register `namespace_handler` to declare namespace packages + + `importer_type` 
is the type or class of a PEP 302 "Importer" (sys.path item + handler), and `namespace_handler` is a callable like this:: + + def namespace_handler(importer,path_entry,moduleName,module): + # return a path_entry to use for child packages + + Namespace handlers are only called if the importer object has already + agreed that it can handle the relevant path item, and they should only + return a subpath if the module __path__ does not already contain an + equivalent subpath. For an example namespace handler, see + ``pkg_resources.file_ns_handler``. + """ + _namespace_handlers[importer_type] = namespace_handler + +def _handle_ns(packageName, path_item): + """Ensure that named package includes a subpath of path_item (if needed)""" + importer = get_importer(path_item) + if importer is None: + return None + loader = importer.find_module(packageName) + if loader is None: + return None + module = sys.modules.get(packageName) + if module is None: + module = sys.modules[packageName] = imp.new_module(packageName) + module.__path__ = []; _set_parent_ns(packageName) + elif not hasattr(module,'__path__'): + raise TypeError("Not a package:", packageName) + handler = _find_adapter(_namespace_handlers, importer) + subpath = handler(importer,path_item,packageName,module) + if subpath is not None: + path = module.__path__; path.append(subpath) + loader.load_module(packageName); module.__path__ = path + return subpath + +def declare_namespace(packageName): + """Declare that package 'packageName' is a namespace package""" + + imp.acquire_lock() + try: + if packageName in _namespace_packages: + return + + path, parent = sys.path, None + if '.' in packageName: + parent = '.'.join(packageName.split('.')[:-1]) + declare_namespace(parent) + __import__(parent) + try: + path = sys.modules[parent].__path__ + except AttributeError: + raise TypeError("Not a package:", parent) + + # Track what packages are namespaces, so when new path items are added, + # they can be updated + _namespace_packages.setdefault(parent,[]).append(packageName) + _namespace_packages.setdefault(packageName,[]) + + for path_item in path: + # Ensure all the parent's path items are reflected in the child, + # if they apply + _handle_ns(packageName, path_item) + + finally: + imp.release_lock() + +def fixup_namespace_packages(path_item, parent=None): + """Ensure that previously-declared namespace packages include path_item""" + imp.acquire_lock() + try: + for package in _namespace_packages.get(parent,()): + subpath = _handle_ns(package, path_item) + if subpath: fixup_namespace_packages(subpath,package) + finally: + imp.release_lock() + +def file_ns_handler(importer, path_item, packageName, module): + """Compute an ns-package subpath for a filesystem or zipfile importer""" + + subpath = os.path.join(path_item, packageName.split('.')[-1]) + normalized = _normalize_cached(subpath) + for item in module.__path__: + if _normalize_cached(item)==normalized: + break + else: + # Only return the path if it's not already there + return subpath + +register_namespace_handler(ImpWrapper,file_ns_handler) +register_namespace_handler(zipimport.zipimporter,file_ns_handler) + + +def null_ns_handler(importer, path_item, packageName, module): + return None + +register_namespace_handler(object,null_ns_handler) + + +def normalize_path(filename): + """Normalize a file/dir name for comparison purposes""" + return os.path.normcase(os.path.realpath(filename)) + +def _normalize_cached(filename,_cache={}): + try: + return _cache[filename] + except KeyError: + _cache[filename] = 
result = normalize_path(filename)
+        return result
+
+def _set_parent_ns(packageName):
+    parts = packageName.split('.')
+    name = parts.pop()
+    if parts:
+        parent = '.'.join(parts)
+        setattr(sys.modules[parent], name, sys.modules[packageName])
+
+
+def yield_lines(strs):
+    """Yield non-empty/non-comment lines of a ``basestring`` or sequence"""
+    if isinstance(strs,basestring):
+        for s in strs.splitlines():
+            s = s.strip()
+            if s and not s.startswith('#'):     # skip blank lines/comments
+                yield s
+    else:
+        for ss in strs:
+            for s in yield_lines(ss):
+                yield s
+
+LINE_END = re.compile(r"\s*(#.*)?$").match          # whitespace and comment
+CONTINUE = re.compile(r"\s*\\\s*(#.*)?$").match     # line continuation
+DISTRO   = re.compile(r"\s*((\w|[-.])+)").match     # Distribution or extra
+VERSION  = re.compile(r"\s*(<=?|>=?|==|!=)\s*((\w|[-.])+)").match   # ver. info
+COMMA    = re.compile(r"\s*,").match                # comma between items
+OBRACKET = re.compile(r"\s*\[").match
+CBRACKET = re.compile(r"\s*\]").match
+MODULE   = re.compile(r"\w+(\.\w+)*$").match
+EGG_NAME = re.compile(
+    r"(?P<name>[^-]+)"
+    r"( -(?P<ver>[^-]+) (-py(?P<pyver>[^-]+) (-(?P<plat>.+))? )? )?",
+    re.VERBOSE | re.IGNORECASE
+).match
+
+component_re = re.compile(r'(\d+ | [a-z]+ | \.| -)', re.VERBOSE)
+replace = {'pre':'c', 'preview':'c','-':'final-','rc':'c','dev':'@'}.get
+
+def _parse_version_parts(s):
+    for part in component_re.split(s):
+        part = replace(part,part)
+        if not part or part=='.':
+            continue
+        if part[:1] in '0123456789':
+            yield part.zfill(8)     # pad for numeric comparison
+        else:
+            yield '*'+part
+
+    yield '*final'  # ensure that alpha/beta/candidate are before final
+
+def parse_version(s):
+    """Convert a version string to a chronologically-sortable key
+
+    This is a rough cross between distutils' StrictVersion and LooseVersion;
+    if you give it versions that would work with StrictVersion, then it behaves
+    the same; otherwise it acts like a slightly-smarter LooseVersion.  It is
+    *possible* to create pathological version coding schemes that will fool
+    this parser, but they should be very rare in practice.
+
+    The returned value will be a tuple of strings.  Numeric portions of the
+    version are padded to 8 digits so they will compare numerically, but
+    without relying on how numbers compare relative to strings.  Dots are
+    dropped, but dashes are retained.  Trailing zeros between alpha segments
+    or dashes are suppressed, so that e.g. "2.4.0" is considered the same as
+    "2.4".  Alphanumeric parts are lower-cased.
+
+    The algorithm assumes that strings like "-" and any alpha string that
+    alphabetically follows "final" represent a "patch level".  So, "2.4-1"
+    is assumed to be a branch or patch of "2.4", and therefore "2.4.1" is
+    considered newer than "2.4-1", which in turn is newer than "2.4".
+
+    Strings like "a", "b", "c", "alpha", "beta", "candidate" and so on (that
+    come before "final" alphabetically) are assumed to be pre-release versions,
+    so that the version "2.4" is considered newer than "2.4a1".
+
+    Finally, to handle miscellaneous cases, the strings "pre", "preview", and
+    "rc" are treated as if they were "c", i.e. as though they were release
+    candidates, and therefore are not as new as a version string that does not
+    contain them, and "dev" is replaced with an '@' so that it sorts lower
+    than any other pre-release tag.
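+
+    Illustrative examples (added here for clarity; both follow from the
+    rules above)::
+
+        >>> parse_version('2.4') == parse_version('2.4.0')
+        True
+        >>> parse_version('2.4a1') < parse_version('2.4')
+        True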
+ """ + parts = [] + for part in _parse_version_parts(s.lower()): + if part.startswith('*'): + if part<'*final': # remove '-' before a prerelease tag + while parts and parts[-1]=='*final-': parts.pop() + # remove trailing zeros from each series of numeric parts + while parts and parts[-1]=='00000000': + parts.pop() + parts.append(part) + return tuple(parts) + +class EntryPoint(object): + """Object representing an advertised importable object""" + + def __init__(self, name, module_name, attrs=(), extras=(), dist=None): + if not MODULE(module_name): + raise ValueError("Invalid module name", module_name) + self.name = name + self.module_name = module_name + self.attrs = tuple(attrs) + self.extras = Requirement.parse(("x[%s]" % ','.join(extras))).extras + self.dist = dist + + def __str__(self): + s = "%s = %s" % (self.name, self.module_name) + if self.attrs: + s += ':' + '.'.join(self.attrs) + if self.extras: + s += ' [%s]' % ','.join(self.extras) + return s + + def __repr__(self): + return "EntryPoint.parse(%r)" % str(self) + + def load(self, require=True, env=None, installer=None): + if require: self.require(env, installer) + entry = __import__(self.module_name, globals(),globals(), ['__name__']) + for attr in self.attrs: + try: + entry = getattr(entry,attr) + except AttributeError: + raise ImportError("%r has no %r attribute" % (entry,attr)) + return entry + + def require(self, env=None, installer=None): + if self.extras and not self.dist: + raise UnknownExtra("Can't require() without a distribution", self) + map(working_set.add, + working_set.resolve(self.dist.requires(self.extras),env,installer)) + + + + #@classmethod + def parse(cls, src, dist=None): + """Parse a single entry point from string `src` + + Entry point syntax follows the form:: + + name = some.module:some.attr [extra1,extra2] + + The entry name and module name are required, but the ``:attrs`` and + ``[extras]`` parts are optional + """ + try: + attrs = extras = () + name,value = src.split('=',1) + if '[' in value: + value,extras = value.split('[',1) + req = Requirement.parse("x["+extras) + if req.specs: raise ValueError + extras = req.extras + if ':' in value: + value,attrs = value.split(':',1) + if not MODULE(attrs.rstrip()): + raise ValueError + attrs = attrs.rstrip().split('.') + except ValueError: + raise ValueError( + "EntryPoint must be in 'name=module:attrs [extras]' format", + src + ) + else: + return cls(name.strip(), value.strip(), attrs, extras, dist) + + parse = classmethod(parse) + + + + + + + + + #@classmethod + def parse_group(cls, group, lines, dist=None): + """Parse an entry point group""" + if not MODULE(group): + raise ValueError("Invalid group name", group) + this = {} + for line in yield_lines(lines): + ep = cls.parse(line, dist) + if ep.name in this: + raise ValueError("Duplicate entry point", group, ep.name) + this[ep.name]=ep + return this + + parse_group = classmethod(parse_group) + + #@classmethod + def parse_map(cls, data, dist=None): + """Parse a map of entry point groups""" + if isinstance(data,dict): + data = data.items() + else: + data = split_sections(data) + maps = {} + for group, lines in data: + if group is None: + if not lines: + continue + raise ValueError("Entry points must be listed in groups") + group = group.strip() + if group in maps: + raise ValueError("Duplicate group name", group) + maps[group] = cls.parse_group(group, lines, dist) + return maps + + parse_map = classmethod(parse_map) + + + + + + +class Distribution(object): + """Wrap an actual or potential sys.path entry 
w/metadata""" + def __init__(self, + location=None, metadata=None, project_name=None, version=None, + py_version=PY_MAJOR, platform=None, precedence = EGG_DIST + ): + self.project_name = safe_name(project_name or 'Unknown') + if version is not None: + self._version = safe_version(version) + self.py_version = py_version + self.platform = platform + self.location = location + self.precedence = precedence + self._provider = metadata or empty_provider + + #@classmethod + def from_location(cls,location,basename,metadata=None,**kw): + project_name, version, py_version, platform = [None]*4 + basename, ext = os.path.splitext(basename) + if ext.lower() in (".egg",".egg-info"): + match = EGG_NAME(basename) + if match: + project_name, version, py_version, platform = match.group( + 'name','ver','pyver','plat' + ) + return cls( + location, metadata, project_name=project_name, version=version, + py_version=py_version, platform=platform, **kw + ) + from_location = classmethod(from_location) + + hashcmp = property( + lambda self: ( + getattr(self,'parsed_version',()), self.precedence, self.key, + -len(self.location or ''), self.location, self.py_version, + self.platform + ) + ) + def __cmp__(self, other): return cmp(self.hashcmp, other) + def __hash__(self): return hash(self.hashcmp) + + # These properties have to be lazy so that we don't have to load any + # metadata until/unless it's actually needed. (i.e., some distributions + # may not know their name or version without loading PKG-INFO) + + #@property + def key(self): + try: + return self._key + except AttributeError: + self._key = key = self.project_name.lower() + return key + key = property(key) + + #@property + def parsed_version(self): + try: + return self._parsed_version + except AttributeError: + self._parsed_version = pv = parse_version(self.version) + return pv + + parsed_version = property(parsed_version) + + #@property + def version(self): + try: + return self._version + except AttributeError: + for line in self._get_metadata('PKG-INFO'): + if line.lower().startswith('version:'): + self._version = safe_version(line.split(':',1)[1].strip()) + return self._version + else: + raise ValueError( + "Missing 'Version:' header and/or PKG-INFO file", self + ) + version = property(version) + + + + + #@property + def _dep_map(self): + try: + return self.__dep_map + except AttributeError: + dm = self.__dep_map = {None: []} + for name in 'requires.txt', 'depends.txt': + for extra,reqs in split_sections(self._get_metadata(name)): + if extra: extra = safe_extra(extra) + dm.setdefault(extra,[]).extend(parse_requirements(reqs)) + return dm + _dep_map = property(_dep_map) + + def requires(self,extras=()): + """List of Requirements needed for this distro if `extras` are used""" + dm = self._dep_map + deps = [] + deps.extend(dm.get(None,())) + for ext in extras: + try: + deps.extend(dm[safe_extra(ext)]) + except KeyError: + raise UnknownExtra( + "%s has no such extra feature %r" % (self, ext) + ) + return deps + + def _get_metadata(self,name): + if self.has_metadata(name): + for line in self.get_metadata_lines(name): + yield line + + def activate(self,path=None): + """Ensure distribution is importable on `path` (default=sys.path)""" + if path is None: path = sys.path + self.insert_on(path) + if path is sys.path: + fixup_namespace_packages(self.location) + for pkg in self._get_metadata('namespace_packages.txt'): + if pkg in sys.modules: declare_namespace(pkg) + + def egg_name(self): + """Return what this distribution's standard .egg filename should be""" + 
filename = "%s-%s-py%s" % ( + to_filename(self.project_name), to_filename(self.version), + self.py_version or PY_MAJOR + ) + + if self.platform: + filename += '-'+self.platform + return filename + + def __repr__(self): + if self.location: + return "%s (%s)" % (self,self.location) + else: + return str(self) + + def __str__(self): + try: version = getattr(self,'version',None) + except ValueError: version = None + version = version or "[unknown version]" + return "%s %s" % (self.project_name,version) + + def __getattr__(self,attr): + """Delegate all unrecognized public attributes to .metadata provider""" + if attr.startswith('_'): + raise AttributeError,attr + return getattr(self._provider, attr) + + #@classmethod + def from_filename(cls,filename,metadata=None, **kw): + return cls.from_location( + _normalize_cached(filename), os.path.basename(filename), metadata, + **kw + ) + from_filename = classmethod(from_filename) + + def as_requirement(self): + """Return a ``Requirement`` that matches this distribution exactly""" + return Requirement.parse('%s==%s' % (self.project_name, self.version)) + + def load_entry_point(self, group, name): + """Return the `name` entry point of `group` or raise ImportError""" + ep = self.get_entry_info(group,name) + if ep is None: + raise ImportError("Entry point %r not found" % ((group,name),)) + return ep.load() + + def get_entry_map(self, group=None): + """Return the entry point map for `group`, or the full entry map""" + try: + ep_map = self._ep_map + except AttributeError: + ep_map = self._ep_map = EntryPoint.parse_map( + self._get_metadata('entry_points.txt'), self + ) + if group is not None: + return ep_map.get(group,{}) + return ep_map + + def get_entry_info(self, group, name): + """Return the EntryPoint object for `group`+`name`, or ``None``""" + return self.get_entry_map(group).get(name) + + + + + + + + + + + + + + + + + + + + def insert_on(self, path, loc = None): + """Insert self.location in path before its nearest parent directory""" + + loc = loc or self.location + if not loc: + return + + nloc = _normalize_cached(loc) + bdir = os.path.dirname(nloc) + npath= [(p and _normalize_cached(p) or p) for p in path] + + bp = None + for p, item in enumerate(npath): + if item==nloc: + break + elif item==bdir and self.precedence==EGG_DIST: + # if it's an .egg, give it precedence over its directory + if path is sys.path: + self.check_version_conflict() + path.insert(p, loc) + npath.insert(p, nloc) + break + else: + if path is sys.path: + self.check_version_conflict() + path.append(loc) + return + + # p is the spot where we found or inserted loc; now remove duplicates + while 1: + try: + np = npath.index(nloc, p+1) + except ValueError: + break + else: + del npath[np], path[np] + p = np # ha! 
+ + return + + + def check_version_conflict(self): + if self.key=='setuptools': + return # ignore the inevitable setuptools self-conflicts :( + + nsp = dict.fromkeys(self._get_metadata('namespace_packages.txt')) + loc = normalize_path(self.location) + for modname in self._get_metadata('top_level.txt'): + if (modname not in sys.modules or modname in nsp + or modname in _namespace_packages + ): + continue + + fn = getattr(sys.modules[modname], '__file__', None) + if fn and (normalize_path(fn).startswith(loc) or fn.startswith(loc)): + continue + issue_warning( + "Module %s was already imported from %s, but %s is being added" + " to sys.path" % (modname, fn, self.location), + ) + + def has_version(self): + try: + self.version + except ValueError: + issue_warning("Unbuilt egg for "+repr(self)) + return False + return True + + def clone(self,**kw): + """Copy this distribution, substituting in any changed keyword args""" + for attr in ( + 'project_name', 'version', 'py_version', 'platform', 'location', + 'precedence' + ): + kw.setdefault(attr, getattr(self,attr,None)) + kw.setdefault('metadata', self._provider) + return self.__class__(**kw) + + + + + #@property + def extras(self): + return [dep for dep in self._dep_map if dep] + extras = property(extras) + + +def issue_warning(*args,**kw): + level = 1 + g = globals() + try: + # find the first stack frame that is *not* code in + # the pkg_resources module, to use for the warning + while sys._getframe(level).f_globals is g: + level += 1 + except ValueError: + pass + from warnings import warn + warn(stacklevel = level+1, *args, **kw) + + + + + + + + + + + + + + + + + + + + + + + +def parse_requirements(strs): + """Yield ``Requirement`` objects for each specification in `strs` + + `strs` must be an instance of ``basestring``, or a (possibly-nested) + iterable thereof. 
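+
+    For example, with an illustrative (hypothetical) specification::
+
+        [req] = parse_requirements("FooProject[bar]>=1.2,<2.0")
+        # req.project_name == 'FooProject'; req.extras == ('bar',)
+        # req.specs == [('>=', '1.2'), ('<', '2.0')]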
+ """ + # create a steppable iterator, so we can handle \-continuations + lines = iter(yield_lines(strs)) + + def scan_list(ITEM,TERMINATOR,line,p,groups,item_name): + + items = [] + + while not TERMINATOR(line,p): + if CONTINUE(line,p): + try: + line = lines.next(); p = 0 + except StopIteration: + raise ValueError( + "\\ must not appear on the last nonblank line" + ) + + match = ITEM(line,p) + if not match: + raise ValueError("Expected "+item_name+" in",line,"at",line[p:]) + + items.append(match.group(*groups)) + p = match.end() + + match = COMMA(line,p) + if match: + p = match.end() # skip the comma + elif not TERMINATOR(line,p): + raise ValueError( + "Expected ',' or end-of-list in",line,"at",line[p:] + ) + + match = TERMINATOR(line,p) + if match: p = match.end() # skip the terminator, if any + return line, p, items + + for line in lines: + match = DISTRO(line) + if not match: + raise ValueError("Missing distribution spec", line) + project_name = match.group(1) + p = match.end() + extras = [] + + match = OBRACKET(line,p) + if match: + p = match.end() + line, p, extras = scan_list( + DISTRO, CBRACKET, line, p, (1,), "'extra' name" + ) + + line, p, specs = scan_list(VERSION,LINE_END,line,p,(1,2),"version spec") + specs = [(op,safe_version(val)) for op,val in specs] + yield Requirement(project_name, specs, extras) + + +def _sort_dists(dists): + tmp = [(dist.hashcmp,dist) for dist in dists] + tmp.sort() + dists[::-1] = [d for hc,d in tmp] + + + + + + + + + + + + + + + + + +class Requirement: + def __init__(self, project_name, specs, extras): + """DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!""" + self.unsafe_name, project_name = project_name, safe_name(project_name) + self.project_name, self.key = project_name, project_name.lower() + index = [(parse_version(v),state_machine[op],op,v) for op,v in specs] + index.sort() + self.specs = [(op,ver) for parsed,trans,op,ver in index] + self.index, self.extras = index, tuple(map(safe_extra,extras)) + self.hashCmp = ( + self.key, tuple([(op,parsed) for parsed,trans,op,ver in index]), + frozenset(self.extras) + ) + self.__hash = hash(self.hashCmp) + + def __str__(self): + specs = ','.join([''.join(s) for s in self.specs]) + extras = ','.join(self.extras) + if extras: extras = '[%s]' % extras + return '%s%s%s' % (self.project_name, extras, specs) + + def __eq__(self,other): + return isinstance(other,Requirement) and self.hashCmp==other.hashCmp + + def __contains__(self,item): + if isinstance(item,Distribution): + if item.key != self.key: return False + if self.index: item = item.parsed_version # only get if we need it + elif isinstance(item,basestring): + item = parse_version(item) + last = None + for parsed,trans,op,ver in self.index: + action = trans[cmp(item,parsed)] + if action=='F': return False + elif action=='T': return True + elif action=='+': last = True + elif action=='-' or last is None: last = False + if last is None: last = True # no rules encountered + return last + + + def __hash__(self): + return self.__hash + + def __repr__(self): return "Requirement.parse(%r)" % str(self) + + #@staticmethod + def parse(s): + reqs = list(parse_requirements(s)) + if reqs: + if len(reqs)==1: + return reqs[0] + raise ValueError("Expected only one requirement", s) + raise ValueError("No requirements found", s) + + parse = staticmethod(parse) + +state_machine = { + # =>< + '<' : '--T', + '<=': 'T-T', + '>' : 'F+F', + '>=': 'T+F', + '==': 'T..', + '!=': 'F++', +} + + +def _get_mro(cls): + """Get an mro for a type or classic class""" + if not 
isinstance(cls,type): + class cls(cls,object): pass + return cls.__mro__[1:] + return cls.__mro__ + +def _find_adapter(registry, ob): + """Return an adapter factory for `ob` from `registry`""" + for t in _get_mro(getattr(ob, '__class__', type(ob))): + if t in registry: + return registry[t] + + +def ensure_directory(path): + """Ensure that the parent directory of `path` exists""" + dirname = os.path.dirname(path) + if not os.path.isdir(dirname): + os.makedirs(dirname) + +def split_sections(s): + """Split a string or iterable thereof into (section,content) pairs + + Each ``section`` is a stripped version of the section header ("[section]") + and each ``content`` is a list of stripped lines excluding blank lines and + comment-only lines. If there are any such lines before the first section + header, they're returned in a first ``section`` of ``None``. + """ + section = None + content = [] + for line in yield_lines(s): + if line.startswith("["): + if line.endswith("]"): + if section or content: + yield section, content + section = line[1:-1].strip() + content = [] + else: + raise ValueError("Invalid section heading", line) + else: + content.append(line) + + # wrap up last segment + yield section, content + +def _mkstemp(*args,**kw): + from tempfile import mkstemp + old_open = os.open + try: + os.open = os_open # temporarily bypass sandboxing + return mkstemp(*args,**kw) + finally: + os.open = old_open # and then put it back + + +# Set up global resource manager (deliberately not state-saved) +_manager = ResourceManager() +def _initialize(g): + for name in dir(_manager): + if not name.startswith('_'): + g[name] = getattr(_manager, name) +_initialize(globals()) + +# Prepare the master working set and make the ``require()`` API available +__requires__ = None +_declare_state('object', working_set = WorkingSet()) +try: + # Does the main program list any requirements? + from __main__ import __requires__ +except ImportError: + pass # No: just use the default working set based on sys.path +else: + # Yes: ensure the requirements are met, by prefixing sys.path if necessary + try: + working_set.require(__requires__) + except (VersionConflict, DistributionNotFound): # try it without defaults already on sys.path + working_set = WorkingSet([]) # by starting with an empty path + try: + for dist in working_set.resolve( + parse_requirements(__requires__), Environment() + ): + working_set.add(dist) + except DistributionNotFound: + pass + for entry in sys.path: # add any missing entries from sys.path + if entry not in working_set.entries: + working_set.add_entry(entry) + sys.path[:] = working_set.entries # then copy back to sys.path + +require = working_set.require +iter_entry_points = working_set.iter_entry_points +add_activation_listener = working_set.subscribe +run_script = working_set.run_script +run_main = run_script # backward compatibility +# Activate all distributions already on sys.path, and ensure that +# all distributions added to the working set in the future (e.g. by +# calling ``require()``) will get activated as well. 
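+# For example, a client script could (hypothetically) write:
+#     import pkg_resources
+#     pkg_resources.require("FooProject>=1.2")  # resolve and activate
+# since ``require`` above is simply the master working set's bound method.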
+add_activation_listener(lambda dist: dist.activate())
+working_set.entries=[]; map(working_set.add_entry,sys.path) # match order
+
diff --git a/setuptools-0.6c16dev3.egg/setuptools/__init__.py b/setuptools-0.6c16dev3.egg/setuptools/__init__.py
new file mode 100644
index 0000000..d57448f
--- /dev/null
+++ b/setuptools-0.6c16dev3.egg/setuptools/__init__.py
@@ -0,0 +1,90 @@
+"""Extensions to the 'distutils' for large or complex distributions"""
+from setuptools.extension import Extension, Library
+from setuptools.dist import Distribution, Feature, _get_unpatched
+import distutils.core, setuptools.command
+from setuptools.depends import Require
+from distutils.core import Command as _Command
+from distutils.util import convert_path
+import os.path
+import os
+import sys
+
+__version__ = '0.6c16dev3'
+__all__ = [
+    'setup', 'Distribution', 'Feature', 'Command', 'Extension', 'Require',
+    'find_packages'
+]
+
+bootstrap_install_from = None
+
+def find_packages(where='.', exclude=()):
+    """Return a list of all Python packages found within directory 'where'
+
+    'where' should be supplied as a "cross-platform" (i.e. URL-style) path; it
+    will be converted to the appropriate local path syntax.  'exclude' is a
+    sequence of package names to exclude; '*' can be used as a wildcard in the
+    names, such that 'foo.*' will exclude all subpackages of 'foo' (but not
+    'foo' itself).
+    """
+    out = []
+    stack=[(convert_path(where), '')]
+    while stack:
+        where,prefix = stack.pop(0)
+        for name in os.listdir(where):
+            fn = os.path.join(where,name)
+            if ('.' not in name and os.path.isdir(fn) and
+                os.path.isfile(os.path.join(fn,'__init__.py'))
+            ):
+                out.append(prefix+name); stack.append((fn,prefix+name+'.'))
+    for pat in list(exclude)+['ez_setup']:
+        from fnmatch import fnmatchcase
+        out = [item for item in out if not fnmatchcase(item,pat)]
+    return out
+
+setup = distutils.core.setup
+
+_Command = _get_unpatched(_Command)
+
+class Command(_Command):
+    __doc__ = _Command.__doc__
+
+    command_consumes_arguments = False
+
+    def __init__(self, dist, **kw):
+        # Add support for keyword arguments
+        _Command.__init__(self,dist)
+        for k,v in kw.items():
+            setattr(self,k,v)
+
+    def reinitialize_command(self, command, reinit_subcommands=0, **kw):
+        cmd = _Command.reinitialize_command(self, command, reinit_subcommands)
+        for k,v in kw.items():
+            setattr(cmd,k,v)    # update command with keywords
+        return cmd
+
+import distutils.core
+distutils.core.Command = Command    # we can't patch distutils.cmd, alas
+
+def findall(dir = os.curdir):
+    """Find all files under 'dir' and return the list of full filenames
+    (relative to 'dir').
+    """
+    all_files = []
+    for base, dirs, files in os.walk(dir):
+        if base==os.curdir or base.startswith(os.curdir+os.sep):
+            base = base[2:]
+        if base:
+            files = [os.path.join(base, f) for f in files]
+        all_files.extend(filter(os.path.isfile, files))
+    return all_files
+
+import distutils.filelist
+distutils.filelist.findall = findall    # fix findall bug in distutils.
+
+
+# sys.dont_write_bytecode was introduced in Python 2.6.
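+# On 2.6 and later we honor the interpreter's own flag; on older versions
+# the best available signal is the PYTHONDONTWRITEBYTECODE environment
+# variable (set by, e.g., a hypothetical
+# "PYTHONDONTWRITEBYTECODE=1 python setup.py build" invocation).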
+if ((hasattr(sys, "dont_write_bytecode") and sys.dont_write_bytecode) or
+    (not hasattr(sys, "dont_write_bytecode") and os.environ.get("PYTHONDONTWRITEBYTECODE"))):
+    _dont_write_bytecode = True
+else:
+    _dont_write_bytecode = False
diff --git a/setuptools-0.6c16dev3.egg/setuptools/archive_util.py b/setuptools-0.6c16dev3.egg/setuptools/archive_util.py
new file mode 100644
index 0000000..cd4c3fb
--- /dev/null
+++ b/setuptools-0.6c16dev3.egg/setuptools/archive_util.py
@@ -0,0 +1,205 @@
+"""Utilities for extracting common archive formats"""
+
+
+__all__ = [
+    "unpack_archive", "unpack_zipfile", "unpack_tarfile", "default_filter",
+    "UnrecognizedFormat", "extraction_drivers", "unpack_directory",
+]
+
+import zipfile, tarfile, os, shutil
+from pkg_resources import ensure_directory
+from distutils.errors import DistutilsError
+
+class UnrecognizedFormat(DistutilsError):
+    """Couldn't recognize the archive type"""
+
+def default_filter(src,dst):
+    """The default progress/filter callback; returns `dst` unchanged, so
+    every file gets extracted"""
+    return dst
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+def unpack_archive(filename, extract_dir, progress_filter=default_filter,
+    drivers=None
+):
+    """Unpack `filename` to `extract_dir`, or raise ``UnrecognizedFormat``
+
+    `progress_filter` is a function taking two arguments: a source path
+    internal to the archive ('/'-separated), and a filesystem path where it
+    will be extracted.  The callback must return the desired extract path
+    (which may be the same as the one passed in), or else ``None`` to skip
+    that file or directory.  The callback can thus be used to report on the
+    progress of the extraction, as well as to filter the items extracted or
+    alter their extraction paths.
+
+    `drivers`, if supplied, must be a non-empty sequence of functions with the
+    same signature as this function (minus the `drivers` argument), that raise
+    ``UnrecognizedFormat`` if they do not support extracting the designated
+    archive type.  The `drivers` are tried in sequence until one is found that
+    does not raise an error, or until all are exhausted (in which case
+    ``UnrecognizedFormat`` is raised).  If you do not supply a sequence of
+    drivers, the module's ``extraction_drivers`` constant will be used, which
+    means that ``unpack_zipfile`` and ``unpack_tarfile`` will be tried, in that
+    order.
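+
+    A typical (hypothetical) invocation relying on those defaults::
+
+        unpack_archive('dist/foo-1.0.zip', 'build/foo-1.0')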
+ """ + for driver in drivers or extraction_drivers: + try: + driver(filename, extract_dir, progress_filter) + except UnrecognizedFormat: + continue + else: + return + else: + raise UnrecognizedFormat( + "Not a recognized archive type: %s" % filename + ) + + + + + + + +def unpack_directory(filename, extract_dir, progress_filter=default_filter): + """"Unpack" a directory, using the same interface as for archives + + Raises ``UnrecognizedFormat`` if `filename` is not a directory + """ + if not os.path.isdir(filename): + raise UnrecognizedFormat("%s is not a directory" % (filename,)) + + paths = {filename:('',extract_dir)} + for base, dirs, files in os.walk(filename): + src,dst = paths[base] + for d in dirs: + paths[os.path.join(base,d)] = src+d+'/', os.path.join(dst,d) + for f in files: + name = src+f + target = os.path.join(dst,f) + target = progress_filter(src+f, target) + if not target: + continue # skip non-files + ensure_directory(target) + f = os.path.join(base,f) + shutil.copyfile(f, target) + shutil.copystat(f, target) + + + + + + + + + + + + + + + + + + +def unpack_zipfile(filename, extract_dir, progress_filter=default_filter): + """Unpack zip `filename` to `extract_dir` + + Raises ``UnrecognizedFormat`` if `filename` is not a zipfile (as determined + by ``zipfile.is_zipfile()``). See ``unpack_archive()`` for an explanation + of the `progress_filter` argument. + """ + + if not zipfile.is_zipfile(filename): + raise UnrecognizedFormat("%s is not a zip file" % (filename,)) + + z = zipfile.ZipFile(filename) + try: + for info in z.infolist(): + name = info.filename + + # don't extract absolute paths or ones with .. in them + if name.startswith('/') or '..' in name: + continue + + target = os.path.join(extract_dir, *name.split('/')) + target = progress_filter(name, target) + if not target: + continue + if name.endswith('/'): + # directory + ensure_directory(target) + else: + # file + ensure_directory(target) + data = z.read(info.filename) + f = open(target,'wb') + try: + f.write(data) + finally: + f.close() + del data + finally: + z.close() + + +def unpack_tarfile(filename, extract_dir, progress_filter=default_filter): + """Unpack tar/tar.gz/tar.bz2 `filename` to `extract_dir` + + Raises ``UnrecognizedFormat`` if `filename` is not a tarfile (as determined + by ``tarfile.open()``). See ``unpack_archive()`` for an explanation + of the `progress_filter` argument. + """ + + try: + tarobj = tarfile.open(filename) + except tarfile.TarError: + raise UnrecognizedFormat( + "%s is not a compressed or uncompressed tar file" % (filename,) + ) + + try: + tarobj.chown = lambda *args: None # don't do any chowning! + for member in tarobj: + if member.isfile() or member.isdir(): + name = member.name + # don't extract absolute paths or ones with .. in them + if not name.startswith('/') and '..' 
not in name: + dst = os.path.join(extract_dir, *name.split('/')) + dst = progress_filter(name, dst) + if dst: + if dst.endswith(os.sep): + dst = dst[:-1] + try: + tarobj._extract_member(member,dst) # XXX Ugh + except tarfile.ExtractError: + pass # chown/chmod/mkfifo/mknode/makedev failed + return True + finally: + tarobj.close() + + + + +extraction_drivers = unpack_directory, unpack_zipfile, unpack_tarfile + + diff --git a/setuptools-0.6c16dev3.egg/setuptools/cli.exe b/setuptools-0.6c16dev3.egg/setuptools/cli.exe new file mode 100644 index 0000000..3173b2b Binary files /dev/null and b/setuptools-0.6c16dev3.egg/setuptools/cli.exe differ diff --git a/setuptools-0.6c16dev3.egg/setuptools/command/__init__.py b/setuptools-0.6c16dev3.egg/setuptools/command/__init__.py new file mode 100644 index 0000000..80969f3 --- /dev/null +++ b/setuptools-0.6c16dev3.egg/setuptools/command/__init__.py @@ -0,0 +1,20 @@ +__all__ = [ + 'alias', 'bdist_egg', 'bdist_rpm', 'build_ext', 'build_py', 'develop', + 'easy_install', 'egg_info', 'install', 'install_lib', 'rotate', 'saveopts', + 'sdist', 'setopt', 'test', 'upload', 'install_egg_info', 'install_scripts', + 'register', 'bdist_wininst', 'scriptsetup', +] + +import sys +if sys.version>='2.5': + # In Python 2.5 and above, distutils includes its own upload command + __all__.remove('upload') + + +from distutils.command.bdist import bdist + +if 'egg' not in bdist.format_commands: + bdist.format_command['egg'] = ('bdist_egg', "Python .egg file") + bdist.format_commands.append('egg') + +del bdist, sys diff --git a/setuptools-0.6c16dev3.egg/setuptools/command/alias.py b/setuptools-0.6c16dev3.egg/setuptools/command/alias.py new file mode 100644 index 0000000..3e69ef6 --- /dev/null +++ b/setuptools-0.6c16dev3.egg/setuptools/command/alias.py @@ -0,0 +1,79 @@ +import distutils, os +from setuptools import Command +from distutils.util import convert_path +from distutils import log +from distutils.errors import * +from setuptools.command.setopt import edit_config, option_base, config_file + +def shquote(arg): + """Quote an argument for later parsing by shlex.split()""" + for c in '"', "'", "\\", "#": + if c in arg: return repr(arg) + if arg.split()!=[arg]: + return repr(arg) + return arg + + +class alias(option_base): + """Define a shortcut that invokes one or more commands""" + + description = "define a shortcut to invoke one or more commands" + command_consumes_arguments = True + + user_options = [ + ('remove', 'r', 'remove (unset) the alias'), + ] + option_base.user_options + + boolean_options = option_base.boolean_options + ['remove'] + + def initialize_options(self): + option_base.initialize_options(self) + self.args = None + self.remove = None + + def finalize_options(self): + option_base.finalize_options(self) + if self.remove and len(self.args)!=1: + raise DistutilsOptionError( + "Must specify exactly one argument (the alias name) when " + "using --remove" + ) + + def run(self): + aliases = self.distribution.get_option_dict('aliases') + + if not self.args: + print "Command Aliases" + print "---------------" + for alias in aliases: + print "setup.py alias", format_alias(alias, aliases) + return + + elif len(self.args)==1: + alias, = self.args + if self.remove: + command = None + elif alias in aliases: + print "setup.py alias", format_alias(alias, aliases) + return + else: + print "No alias definition found for %r" % alias + return + else: + alias = self.args[0] + command = ' '.join(map(shquote,self.args[1:])) + + edit_config(self.filename, {'aliases': {alias:command}}, 
self.dry_run) + + +def format_alias(name, aliases): + source, command = aliases[name] + if source == config_file('global'): + source = '--global-config ' + elif source == config_file('user'): + source = '--user-config ' + elif source == config_file('local'): + source = '' + else: + source = '--filename=%r' % source + return source+name+' '+command diff --git a/setuptools-0.6c16dev3.egg/setuptools/command/bdist_egg.py b/setuptools-0.6c16dev3.egg/setuptools/command/bdist_egg.py new file mode 100644 index 0000000..7e5a379 --- /dev/null +++ b/setuptools-0.6c16dev3.egg/setuptools/command/bdist_egg.py @@ -0,0 +1,533 @@ +"""setuptools.command.bdist_egg + +Build .egg distributions""" + +# This module should be kept compatible with Python 2.3 +import sys, os, marshal +from setuptools import Command +from distutils.dir_util import remove_tree, mkpath +from distutils.sysconfig import get_python_version, get_python_lib +from distutils import log +from distutils.errors import DistutilsSetupError +from pkg_resources import get_build_platform, Distribution, ensure_directory +from pkg_resources import EntryPoint +from types import CodeType +from setuptools.extension import Library + +def strip_module(filename): + if '.' in filename: + filename = os.path.splitext(filename)[0] + if filename.endswith('module'): + filename = filename[:-6] + return filename + +def write_stub(resource, pyfile): + f = open(pyfile,'w') + f.write('\n'.join([ + "def __bootstrap__():", + " global __bootstrap__, __loader__, __file__", + " import sys, pkg_resources, imp", + " __file__ = pkg_resources.resource_filename(__name__,%r)" + % resource, + " __loader__ = None; del __bootstrap__, __loader__", + " imp.load_dynamic(__name__,__file__)", + "__bootstrap__()", + "" # terminal \n + ])) + f.close() + +# stub __init__.py for packages distributed without one +NS_PKG_STUB = '__import__("pkg_resources").declare_namespace(__name__)' + +class bdist_egg(Command): + + description = "create an \"egg\" distribution" + + user_options = [ + ('bdist-dir=', 'b', + "temporary directory for creating the distribution"), + ('plat-name=', 'p', + "platform name to embed in generated filenames " + "(default: %s)" % get_build_platform()), + ('exclude-source-files', None, + "remove all .py files from the generated egg"), + ('keep-temp', 'k', + "keep the pseudo-installation tree around after " + + "creating the distribution archive"), + ('dist-dir=', 'd', + "directory to put final built distributions in"), + ('skip-build', None, + "skip rebuilding everything (for testing/debugging)"), + ] + + boolean_options = [ + 'keep-temp', 'skip-build', 'exclude-source-files' + ] + + + + + + + + + + + + + + + + + + def initialize_options (self): + self.bdist_dir = None + self.plat_name = None + self.keep_temp = 0 + self.dist_dir = None + self.skip_build = 0 + self.egg_output = None + self.exclude_source_files = None + + + def finalize_options(self): + ei_cmd = self.ei_cmd = self.get_finalized_command("egg_info") + self.egg_info = ei_cmd.egg_info + + if self.bdist_dir is None: + bdist_base = self.get_finalized_command('bdist').bdist_base + self.bdist_dir = os.path.join(bdist_base, 'egg') + + if self.plat_name is None: + self.plat_name = get_build_platform() + + self.set_undefined_options('bdist',('dist_dir', 'dist_dir')) + + if self.egg_output is None: + + # Compute filename of the output egg + basename = Distribution( + None, None, ei_cmd.egg_name, ei_cmd.egg_version, + get_python_version(), + self.distribution.has_ext_modules() and self.plat_name + ).egg_name() + + 
self.egg_output = os.path.join(self.dist_dir, basename+'.egg') + + + + + + + + + def do_install_data(self): + # Hack for packages that install data to install's --install-lib + self.get_finalized_command('install').install_lib = self.bdist_dir + + site_packages = os.path.normcase(os.path.realpath(get_python_lib())) + old, self.distribution.data_files = self.distribution.data_files,[] + + for item in old: + if isinstance(item,tuple) and len(item)==2: + if os.path.isabs(item[0]): + realpath = os.path.realpath(item[0]) + normalized = os.path.normcase(realpath) + if normalized==site_packages or normalized.startswith( + site_packages+os.sep + ): + item = realpath[len(site_packages)+1:], item[1] + # XXX else: raise ??? + self.distribution.data_files.append(item) + + try: + log.info("installing package data to %s" % self.bdist_dir) + self.call_command('install_data', force=0, root=None) + finally: + self.distribution.data_files = old + + + def get_outputs(self): + return [self.egg_output] + + + def call_command(self,cmdname,**kw): + """Invoke reinitialized command `cmdname` with keyword args""" + for dirname in INSTALL_DIRECTORY_ATTRS: + kw.setdefault(dirname,self.bdist_dir) + kw.setdefault('skip_build',self.skip_build) + kw.setdefault('dry_run', self.dry_run) + cmd = self.reinitialize_command(cmdname, **kw) + self.run_command(cmdname) + return cmd + + + def run(self): + # Generate metadata first + self.run_command("egg_info") + # We run install_lib before install_data, because some data hacks + # pull their data path from the install_lib command. + log.info("installing library code to %s" % self.bdist_dir) + instcmd = self.get_finalized_command('install') + old_root = instcmd.root; instcmd.root = None + if self.distribution.has_c_libraries() and not self.skip_build: + self.run_command('build_clib') + cmd = self.call_command('install_lib', warn_dir=0) + instcmd.root = old_root + + all_outputs, ext_outputs = self.get_ext_outputs() + self.stubs = [] + to_compile = [] + for (p,ext_name) in enumerate(ext_outputs): + filename,ext = os.path.splitext(ext_name) + pyfile = os.path.join(self.bdist_dir, strip_module(filename)+'.py') + self.stubs.append(pyfile) + log.info("creating stub loader for %s" % ext_name) + if not self.dry_run: + write_stub(os.path.basename(ext_name), pyfile) + to_compile.append(pyfile) + ext_outputs[p] = ext_name.replace(os.sep,'/') + + to_compile.extend(self.make_init_files()) + if to_compile: + cmd.byte_compile(to_compile) + if self.distribution.data_files: + self.do_install_data() + + # Make the EGG-INFO directory + archive_root = self.bdist_dir + egg_info = os.path.join(archive_root,'EGG-INFO') + self.mkpath(egg_info) + if self.distribution.scripts: + script_dir = os.path.join(egg_info, 'scripts') + log.info("installing scripts to %s" % script_dir) + self.call_command('install_scripts',install_dir=script_dir,no_ep=1) + + self.copy_metadata_to(egg_info) + native_libs = os.path.join(egg_info, "native_libs.txt") + if all_outputs: + log.info("writing %s" % native_libs) + if not self.dry_run: + ensure_directory(native_libs) + libs_file = open(native_libs, 'wt') + libs_file.write('\n'.join(all_outputs)) + libs_file.write('\n') + libs_file.close() + elif os.path.isfile(native_libs): + log.info("removing %s" % native_libs) + if not self.dry_run: + os.unlink(native_libs) + + write_safety_flag( + os.path.join(archive_root,'EGG-INFO'), self.zip_safe() + ) + + if os.path.exists(os.path.join(self.egg_info,'depends.txt')): + log.warn( + "WARNING: 'depends.txt' will not be used by setuptools 
0.6!\n" + "Use the install_requires/extras_require setup() args instead." + ) + + if self.exclude_source_files: + self.zap_pyfiles() + + # Make the archive + make_zipfile(self.egg_output, archive_root, verbose=self.verbose, + dry_run=self.dry_run, mode=self.gen_header()) + if not self.keep_temp: + remove_tree(self.bdist_dir, dry_run=self.dry_run) + + # Add to 'Distribution.dist_files' so that the "upload" command works + getattr(self.distribution,'dist_files',[]).append( + ('bdist_egg',get_python_version(),self.egg_output)) + + + + + def zap_pyfiles(self): + log.info("Removing .py files from temporary directory") + for base,dirs,files in walk_egg(self.bdist_dir): + for name in files: + if name.endswith('.py'): + path = os.path.join(base,name) + log.debug("Deleting %s", path) + os.unlink(path) + + def zip_safe(self): + safe = getattr(self.distribution,'zip_safe',None) + if safe is not None: + return safe + log.warn("zip_safe flag not set; analyzing archive contents...") + return analyze_egg(self.bdist_dir, self.stubs) + + def make_init_files(self): + """Create missing package __init__ files""" + init_files = [] + for base,dirs,files in walk_egg(self.bdist_dir): + if base==self.bdist_dir: + # don't put an __init__ in the root + continue + for name in files: + if name.endswith('.py'): + if '__init__.py' not in files: + pkg = base[len(self.bdist_dir)+1:].replace(os.sep,'.') + if self.distribution.has_contents_for(pkg): + log.warn("Creating missing __init__.py for %s",pkg) + filename = os.path.join(base,'__init__.py') + if not self.dry_run: + f = open(filename,'w'); f.write(NS_PKG_STUB) + f.close() + init_files.append(filename) + break + else: + # not a package, don't traverse to subdirectories + dirs[:] = [] + + return init_files + + def gen_header(self): + epm = EntryPoint.parse_map(self.distribution.entry_points or '') + ep = epm.get('setuptools.installation',{}).get('eggsecutable') + if ep is None: + return 'w' # not an eggsecutable, do it the usual way. 
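+
+        # Otherwise the project declared something like (hypothetical name)
+        #     'eggsecutable = foo.main:run'
+        # in its 'setuptools.installation' entry point group; gen_header()
+        # then returns 'a' after writing a shell header, so the zipped egg
+        # contents get appended to a self-running script.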
+ + if not ep.attrs or ep.extras: + raise DistutilsSetupError( + "eggsecutable entry point (%r) cannot have 'extras' " + "or refer to a module" % (ep,) + ) + + pyver = sys.version[:3] + pkg = ep.module_name + full = '.'.join(ep.attrs) + base = ep.attrs[0] + basename = os.path.basename(self.egg_output) + + header = ( + "#!/bin/sh\n" + 'if [ `basename $0` = "%(basename)s" ]\n' + 'then exec python%(pyver)s -c "' + "import sys, os; sys.path.insert(0, os.path.abspath('$0')); " + "from %(pkg)s import %(base)s; sys.exit(%(full)s())" + '" "$@"\n' + 'else\n' + ' echo $0 is not the correct name for this egg file.\n' + ' echo Please rename it back to %(basename)s and try again.\n' + ' exec false\n' + 'fi\n' + + ) % locals() + + if not self.dry_run: + mkpath(os.path.dirname(self.egg_output), dry_run=self.dry_run) + f = open(self.egg_output, 'w') + f.write(header) + f.close() + return 'a' + + + def copy_metadata_to(self, target_dir): + prefix = os.path.join(self.egg_info,'') + for path in self.ei_cmd.filelist.files: + if path.startswith(prefix): + target = os.path.join(target_dir, path[len(prefix):]) + ensure_directory(target) + self.copy_file(path, target) + + def get_ext_outputs(self): + """Get a list of relative paths to C extensions in the output distro""" + + all_outputs = [] + ext_outputs = [] + + paths = {self.bdist_dir:''} + for base, dirs, files in os.walk(self.bdist_dir): + for filename in files: + if os.path.splitext(filename)[1].lower() in NATIVE_EXTENSIONS: + all_outputs.append(paths[base]+filename) + for filename in dirs: + paths[os.path.join(base,filename)] = paths[base]+filename+'/' + + if self.distribution.has_ext_modules(): + build_cmd = self.get_finalized_command('build_ext') + for ext in build_cmd.extensions: + if isinstance(ext,Library): + continue + fullname = build_cmd.get_ext_fullname(ext.name) + filename = build_cmd.get_ext_filename(fullname) + if not os.path.basename(filename).startswith('dl-'): + if os.path.exists(os.path.join(self.bdist_dir,filename)): + ext_outputs.append(filename) + + return all_outputs, ext_outputs + + +NATIVE_EXTENSIONS = dict.fromkeys('.dll .so .dylib .pyd'.split()) + + + + +def walk_egg(egg_dir): + """Walk an unpacked egg's contents, skipping the metadata directory""" + walker = os.walk(egg_dir) + base,dirs,files = walker.next() + if 'EGG-INFO' in dirs: + dirs.remove('EGG-INFO') + yield base,dirs,files + for bdf in walker: + yield bdf + +def analyze_egg(egg_dir, stubs): + # check for existing flag in EGG-INFO + for flag,fn in safety_flags.items(): + if os.path.exists(os.path.join(egg_dir,'EGG-INFO',fn)): + return flag + if not can_scan(): return False + safe = True + for base, dirs, files in walk_egg(egg_dir): + for name in files: + if name.endswith('.py') or name.endswith('.pyw'): + continue + elif name.endswith('.pyc') or name.endswith('.pyo'): + # always scan, even if we already know we're not safe + safe = scan_module(egg_dir, base, name, stubs) and safe + return safe + +def write_safety_flag(egg_dir, safe): + # Write or remove zip safety flag file(s) + for flag,fn in safety_flags.items(): + fn = os.path.join(egg_dir, fn) + if os.path.exists(fn): + if safe is None or bool(safe)!=flag: + os.unlink(fn) + elif safe is not None and bool(safe)==flag: + f=open(fn,'wb'); f.write('\n'); f.close() + +safety_flags = { + True: 'zip-safe', + False: 'not-zip-safe', +} + +def scan_module(egg_dir, base, name, stubs): + """Check whether module possibly uses unsafe-for-zipfile stuff""" + + filename = os.path.join(base,name) + if filename[:-1] in stubs: + return 
True # Extension module
+    pkg = base[len(egg_dir)+1:].replace(os.sep,'.')
+    module = pkg+(pkg and '.' or '')+os.path.splitext(name)[0]
+    f = open(filename,'rb'); f.read(8)   # skip magic & date
+    code = marshal.load(f);  f.close()
+    safe = True
+    symbols = dict.fromkeys(iter_symbols(code))
+    for bad in ['__file__', '__path__']:
+        if bad in symbols:
+            log.warn("%s: module references %s", module, bad)
+            safe = False
+    if 'inspect' in symbols:
+        for bad in [
+            'getsource', 'getabsfile', 'getsourcefile', 'getfile',
+            'getsourcelines', 'findsource', 'getcomments', 'getframeinfo',
+            'getinnerframes', 'getouterframes', 'stack', 'trace'
+        ]:
+            if bad in symbols:
+                log.warn("%s: module MAY be using inspect.%s", module, bad)
+                safe = False
+    if '__name__' in symbols and '__main__' in symbols and '.' not in module:
+        if sys.version[:3]=="2.4":  # -m works w/zipfiles in 2.5
+            log.warn("%s: top-level module may be 'python -m' script", module)
+            safe = False
+    return safe
+
+def iter_symbols(code):
+    """Yield names and strings used by `code` and its nested code objects"""
+    for name in code.co_names: yield name
+    for const in code.co_consts:
+        if isinstance(const,basestring):
+            yield const
+        elif isinstance(const,CodeType):
+            for name in iter_symbols(const):
+                yield name
+
+def can_scan():
+    if not sys.platform.startswith('java') and sys.platform != 'cli':
+        # CPython, PyPy, etc.
+        return True
+    log.warn("Unable to analyze compiled code on this platform.")
+    log.warn("Please ask the author to include a 'zip_safe'"
+             " setting (either True or False) in the package's setup.py")
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+# Attribute names of options for commands that might need to be convinced to
+# install to the egg build directory
+
+INSTALL_DIRECTORY_ATTRS = [
+    'install_lib', 'install_dir', 'install_data', 'install_base'
+]
+
+def make_zipfile(zip_filename, base_dir, verbose=0, dry_run=0, compress=None,
+    mode='w'
+):
+    """Create a zip file from all the files under 'base_dir'.  The output
+    zip file will be named 'zip_filename'.  Uses the "zipfile" Python
+    module; returns the name of the output zip file.
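+
+    For instance (hypothetical paths)::
+
+        make_zipfile('dist/foo-1.0.egg', 'build/bdist.linux-i686/egg')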
+ """ + import zipfile + mkpath(os.path.dirname(zip_filename), dry_run=dry_run) + log.info("creating '%s' and adding '%s' to it", zip_filename, base_dir) + + def visit(z, dirname, names): + for name in names: + path = os.path.normpath(os.path.join(dirname, name)) + if os.path.isfile(path): + p = path[len(base_dir)+1:] + if not dry_run: + z.write(path, p) + log.debug("adding '%s'" % p) + + if compress is None: + compress = (sys.version>="2.4") # avoid 2.3 zipimport bug when 64 bits + + compression = [zipfile.ZIP_STORED, zipfile.ZIP_DEFLATED][bool(compress)] + if not dry_run: + z = zipfile.ZipFile(zip_filename, mode, compression=compression) + os.path.walk(base_dir, visit, z) + z.close() + else: + os.path.walk(base_dir, visit, None) + return zip_filename +# diff --git a/setuptools-0.6c16dev3.egg/setuptools/command/bdist_rpm.py b/setuptools-0.6c16dev3.egg/setuptools/command/bdist_rpm.py new file mode 100644 index 0000000..8c48da3 --- /dev/null +++ b/setuptools-0.6c16dev3.egg/setuptools/command/bdist_rpm.py @@ -0,0 +1,82 @@ +# This is just a kludge so that bdist_rpm doesn't guess wrong about the +# distribution name and version, if the egg_info command is going to alter +# them, another kludge to allow you to build old-style non-egg RPMs, and +# finally, a kludge to track .rpm files for uploading when run on Python <2.5. + +from distutils.command.bdist_rpm import bdist_rpm as _bdist_rpm +import sys, os + +class bdist_rpm(_bdist_rpm): + + def initialize_options(self): + _bdist_rpm.initialize_options(self) + self.no_egg = None + + if sys.version<"2.5": + # Track for uploading any .rpm file(s) moved to self.dist_dir + def move_file(self, src, dst, level=1): + _bdist_rpm.move_file(self, src, dst, level) + if dst==self.dist_dir and src.endswith('.rpm'): + getattr(self.distribution,'dist_files',[]).append( + ('bdist_rpm', + src.endswith('.src.rpm') and 'any' or sys.version[:3], + os.path.join(dst, os.path.basename(src))) + ) + + def run(self): + self.run_command('egg_info') # ensure distro name is up-to-date + _bdist_rpm.run(self) + + + + + + + + + + + + + + def _make_spec_file(self): + version = self.distribution.get_version() + rpmversion = version.replace('-','_') + spec = _bdist_rpm._make_spec_file(self) + line23 = '%define version '+version + line24 = '%define version '+rpmversion + spec = [ + line.replace( + "Source0: %{name}-%{version}.tar", + "Source0: %{name}-%{unmangled_version}.tar" + ).replace( + "setup.py install ", + "setup.py install --single-version-externally-managed " + ).replace( + "%setup", + "%setup -n %{name}-%{unmangled_version}" + ).replace(line23,line24) + for line in spec + ] + spec.insert(spec.index(line24)+1, "%define unmangled_version "+version) + return spec + + + + + + + + + + + + + + + + + + + + diff --git a/setuptools-0.6c16dev3.egg/setuptools/command/bdist_wininst.py b/setuptools-0.6c16dev3.egg/setuptools/command/bdist_wininst.py new file mode 100644 index 0000000..e8521f8 --- /dev/null +++ b/setuptools-0.6c16dev3.egg/setuptools/command/bdist_wininst.py @@ -0,0 +1,82 @@ +from distutils.command.bdist_wininst import bdist_wininst as _bdist_wininst +import os, sys + +class bdist_wininst(_bdist_wininst): + _good_upload = _bad_upload = None + + def create_exe(self, arcname, fullname, bitmap=None): + _bdist_wininst.create_exe(self, arcname, fullname, bitmap) + installer_name = self.get_installer_filename(fullname) + if self.target_version: + pyversion = self.target_version + # fix 2.5+ bdist_wininst ignoring --target-version spec + self._bad_upload = ('bdist_wininst', 
'any', installer_name) + else: + pyversion = 'any' + self._good_upload = ('bdist_wininst', pyversion, installer_name) + + def _fix_upload_names(self): + good, bad = self._good_upload, self._bad_upload + dist_files = getattr(self.distribution, 'dist_files', []) + if bad in dist_files: + dist_files.remove(bad) + if good not in dist_files: + dist_files.append(good) + + def reinitialize_command (self, command, reinit_subcommands=0): + cmd = self.distribution.reinitialize_command( + command, reinit_subcommands) + if command in ('install', 'install_lib'): + cmd.install_lib = None # work around distutils bug + return cmd + + def run(self): + self._is_running = True + try: + _bdist_wininst.run(self) + self._fix_upload_names() + finally: + self._is_running = False + + + if not hasattr(_bdist_wininst, 'get_installer_filename'): + def get_installer_filename(self, fullname): + # Factored out to allow overriding in subclasses + if self.target_version: + # if we create an installer for a specific python version, + # it's better to include this in the name + installer_name = os.path.join(self.dist_dir, + "%s.win32-py%s.exe" % + (fullname, self.target_version)) + else: + installer_name = os.path.join(self.dist_dir, + "%s.win32.exe" % fullname) + return installer_name + # get_installer_filename() + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/setuptools-0.6c16dev3.egg/setuptools/command/build_ext.py b/setuptools-0.6c16dev3.egg/setuptools/command/build_ext.py new file mode 100644 index 0000000..e0d5284 --- /dev/null +++ b/setuptools-0.6c16dev3.egg/setuptools/command/build_ext.py @@ -0,0 +1,285 @@ +from distutils.command.build_ext import build_ext as _du_build_ext +try: + # Attempt to use Pyrex for building extensions, if available + from Pyrex.Distutils.build_ext import build_ext as _build_ext +except ImportError: + _build_ext = _du_build_ext + +import os, sys +from distutils.file_util import copy_file +from setuptools.extension import Library +from distutils.ccompiler import new_compiler +from distutils.sysconfig import customize_compiler, get_config_var +get_config_var("LDSHARED") # make sure _config_vars is initialized +from distutils.sysconfig import _config_vars +from distutils import log +from distutils.errors import * + +have_rtld = False +use_stubs = False +libtype = 'shared' + +if sys.platform == "darwin": + use_stubs = True +elif os.name != 'nt': + try: + from dl import RTLD_NOW + have_rtld = True + use_stubs = True + except ImportError: + pass + +def if_dl(s): + if have_rtld: + return s + return '' + + + + + + +class build_ext(_build_ext): + def run(self): + """Build extensions in build directory, then copy if --inplace""" + old_inplace, self.inplace = self.inplace, 0 + _build_ext.run(self) + self.inplace = old_inplace + if old_inplace: + self.copy_extensions_to_source() + + def copy_extensions_to_source(self): + build_py = self.get_finalized_command('build_py') + for ext in self.extensions: + fullname = self.get_ext_fullname(ext.name) + filename = self.get_ext_filename(fullname) + modpath = fullname.split('.') + package = '.'.join(modpath[:-1]) + package_dir = build_py.get_package_dir(package) + dest_filename = os.path.join(package_dir,os.path.basename(filename)) + src_filename = os.path.join(self.build_lib,filename) + + # Always copy, even if source is older than destination, to ensure + # that the right extensions for the current Python/platform are + # used. 
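+            # (e.g., hypothetically, a foo.so left in the source tree by an
+            # earlier build for another interpreter could carry a newer
+            # timestamp and survive an update-only copy, shadowing the
+            # freshly built extension)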
+ copy_file( + src_filename, dest_filename, verbose=self.verbose, + dry_run=self.dry_run + ) + if ext._needs_stub: + self.write_stub(package_dir or os.curdir, ext, True) + + + if _build_ext is not _du_build_ext and not hasattr(_build_ext,'pyrex_sources'): + # Workaround for problems using some Pyrex versions w/SWIG and/or 2.4 + def swig_sources(self, sources, *otherargs): + # first do any Pyrex processing + sources = _build_ext.swig_sources(self, sources) or sources + # Then do any actual SWIG stuff on the remainder + return _du_build_ext.swig_sources(self, sources, *otherargs) + + + + def get_ext_filename(self, fullname): + filename = _build_ext.get_ext_filename(self,fullname) + if fullname in self.ext_map: + ext = self.ext_map[fullname] + if isinstance(ext,Library): + fn, ext = os.path.splitext(filename) + return self.shlib_compiler.library_filename(fn,libtype) + elif use_stubs and ext._links_to_dynamic: + d,fn = os.path.split(filename) + return os.path.join(d,'dl-'+fn) + return filename + + def initialize_options(self): + _build_ext.initialize_options(self) + self.shlib_compiler = None + self.shlibs = [] + self.ext_map = {} + + def finalize_options(self): + _build_ext.finalize_options(self) + self.extensions = self.extensions or [] + self.check_extensions_list(self.extensions) + self.shlibs = [ext for ext in self.extensions + if isinstance(ext,Library)] + if self.shlibs: + self.setup_shlib_compiler() + for ext in self.extensions: + ext._full_name = self.get_ext_fullname(ext.name) + for ext in self.extensions: + fullname = ext._full_name + self.ext_map[fullname] = ext + ltd = ext._links_to_dynamic = \ + self.shlibs and self.links_to_dynamic(ext) or False + ext._needs_stub = ltd and use_stubs and not isinstance(ext,Library) + filename = ext._file_name = self.get_ext_filename(fullname) + libdir = os.path.dirname(os.path.join(self.build_lib,filename)) + if ltd and libdir not in ext.library_dirs: + ext.library_dirs.append(libdir) + if ltd and use_stubs and os.curdir not in ext.runtime_library_dirs: + ext.runtime_library_dirs.append(os.curdir) + + def setup_shlib_compiler(self): + compiler = self.shlib_compiler = new_compiler( + compiler=self.compiler, dry_run=self.dry_run, force=self.force + ) + if sys.platform == "darwin": + tmp = _config_vars.copy() + try: + # XXX Help! I don't have any idea whether these are right... 
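+                # the three overrides below apply only while this
+                # customize_compiler() call runs; the ``finally`` clause
+                # restores the saved ``tmp`` mapping so later commands see
+                # the stock distutils configuration again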
+ _config_vars['LDSHARED'] = "gcc -Wl,-x -dynamiclib -undefined dynamic_lookup" + _config_vars['CCSHARED'] = " -dynamiclib" + _config_vars['SO'] = ".dylib" + customize_compiler(compiler) + finally: + _config_vars.clear() + _config_vars.update(tmp) + else: + customize_compiler(compiler) + + if self.include_dirs is not None: + compiler.set_include_dirs(self.include_dirs) + if self.define is not None: + # 'define' option is a list of (name,value) tuples + for (name,value) in self.define: + compiler.define_macro(name, value) + if self.undef is not None: + for macro in self.undef: + compiler.undefine_macro(macro) + if self.libraries is not None: + compiler.set_libraries(self.libraries) + if self.library_dirs is not None: + compiler.set_library_dirs(self.library_dirs) + if self.rpath is not None: + compiler.set_runtime_library_dirs(self.rpath) + if self.link_objects is not None: + compiler.set_link_objects(self.link_objects) + + # hack so distutils' build_extension() builds a library instead + compiler.link_shared_object = link_shared_object.__get__(compiler) + + + + def get_export_symbols(self, ext): + if isinstance(ext,Library): + return ext.export_symbols + return _build_ext.get_export_symbols(self,ext) + + def build_extension(self, ext): + _compiler = self.compiler + try: + if isinstance(ext,Library): + self.compiler = self.shlib_compiler + _build_ext.build_extension(self,ext) + if ext._needs_stub: + self.write_stub( + self.get_finalized_command('build_py').build_lib, ext + ) + finally: + self.compiler = _compiler + + def links_to_dynamic(self, ext): + """Return true if 'ext' links to a dynamic lib in the same package""" + # XXX this should check to ensure the lib is actually being built + # XXX as dynamic, and not just using a locally-found version or a + # XXX static-compiled version + libnames = dict.fromkeys([lib._full_name for lib in self.shlibs]) + pkg = '.'.join(ext._full_name.split('.')[:-1]+['']) + for libname in ext.libraries: + if pkg+libname in libnames: return True + return False + + def get_outputs(self): + outputs = _build_ext.get_outputs(self) + optimize = self.get_finalized_command('build_py').optimize + for ext in self.extensions: + if ext._needs_stub: + base = os.path.join(self.build_lib, *ext._full_name.split('.')) + outputs.append(base+'.py') + outputs.append(base+'.pyc') + if optimize: + outputs.append(base+'.pyo') + return outputs + + def write_stub(self, output_dir, ext, compile=False): + log.info("writing stub loader for %s to %s",ext._full_name, output_dir) + stub_file = os.path.join(output_dir, *ext._full_name.split('.'))+'.py' + if compile and os.path.exists(stub_file): + raise DistutilsError(stub_file+" already exists! 
Please delete.") + if not self.dry_run: + f = open(stub_file,'w') + f.write('\n'.join([ + "def __bootstrap__():", + " global __bootstrap__, __file__, __loader__", + " import sys, os, pkg_resources, imp"+if_dl(", dl"), + " __file__ = pkg_resources.resource_filename(__name__,%r)" + % os.path.basename(ext._file_name), + " del __bootstrap__", + " if '__loader__' in globals():", + " del __loader__", + if_dl(" old_flags = sys.getdlopenflags()"), + " old_dir = os.getcwd()", + " try:", + " os.chdir(os.path.dirname(__file__))", + if_dl(" sys.setdlopenflags(dl.RTLD_NOW)"), + " imp.load_dynamic(__name__,__file__)", + " finally:", + if_dl(" sys.setdlopenflags(old_flags)"), + " os.chdir(old_dir)", + "__bootstrap__()", + "" # terminal \n + ])) + f.close() + if compile: + from distutils.util import byte_compile + byte_compile([stub_file], optimize=0, + force=True, dry_run=self.dry_run) + optimize = self.get_finalized_command('install_lib').optimize + if optimize > 0: + byte_compile([stub_file], optimize=optimize, + force=True, dry_run=self.dry_run) + if os.path.exists(stub_file) and not self.dry_run: + os.unlink(stub_file) + + +if use_stubs or os.name=='nt': + # Build shared libraries + # + def link_shared_object(self, objects, output_libname, output_dir=None, + libraries=None, library_dirs=None, runtime_library_dirs=None, + export_symbols=None, debug=0, extra_preargs=None, + extra_postargs=None, build_temp=None, target_lang=None + ): self.link( + self.SHARED_LIBRARY, objects, output_libname, + output_dir, libraries, library_dirs, runtime_library_dirs, + export_symbols, debug, extra_preargs, extra_postargs, + build_temp, target_lang + ) +else: + # Build static libraries everywhere else + libtype = 'static' + + def link_shared_object(self, objects, output_libname, output_dir=None, + libraries=None, library_dirs=None, runtime_library_dirs=None, + export_symbols=None, debug=0, extra_preargs=None, + extra_postargs=None, build_temp=None, target_lang=None + ): + # XXX we need to either disallow these attrs on Library instances, + # or warn/abort here if set, or something... + #libraries=None, library_dirs=None, runtime_library_dirs=None, + #export_symbols=None, extra_preargs=None, extra_postargs=None, + #build_temp=None + + assert output_dir is None # distutils build_ext doesn't pass this + output_dir,filename = os.path.split(output_libname) + basename, ext = os.path.splitext(filename) + if self.library_filename("x").startswith('lib'): + # strip 'lib' prefix; this is kludgy if some platform uses + # a different prefix + basename = basename[3:] + + self.create_static_lib( + objects, basename, output_dir, debug, target_lang + ) diff --git a/setuptools-0.6c16dev3.egg/setuptools/command/build_py.py b/setuptools-0.6c16dev3.egg/setuptools/command/build_py.py new file mode 100644 index 0000000..c5e3d7f --- /dev/null +++ b/setuptools-0.6c16dev3.egg/setuptools/command/build_py.py @@ -0,0 +1,211 @@ +import os.path, sys, fnmatch +from distutils.command.build_py import build_py as _build_py +from distutils.util import convert_path +from glob import glob + +class build_py(_build_py): + """Enhanced 'build_py' command that includes data files with packages + + The data files are specified via a 'package_data' argument to 'setup()'. + See 'setuptools.dist.Distribution' for more details. + + Also, this version of the 'build_py' command allows you to specify both + 'py_modules' and 'packages' in the same setup operation. 
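+
+    A hypothetical setup() fragment exercising both features::
+
+        setup(py_modules=['standalone'], packages=['foo'],
+              package_data={'foo': ['data/*.dat']})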
+ """ + def finalize_options(self): + _build_py.finalize_options(self) + self.package_data = self.distribution.package_data + self.exclude_package_data = self.distribution.exclude_package_data or {} + if 'data_files' in self.__dict__: del self.__dict__['data_files'] + + def run(self): + self.old_run() + if sys.platform == "win32": + from setuptools.command.scriptsetup import do_scriptsetup + do_scriptsetup() + + def old_run(self): + """Build modules, packages, and copy data files to build directory""" + if not self.py_modules and not self.packages: + return + + if self.py_modules: + self.build_modules() + + if self.packages: + self.build_packages() + self.build_package_data() + + # Only compile actual .py files, using our base class' idea of what our + # output files are. + self.byte_compile(_build_py.get_outputs(self, include_bytecode=0)) + + def __getattr__(self,attr): + if attr=='data_files': # lazily compute data files + self.data_files = files = self._get_data_files(); return files + return _build_py.__getattr__(self,attr) + + def _get_data_files(self): + """Generate list of '(package,src_dir,build_dir,filenames)' tuples""" + self.analyze_manifest() + data = [] + for package in self.packages or (): + # Locate package source directory + src_dir = self.get_package_dir(package) + + # Compute package build directory + build_dir = os.path.join(*([self.build_lib] + package.split('.'))) + + # Length of path to strip from found files + plen = len(src_dir)+1 + + # Strip directory from globbed filenames + filenames = [ + file[plen:] for file in self.find_data_files(package, src_dir) + ] + data.append( (package, src_dir, build_dir, filenames) ) + return data + + def find_data_files(self, package, src_dir): + """Return filenames for package's data files in 'src_dir'""" + globs = (self.package_data.get('', []) + + self.package_data.get(package, [])) + files = self.manifest_files.get(package, [])[:] + for pattern in globs: + # Each pattern has to be converted to a platform-specific path + files.extend(glob(os.path.join(src_dir, convert_path(pattern)))) + return self.exclude_data_files(package, src_dir, files) + + def build_package_data(self): + """Copy data files into build directory""" + lastdir = None + for package, src_dir, build_dir, filenames in self.data_files: + for filename in filenames: + target = os.path.join(build_dir, filename) + self.mkpath(os.path.dirname(target)) + self.copy_file(os.path.join(src_dir, filename), target) + + + def analyze_manifest(self): + self.manifest_files = mf = {} + if not self.distribution.include_package_data: + return + src_dirs = {} + for package in self.packages or (): + # Locate package source directory + src_dirs[assert_relative(self.get_package_dir(package))] = package + + self.run_command('egg_info') + ei_cmd = self.get_finalized_command('egg_info') + for path in ei_cmd.filelist.files: + d,f = os.path.split(assert_relative(path)) + prev = None + oldf = f + while d and d!=prev and d not in src_dirs: + prev = d + d, df = os.path.split(d) + f = os.path.join(df, f) + if d in src_dirs: + if path.endswith('.py') and f==oldf: + continue # it's a module, not data + mf.setdefault(src_dirs[d],[]).append(path) + + def get_data_files(self): pass # kludge 2.4 for lazy computation + + if sys.version<"2.4": # Python 2.4 already has this code + def get_outputs(self, include_bytecode=1): + """Return complete list of files copied to the build directory + + This includes both '.py' files and data files, as well as '.pyc' + and '.pyo' files if 'include_bytecode' is true. 
(This method is + needed for the 'install_lib' command to do its job properly, and to + generate a correct installation manifest.) + """ + return _build_py.get_outputs(self, include_bytecode) + [ + os.path.join(build_dir, filename) + for package, src_dir, build_dir,filenames in self.data_files + for filename in filenames + ] + + def check_package(self, package, package_dir): + """Check namespace packages' __init__ for declare_namespace""" + try: + return self.packages_checked[package] + except KeyError: + pass + + init_py = _build_py.check_package(self, package, package_dir) + self.packages_checked[package] = init_py + + if not init_py or not self.distribution.namespace_packages: + return init_py + + for pkg in self.distribution.namespace_packages: + if pkg==package or pkg.startswith(package+'.'): + break + else: + return init_py + + f = open(init_py,'rU') + if 'declare_namespace' not in f.read(): + from distutils.errors import DistutilsError + raise DistutilsError( + "Namespace package problem: %s is a namespace package, but its\n" + "__init__.py does not call declare_namespace()! Please fix it.\n" + '(See the setuptools manual under "Namespace Packages" for ' + "details.)\n" % (package,) + ) + f.close() + return init_py + + def initialize_options(self): + self.packages_checked={} + _build_py.initialize_options(self) + + + + + + + + def exclude_data_files(self, package, src_dir, files): + """Filter filenames for package's data files in 'src_dir'""" + globs = (self.exclude_package_data.get('', []) + + self.exclude_package_data.get(package, [])) + bad = [] + for pattern in globs: + bad.extend( + fnmatch.filter( + files, os.path.join(src_dir, convert_path(pattern)) + ) + ) + bad = dict.fromkeys(bad) + seen = {} + return [ + f for f in files if f not in bad + and f not in seen and seen.setdefault(f,1) # ditch dupes + ] + + +def assert_relative(path): + if not os.path.isabs(path): + return path + from distutils.errors import DistutilsSetupError + raise DistutilsSetupError( +"""Error: setup script specifies an absolute path: + + %s + +setup() arguments must *always* be /-separated paths relative to the +setup.py directory, *never* absolute paths. 
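+
+For example, 'src/mypkg/data.txt' is acceptable; '/home/me/src/mypkg/data.txt'
+is not.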
+""" % path + ) + + + + + + + + + diff --git a/setuptools-0.6c16dev3.egg/setuptools/command/develop.py b/setuptools-0.6c16dev3.egg/setuptools/command/develop.py new file mode 100644 index 0000000..303f488 --- /dev/null +++ b/setuptools-0.6c16dev3.egg/setuptools/command/develop.py @@ -0,0 +1,165 @@ +from setuptools.command.easy_install import easy_install +from distutils.util import convert_path +from pkg_resources import Distribution, PathMetadata, normalize_path +from distutils import log +from distutils.errors import * +import sys, os, setuptools, glob + +class develop(easy_install): + """Set up package for development""" + + description = "install package in 'development mode'" + + user_options = easy_install.user_options + [ + ("uninstall", "u", "Uninstall this source package"), + ("egg-path=", None, "Set the path to be used in the .egg-link file"), + ] + + boolean_options = easy_install.boolean_options + ['uninstall'] + + command_consumes_arguments = False # override base + + def run(self): + self.old_run() + if sys.platform == "win32": + from setuptools.command.scriptsetup import do_scriptsetup + do_scriptsetup() + + def old_run(self): + if self.uninstall: + self.multi_version = True + self.uninstall_link() + else: + self.install_for_development() + self.warn_deprecated_options() + + def initialize_options(self): + self.uninstall = None + self.egg_path = None + easy_install.initialize_options(self) + self.setup_path = None + self.always_copy_from = '.' # always copy eggs installed in curdir + + def finalize_options(self): + ei = self.get_finalized_command("egg_info") + if ei.broken_egg_info: + raise DistutilsError( + "Please rename %r to %r before using 'develop'" + % (ei.egg_info, ei.broken_egg_info) + ) + self.args = [ei.egg_name] + easy_install.finalize_options(self) + # pick up setup-dir .egg files only: no .egg-info + self.package_index.scan(glob.glob('*.egg')) + + self.egg_link = os.path.join(self.install_dir, ei.egg_name+'.egg-link') + self.egg_base = ei.egg_base + if self.egg_path is None: + self.egg_path = os.path.abspath(ei.egg_base) + + target = normalize_path(self.egg_base) + if normalize_path(os.path.join(self.install_dir, self.egg_path)) != target: + raise DistutilsOptionError( + "--egg-path must be a relative path from the install" + " directory to "+target + ) + + # Make a distribution for the package's source + self.dist = Distribution( + target, + PathMetadata(target, os.path.abspath(ei.egg_info)), + project_name = ei.egg_name + ) + + p = self.egg_base.replace(os.sep,'/') + if p!= os.curdir: + p = '../' * (p.count('/')+1) + self.setup_path = p + p = normalize_path(os.path.join(self.install_dir, self.egg_path, p)) + if p != normalize_path(os.curdir): + raise DistutilsOptionError( + "Can't get a consistent path to setup script from" + " installation directory", p, normalize_path(os.curdir)) + + def install_for_development(self): + # Ensure metadata is up-to-date + self.run_command('egg_info') + # Build extensions in-place + self.reinitialize_command('build_ext', inplace=1) + self.run_command('build_ext') + self.install_site_py() # ensure that target dir is site-safe + if setuptools.bootstrap_install_from: + self.easy_install(setuptools.bootstrap_install_from) + setuptools.bootstrap_install_from = None + + # create an .egg-link in the installation dir, pointing to our egg + log.info("Creating %s (link to %s)", self.egg_link, self.egg_base) + if not self.dry_run: + f = open(self.egg_link,"w") + f.write(self.egg_path + "\n" + self.setup_path) + f.close() + # postprocess 
the installed distro, fixing up .pth, installing scripts, + # and handling requirements + self.process_distribution(None, self.dist, not self.no_deps) + + + def uninstall_link(self): + if os.path.exists(self.egg_link): + log.info("Removing %s (link to %s)", self.egg_link, self.egg_base) + contents = [line.rstrip() for line in file(self.egg_link)] + if contents not in ([self.egg_path], [self.egg_path, self.setup_path]): + log.warn("Link points to %s: uninstall aborted", contents) + return + if not self.dry_run: + os.unlink(self.egg_link) + if not self.dry_run: + self.update_pth(self.dist) # remove any .pth link to us + if self.distribution.scripts: + # XXX should also check for entry point scripts! + log.warn("Note: you must uninstall or replace scripts manually!") + + + + + + def install_egg_scripts(self, dist): + if dist is not self.dist: + # Installing a dependency, so fall back to normal behavior + return easy_install.install_egg_scripts(self,dist) + + # create wrapper scripts in the script dir, pointing to dist.scripts + + # new-style... + self.install_wrapper_scripts(dist) + + # ...and old-style + for script_name in self.distribution.scripts or []: + script_path = os.path.abspath(convert_path(script_name)) + script_name = os.path.basename(script_path) + f = open(script_path,'rU') + script_text = f.read() + f.close() + self.install_script(dist, script_name, script_text, script_path) + + + + + + + + + + + + + + + + + + + + + + + diff --git a/setuptools-0.6c16dev3.egg/setuptools/command/easy_install.py b/setuptools-0.6c16dev3.egg/setuptools/command/easy_install.py new file mode 100755 index 0000000..8ac42b4 --- /dev/null +++ b/setuptools-0.6c16dev3.egg/setuptools/command/easy_install.py @@ -0,0 +1,1739 @@ +#!python +"""\ +Easy Install +------------ + +A tool for doing automatic download/extract/build of distutils-based Python +packages. For detailed documentation, see the accompanying EasyInstall.txt +file, or visit the `EasyInstall home page`__. 
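+
+A typical command-line invocation looks like this (the requirement spec
+shown is illustrative)::
+
+    easy_install "SomePackage>=1.0"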
+ +__ http://peak.telecommunity.com/DevCenter/EasyInstall +""" +import sys, os.path, zipimport, shutil, tempfile, zipfile, re, stat, random +from glob import glob +from setuptools import Command, _dont_write_bytecode +from setuptools import __version__ as setuptools_version +from setuptools.sandbox import run_setup +from distutils import log, dir_util +from distutils.sysconfig import get_python_lib +from distutils.errors import DistutilsArgError, DistutilsOptionError, \ + DistutilsError +from setuptools.archive_util import unpack_archive +from setuptools.package_index import PackageIndex, parse_bdist_wininst +from setuptools.package_index import URL_SCHEME +from setuptools.command import bdist_egg, egg_info +from pkg_resources import * +sys_executable = os.path.normpath(sys.executable) + +__all__ = [ + 'samefile', 'easy_install', 'PthDistributions', 'extract_wininst_cfg', + 'main', 'get_exe_prefixes', +] + +def samefile(p1,p2): + if hasattr(os.path,'samefile') and ( + os.path.exists(p1) and os.path.exists(p2) + ): + return os.path.samefile(p1,p2) + return ( + os.path.normpath(os.path.normcase(p1)) == + os.path.normpath(os.path.normcase(p2)) + ) + +class easy_install(Command): + """Manage a download/build/install process""" + description = "Find/get/install Python packages" + command_consumes_arguments = True + + user_options = [ + ('prefix=', None, "installation prefix"), + ("zip-ok", "z", "install package as a zipfile"), + ("multi-version", "m", "make apps have to require() a version"), + ("upgrade", "U", "force upgrade (searches PyPI for latest versions)"), + ("install-dir=", "d", "install package to DIR"), + ("script-dir=", "s", "install scripts to DIR"), + ("exclude-scripts", "x", "Don't install scripts"), + ("always-copy", "a", "Copy all needed packages to install dir"), + ("index-url=", "i", "base URL of Python Package Index"), + ("find-links=", "f", "additional URL(s) to search for packages"), + ("delete-conflicting", "D", "no longer needed; don't use this"), + ("ignore-conflicts-at-my-risk", None, + "no longer needed; don't use this"), + ("build-directory=", "b", + "download/extract/build in DIR; keep the results"), + ('optimize=', 'O', + "also compile with optimization: -O1 for \"python -O\", " + "-O2 for \"python -OO\", and -O0 to disable [default: -O0]"), + ('record=', None, + "filename in which to record list of installed files"), + ('always-unzip', 'Z', "don't install as a zipfile, no matter what"), + ('site-dirs=','S',"list of directories where .pth files work"), + ('editable', 'e', "Install specified packages in editable form"), + ('no-deps', 'N', "don't install dependencies"), + ('allow-hosts=', 'H', "pattern(s) that hostnames must match"), + ('local-snapshots-ok', 'l', "allow building eggs from local checkouts"), + ] + boolean_options = [ + 'zip-ok', 'multi-version', 'exclude-scripts', 'upgrade', 'always-copy', + 'delete-conflicting', 'ignore-conflicts-at-my-risk', 'editable', + 'no-deps', 'local-snapshots-ok', + ] + negative_opt = {'always-unzip': 'zip-ok'} + create_index = PackageIndex + + def initialize_options(self): + self.zip_ok = self.local_snapshots_ok = None + self.install_dir = self.script_dir = self.exclude_scripts = None + self.index_url = None + self.find_links = None + self.build_directory = None + self.args = None + self.optimize = self.record = None + self.upgrade = self.always_copy = self.multi_version = None + self.editable = self.no_deps = self.allow_hosts = None + self.root = self.prefix = self.no_report = None + + # Options not specifiable via command 
line + self.package_index = None + self.pth_file = self.always_copy_from = None + self.delete_conflicting = None + self.ignore_conflicts_at_my_risk = None + self.site_dirs = None + self.installed_projects = {} + self.sitepy_installed = False + # Always read easy_install options, even if we are subclassed, or have + # an independent instance created. This ensures that defaults will + # always come from the standard configuration file(s)' "easy_install" + # section, even if this is a "develop" or "install" command, or some + # other embedding. + self._dry_run = None + self.verbose = self.distribution.verbose + self.distribution._set_command_options( + self, self.distribution.get_option_dict('easy_install') + ) + + def delete_blockers(self, blockers): + for filename in blockers: + if os.path.exists(filename) or os.path.islink(filename): + log.info("Deleting %s", filename) + if not self.dry_run: + if os.path.isdir(filename) and not os.path.islink(filename): + rmtree(filename) + else: + os.unlink(filename) + + def finalize_options(self): + self._expand('install_dir','script_dir','build_directory','site_dirs') + # If a non-default installation directory was specified, default the + # script directory to match it. + if self.script_dir is None: + self.script_dir = self.install_dir + + # Let install_dir get set by install_lib command, which in turn + # gets its info from the install command, and takes into account + # --prefix and --home and all that other crud. + self.set_undefined_options('install_lib', + ('install_dir','install_dir') + ) + # Likewise, set default script_dir from 'install_scripts.install_dir' + self.set_undefined_options('install_scripts', + ('install_dir', 'script_dir') + ) + # default --record from the install command + self.set_undefined_options('install', ('record', 'record')) + normpath = map(normalize_path, sys.path) + self.all_site_dirs = get_site_dirs() + if self.site_dirs is not None: + site_dirs = [ + os.path.expanduser(s.strip()) for s in self.site_dirs.split(',') + ] + for d in site_dirs: + if not os.path.isdir(d): + log.warn("%s (in --site-dirs) does not exist", d) + elif normalize_path(d) not in normpath: + raise DistutilsOptionError( + d+" (in --site-dirs) is not on sys.path" + ) + else: + self.all_site_dirs.append(normalize_path(d)) + if not self.editable: self.check_site_dir() + self.index_url = self.index_url or "http://pypi.python.org/simple" + self.shadow_path = self.all_site_dirs[:] + for path_item in self.install_dir, normalize_path(self.script_dir): + if path_item not in self.shadow_path: + self.shadow_path.insert(0, path_item) + + if self.allow_hosts is not None: + hosts = [s.strip() for s in self.allow_hosts.split(',')] + else: + hosts = ['*'] + if self.package_index is None: + self.package_index = self.create_index( + self.index_url, search_path = self.shadow_path+sys.path, hosts=hosts, + ) + self.local_index = Environment(self.shadow_path+sys.path) + + if self.find_links is not None: + if isinstance(self.find_links, basestring): + self.find_links = self.find_links.split() + else: + self.find_links = [] + if self.local_snapshots_ok: + self.package_index.scan_egg_links(self.shadow_path+sys.path) + self.package_index.add_find_links(self.find_links) + self.set_undefined_options('install_lib', ('optimize','optimize')) + if not isinstance(self.optimize,int): + try: + self.optimize = int(self.optimize) + if not (0 <= self.optimize <= 2): raise ValueError + except ValueError: + raise DistutilsOptionError("--optimize must be 0, 1, or 2") + + if 
self.delete_conflicting and self.ignore_conflicts_at_my_risk: + raise DistutilsOptionError( + "Can't use both --delete-conflicting and " + "--ignore-conflicts-at-my-risk at the same time" + ) + if self.editable and not self.build_directory: + raise DistutilsArgError( + "Must specify a build directory (-b) when using --editable" + ) + if not self.args: + raise DistutilsArgError( + "No urls, filenames, or requirements specified (see --help)") + + self.outputs = [] + + def run(self): + if self.verbose!=self.distribution.verbose: + log.set_verbosity(self.verbose) + try: + for spec in self.args: + self.easy_install(spec, not self.no_deps) + if self.record: + outputs = self.outputs + if self.root: # strip any package prefix + root_len = len(self.root) + for counter in xrange(len(outputs)): + outputs[counter] = outputs[counter][root_len:] + from distutils import file_util + self.execute( + file_util.write_file, (self.record, outputs), + "writing list of installed files to '%s'" % + self.record + ) + self.warn_deprecated_options() + finally: + log.set_verbosity(self.distribution.verbose) + + def pseudo_tempname(self): + """Return a pseudo-tempname base in the install directory. + This code is intentionally naive; if a malicious party can write to + the target directory you're already in deep doodoo. + """ + try: + pid = os.getpid() + except: + pid = random.randint(0,sys.maxint) + return os.path.join(self.install_dir, "test-easy-install-%s" % pid) + + def warn_deprecated_options(self): + if self.delete_conflicting or self.ignore_conflicts_at_my_risk: + log.warn( + "Note: The -D, --delete-conflicting and" + " --ignore-conflicts-at-my-risk no longer have any purpose" + " and should not be used." + ) + + def check_site_dir(self): + """Verify that self.install_dir is .pth-capable dir, if needed""" + instdir = normalize_path(self.install_dir) + pth_file = os.path.join(instdir,'easy-install.pth') + + # mkdir it if necessary + try: + os.makedirs(instdir) + except OSError: + # Oh well -- hopefully this error simply means that it is already there. + # If not the subsequent write test will identify the problem. + pass + # add it to site dirs + self.all_site_dirs.append(instdir) + + # Is it a configured, PYTHONPATH, implicit, or explicit site dir? + is_site_dir = instdir in self.all_site_dirs + + if not is_site_dir and not self.multi_version: + # No? 
Then directly test whether it does .pth file processing + is_site_dir = self.check_pth_processing() + else: + # make sure we can write to target dir + testfile = self.pseudo_tempname()+'.write-test' + test_exists = os.path.exists(testfile) + try: + if test_exists: os.unlink(testfile) + open(testfile,'w').close() + os.unlink(testfile) + except (OSError,IOError): + self.cant_write_to_target() + + if not is_site_dir and not self.multi_version: + # Can't install non-multi to non-site dir + log.warn(self.no_default_version_msg()) + + if is_site_dir: + if self.pth_file is None: + self.pth_file = PthDistributions(pth_file, self.all_site_dirs) + else: + self.pth_file = None + + if self.multi_version and not os.path.exists(pth_file): + self.sitepy_installed = True # don't need site.py in this case + self.pth_file = None # and don't create a .pth file + self.install_dir = instdir + + def cant_write_to_target(self): + msg = """can't create or remove files in install directory + +The following error occurred while trying to add or remove files in the +installation directory: + + %s + +The installation directory you specified (via --install-dir, --prefix, or +the distutils default setting) was: + + %s +""" % (sys.exc_info()[1], self.install_dir,) + + if not os.path.exists(self.install_dir): + msg += """ +This directory does not currently exist. Please create it and try again, or +choose a different installation directory (using the -d or --install-dir +option). +""" + else: + msg += """ +Perhaps your account does not have write access to this directory? If the +installation directory is a system-owned directory, you may need to sign in +as the administrator or "root" account. If you do not have administrative +access to this machine, you may wish to choose a different installation +directory, preferably one that is listed in your PYTHONPATH environment +variable. + +For information on other options, you may wish to consult the +documentation at: + + http://peak.telecommunity.com/EasyInstall.html + +Please make the appropriate changes for your system and try again. +""" + raise DistutilsError(msg) + + + + + def check_pth_processing(self): + """Empirically verify whether .pth files are supported in inst. 
dir""" + instdir = self.install_dir + log.info("Checking .pth file support in %s", instdir) + pth_file = self.pseudo_tempname()+".pth" + ok_file = pth_file+'.ok' + ok_exists = os.path.exists(ok_file) + try: + if ok_exists: os.unlink(ok_file) + f = open(pth_file,'w') + except (OSError,IOError): + self.cant_write_to_target() + else: + try: + f.write("import os;open(%r,'w').write('OK')\n" % (ok_file,)) + f.close(); f=None + executable = sys.executable + if os.name=='nt': + dirname,basename = os.path.split(executable) + alt = os.path.join(dirname,'pythonw.exe') + if basename.lower()=='python.exe' and os.path.exists(alt): + # use pythonw.exe to avoid opening a console window + executable = alt + + from distutils.spawn import spawn + spawn([executable,'-E','-c','pass'],0) + + if os.path.exists(ok_file): + log.info( + "TEST PASSED: %s appears to support .pth files", + instdir + ) + return True + finally: + if f: f.close() + if os.path.exists(ok_file): os.unlink(ok_file) + if os.path.exists(pth_file): os.unlink(pth_file) + if not self.multi_version: + log.warn("TEST FAILED: %s does NOT support .pth files", instdir) + return False + + def install_egg_scripts(self, dist): + """Write all the scripts for `dist`, unless scripts are excluded""" + if not self.exclude_scripts and dist.metadata_isdir('scripts'): + for script_name in dist.metadata_listdir('scripts'): + self.install_script( + dist, script_name, + dist.get_metadata('scripts/'+script_name) + ) + self.install_wrapper_scripts(dist) + + def add_output(self, path): + if os.path.isdir(path): + for base, dirs, files in os.walk(path): + for filename in files: + self.outputs.append(os.path.join(base,filename)) + else: + self.outputs.append(path) + + def not_editable(self, spec): + if self.editable: + raise DistutilsArgError( + "Invalid argument %r: you can't use filenames or URLs " + "with --editable (except via the --find-links option)." 
+ % (spec,) + ) + + def check_editable(self,spec): + if not self.editable: + return + + if os.path.exists(os.path.join(self.build_directory, spec.key)): + raise DistutilsArgError( + "%r already exists in %s; can't do a checkout there" % + (spec.key, self.build_directory) + ) + + + + + + + def easy_install(self, spec, deps=False): + tmpdir = tempfile.mkdtemp(prefix="easy_install-") + download = None + if not self.editable: self.install_site_py() + + try: + if not isinstance(spec,Requirement): + if URL_SCHEME(spec): + # It's a url, download it to tmpdir and process + self.not_editable(spec) + download = self.package_index.download(spec, tmpdir) + return self.install_item(None, download, tmpdir, deps, True) + + elif os.path.exists(spec): + # Existing file or directory, just process it directly + self.not_editable(spec) + return self.install_item(None, spec, tmpdir, deps, True) + else: + spec = parse_requirement_arg(spec) + + self.check_editable(spec) + dist = self.package_index.fetch_distribution( + spec, tmpdir, self.upgrade, self.editable, not self.always_copy, + self.local_index + ) + if dist is None: + msg = "Could not find suitable distribution for %r" % spec + if self.always_copy: + msg+=" (--always-copy skips system and development eggs)" + raise DistutilsError(msg) + elif dist.precedence==DEVELOP_DIST: + # .egg-info dists don't need installing, just process deps + self.process_distribution(spec, dist, deps, "Using") + return dist + else: + return self.install_item(spec, dist.location, tmpdir, deps) + + finally: + if os.path.exists(tmpdir): + rmtree(tmpdir) + + def install_item(self, spec, download, tmpdir, deps, install_needed=False): + + # Installation is also needed if file in tmpdir or is not an egg + install_needed = install_needed or self.always_copy + install_needed = install_needed or os.path.dirname(download) == tmpdir + install_needed = install_needed or not download.endswith('.egg') + install_needed = install_needed or ( + self.always_copy_from is not None and + os.path.dirname(normalize_path(download)) == + normalize_path(self.always_copy_from) + ) + + if spec and not install_needed: + # at this point, we know it's a local .egg, we just don't know if + # it's already installed. 
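+            # (self.local_index holds the distributions already visible on
+            # the shadow path, keyed by project name; finding one whose
+            # location equals `download` means this egg is already installed.)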
+ for dist in self.local_index[spec.project_name]: + if dist.location==download: + break + else: + install_needed = True # it's not in the local index + + log.info("Processing %s", os.path.basename(download)) + + if install_needed: + dists = self.install_eggs(spec, download, tmpdir) + for dist in dists: + self.process_distribution(spec, dist, deps) + else: + dists = [self.check_conflicts(self.egg_distribution(download))] + self.process_distribution(spec, dists[0], deps, "Using") + + if spec is not None: + for dist in dists: + if dist in spec: + return dist + + + + + + + def process_distribution(self, requirement, dist, deps=True, *info): + self.update_pth(dist) + self.package_index.add(dist) + self.local_index.add(dist) + self.install_egg_scripts(dist) + self.installed_projects[dist.key] = dist + log.info(self.installation_report(requirement, dist, *info)) + if dist.has_metadata('dependency_links.txt'): + self.package_index.add_find_links( + dist.get_metadata_lines('dependency_links.txt') + ) + if not deps and not self.always_copy: + return + elif requirement is not None and dist.key != requirement.key: + log.warn("Skipping dependencies for %s", dist) + return # XXX this is not the distribution we were looking for + elif requirement is None or dist not in requirement: + # if we wound up with a different version, resolve what we've got + distreq = dist.as_requirement() + requirement = requirement or distreq + requirement = Requirement( + distreq.project_name, distreq.specs, requirement.extras + ) + log.info("Processing dependencies for %s", requirement) + try: + distros = WorkingSet([]).resolve( + [requirement], self.local_index, self.easy_install + ) + except DistributionNotFound, e: + raise DistutilsError( + "Could not find required distribution %s" % e.args + ) + except VersionConflict, e: + raise DistutilsError( + "Installed distribution %s conflicts with requirement %s" + % e.args + ) + if self.always_copy or self.always_copy_from: + # Force all the relevant distros to be copied or activated + for dist in distros: + if dist.key not in self.installed_projects: + self.easy_install(dist.as_requirement()) + log.info("Finished processing dependencies for %s", requirement) + + def should_unzip(self, dist): + if self.zip_ok is not None: + return not self.zip_ok + if dist.has_metadata('not-zip-safe'): + return True + if not dist.has_metadata('zip-safe'): + return True + return False + + def maybe_move(self, spec, dist_filename, setup_base): + dst = os.path.join(self.build_directory, spec.key) + if os.path.exists(dst): + log.warn( + "%r already exists in %s; build directory %s will not be kept", + spec.key, self.build_directory, setup_base + ) + return setup_base + if os.path.isdir(dist_filename): + setup_base = dist_filename + else: + if os.path.dirname(dist_filename)==setup_base: + os.unlink(dist_filename) # get it out of the tmp dir + contents = os.listdir(setup_base) + if len(contents)==1: + dist_filename = os.path.join(setup_base,contents[0]) + if os.path.isdir(dist_filename): + # if the only thing there is a directory, move it instead + setup_base = dist_filename + ensure_directory(dst); shutil.move(setup_base, dst) + return dst + + def install_wrapper_scripts(self, dist): + if not self.exclude_scripts: + for args in get_script_args(dist, script_dir=self.script_dir): + self.write_script(*args) + + + + def install_script(self, dist, script_name, script_text, dev_path=None): + """Generate a legacy script wrapper and install it""" + spec = str(dist.as_requirement()) + is_script = 
is_python_script(script_text, script_name) + + requires = [spec] + [str(r) for r in dist.requires()] + if is_script and dev_path: + script_text = get_script_header(script_text) + ( + "# EASY-INSTALL-DEV-SCRIPT: %(spec)r,%(script_name)r\n" + "__requires__ = %(requires)r\n" + "from pkg_resources import require; require(%(spec)r)\n" + "del require\n" + "__file__ = %(dev_path)r\n" + "execfile(__file__)\n" + ) % locals() + elif is_script: + script_text = get_script_header(script_text) + ( + "# EASY-INSTALL-SCRIPT: %(spec)r,%(script_name)r\n" + "__requires__ = %(requires)r\n" + "import pkg_resources\n" + "pkg_resources.run_script(%(spec)r, %(script_name)r)\n" + ) % locals() + self.write_script(script_name, script_text, 'b') + + def write_script(self, script_name, contents, mode="t", blockers=()): + """Write an executable file to the scripts directory""" + self.delete_blockers( # clean up old .py/.pyw w/o a script + [os.path.join(self.script_dir,x) for x in blockers]) + log.info("Installing %s script to %s", script_name, self.script_dir) + target = os.path.join(self.script_dir, script_name) + self.add_output(target) + + if not self.dry_run: + ensure_directory(target) + f = open(target,"w"+mode) + f.write(contents) + f.close() + chmod(target,0755) + + + + + def install_eggs(self, spec, dist_filename, tmpdir): + # .egg dirs or files are already built, so just return them + if dist_filename.lower().endswith('.egg'): + return [self.install_egg(dist_filename, tmpdir)] + elif dist_filename.lower().endswith('.exe'): + return [self.install_exe(dist_filename, tmpdir)] + + # Anything else, try to extract and build + setup_base = tmpdir + if os.path.isfile(dist_filename) and not dist_filename.endswith('.py'): + unpack_archive(dist_filename, tmpdir, self.unpack_progress) + elif os.path.isdir(dist_filename): + setup_base = os.path.abspath(dist_filename) + + if (setup_base.startswith(tmpdir) # something we downloaded + and self.build_directory and spec is not None + ): + setup_base = self.maybe_move(spec, dist_filename, setup_base) + + # Find the setup.py file + setup_script = os.path.join(setup_base, 'setup.py') + + if not os.path.exists(setup_script): + setups = glob(os.path.join(setup_base, '*', 'setup.py')) + if not setups: + raise DistutilsError( + "Couldn't find a setup script in %s" % os.path.abspath(dist_filename) + ) + if len(setups)>1: + raise DistutilsError( + "Multiple setup scripts in %s" % os.path.abspath(dist_filename) + ) + setup_script = setups[0] + + # Now run it, and return the result + if self.editable: + log.info(self.report_editable(spec, setup_script)) + return [] + else: + return self.build_and_install(setup_script, setup_base) + + def egg_distribution(self, egg_path): + if os.path.isdir(egg_path): + metadata = PathMetadata(egg_path,os.path.join(egg_path,'EGG-INFO')) + else: + metadata = EggMetadata(zipimport.zipimporter(egg_path)) + return Distribution.from_filename(egg_path,metadata=metadata) + + def install_egg(self, egg_path, tmpdir): + destination = os.path.join(self.install_dir,os.path.basename(egg_path)) + destination = os.path.abspath(destination) + if not self.dry_run: + ensure_directory(destination) + + dist = self.egg_distribution(egg_path) + self.check_conflicts(dist) + if not samefile(egg_path, destination): + if os.path.isdir(destination) and not os.path.islink(destination): + dir_util.remove_tree(destination, dry_run=self.dry_run) + elif os.path.exists(destination): + self.execute(os.unlink,(destination,),"Removing "+destination) + uncache_zipdir(destination) + if 
os.path.isdir(egg_path): + if egg_path.startswith(tmpdir): + f,m = shutil.move, "Moving" + else: + f,m = shutil.copytree, "Copying" + elif self.should_unzip(dist): + self.mkpath(destination) + f,m = self.unpack_and_compile, "Extracting" + elif egg_path.startswith(tmpdir): + f,m = shutil.move, "Moving" + else: + f,m = shutil.copy2, "Copying" + + self.execute(f, (egg_path, destination), + (m+" %s to %s") % + (os.path.basename(egg_path),os.path.dirname(destination))) + + self.add_output(destination) + return self.egg_distribution(destination) + + def install_exe(self, dist_filename, tmpdir): + # See if it's valid, get data + cfg = extract_wininst_cfg(dist_filename) + if cfg is None: + raise DistutilsError( + "%s is not a valid distutils Windows .exe" % dist_filename + ) + # Create a dummy distribution object until we build the real distro + dist = Distribution(None, + project_name=cfg.get('metadata','name'), + version=cfg.get('metadata','version'), platform="win32" + ) + + # Convert the .exe to an unpacked egg + egg_path = dist.location = os.path.join(tmpdir, dist.egg_name()+'.egg') + egg_tmp = egg_path+'.tmp' + egg_info = os.path.join(egg_tmp, 'EGG-INFO') + pkg_inf = os.path.join(egg_info, 'PKG-INFO') + ensure_directory(pkg_inf) # make sure EGG-INFO dir exists + dist._provider = PathMetadata(egg_tmp, egg_info) # XXX + self.exe_to_egg(dist_filename, egg_tmp) + + # Write EGG-INFO/PKG-INFO + if not os.path.exists(pkg_inf): + f = open(pkg_inf,'w') + f.write('Metadata-Version: 1.0\n') + for k,v in cfg.items('metadata'): + if k!='target_version': + f.write('%s: %s\n' % (k.replace('_','-').title(), v)) + f.close() + script_dir = os.path.join(egg_info,'scripts') + self.delete_blockers( # delete entry-point scripts to avoid duping + [os.path.join(script_dir,args[0]) for args in get_script_args(dist)] + ) + # Build .egg file from tmpdir + bdist_egg.make_zipfile( + egg_path, egg_tmp, verbose=self.verbose, dry_run=self.dry_run + ) + # install the .egg + return self.install_egg(egg_path, tmpdir) + + def exe_to_egg(self, dist_filename, egg_tmp): + """Extract a bdist_wininst to the directories an egg would use""" + # Check for .pth file and set up prefix translations + prefixes = get_exe_prefixes(dist_filename) + to_compile = [] + native_libs = [] + top_level = {} + def process(src,dst): + s = src.lower() + for old,new in prefixes: + if s.startswith(old): + src = new+src[len(old):] + parts = src.split('/') + dst = os.path.join(egg_tmp, *parts) + dl = dst.lower() + if dl.endswith('.pyd') or dl.endswith('.dll'): + parts[-1] = bdist_egg.strip_module(parts[-1]) + top_level[os.path.splitext(parts[0])[0]] = 1 + native_libs.append(src) + elif dl.endswith('.py') and old!='SCRIPTS/': + top_level[os.path.splitext(parts[0])[0]] = 1 + to_compile.append(dst) + return dst + if not src.endswith('.pth'): + log.warn("WARNING: can't process %s", src) + return None + # extract, tracking .pyd/.dll->native_libs and .py -> to_compile + unpack_archive(dist_filename, egg_tmp, process) + stubs = [] + for res in native_libs: + if res.lower().endswith('.pyd'): # create stubs for .pyd's + parts = res.split('/') + resource = parts[-1] + parts[-1] = bdist_egg.strip_module(parts[-1])+'.py' + pyfile = os.path.join(egg_tmp, *parts) + to_compile.append(pyfile); stubs.append(pyfile) + bdist_egg.write_stub(resource, pyfile) + self.byte_compile(to_compile) # compile .py's + bdist_egg.write_safety_flag(os.path.join(egg_tmp,'EGG-INFO'), + bdist_egg.analyze_egg(egg_tmp, stubs)) # write zip-safety flag + + for name in 'top_level','native_libs': + 
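+            # writes EGG-INFO/top_level.txt and EGG-INFO/native_libs.txt,
+            # one entry per line, unless the .exe already supplied them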
if locals()[name]: + txt = os.path.join(egg_tmp, 'EGG-INFO', name+'.txt') + if not os.path.exists(txt): + open(txt,'w').write('\n'.join(locals()[name])+'\n') + + def check_conflicts(self, dist): + """Verify that there are no conflicting "old-style" packages""" + + return dist # XXX temporarily disable until new strategy is stable + from imp import find_module, get_suffixes + from glob import glob + + blockers = [] + names = dict.fromkeys(dist._get_metadata('top_level.txt')) # XXX private attr + + exts = {'.pyc':1, '.pyo':1} # get_suffixes() might leave one out + for ext,mode,typ in get_suffixes(): + exts[ext] = 1 + + for path,files in expand_paths([self.install_dir]+self.all_site_dirs): + for filename in files: + base,ext = os.path.splitext(filename) + if base in names: + if not ext: + # no extension, check for package + try: + f, filename, descr = find_module(base, [path]) + except ImportError: + continue + else: + if f: f.close() + if filename not in blockers: + blockers.append(filename) + elif ext in exts and base!='site': # XXX ugh + blockers.append(os.path.join(path,filename)) + if blockers: + self.found_conflicts(dist, blockers) + + return dist + + def found_conflicts(self, dist, blockers): + if self.delete_conflicting: + log.warn("Attempting to delete conflicting packages:") + return self.delete_blockers(blockers) + + msg = """\ +------------------------------------------------------------------------- +CONFLICT WARNING: + +The following modules or packages have the same names as modules or +packages being installed, and will be *before* the installed packages in +Python's search path. You MUST remove all of the relevant files and +directories before you will be able to use the package(s) you are +installing: + + %s + +""" % '\n '.join(blockers) + + if self.ignore_conflicts_at_my_risk: + msg += """\ +(Note: you can run EasyInstall on '%s' with the +--delete-conflicting option to attempt deletion of the above files +and/or directories.) +""" % dist.project_name + else: + msg += """\ +Note: you can attempt this installation again with EasyInstall, and use +either the --delete-conflicting (-D) option or the +--ignore-conflicts-at-my-risk option, to either delete the above files +and directories, or to ignore the conflicts, respectively. Note that if +you ignore the conflicts, the installed package(s) may not work. +""" + msg += """\ +------------------------------------------------------------------------- +""" + sys.stderr.write(msg) + sys.stderr.flush() + if not self.ignore_conflicts_at_my_risk: + raise DistutilsError("Installation aborted due to conflicts") + + def installation_report(self, req, dist, what="Installed"): + """Helpful installation message for display to package users""" + msg = "\n%(what)s %(eggloc)s%(extras)s" + if self.multi_version and not self.no_report: + msg += """ + +Because this distribution was installed --multi-version, before you can +import modules from this package in an application, you will need to +'import pkg_resources' and then use a 'require()' call similar to one of +these examples, in order to select the desired version: + + pkg_resources.require("%(name)s") # latest installed version + pkg_resources.require("%(name)s==%(version)s") # this exact version + pkg_resources.require("%(name)s>=%(version)s") # this version or higher +""" + if self.install_dir not in map(normalize_path,sys.path): + msg += """ + +Note also that the installation directory must be on sys.path at runtime for +this to work. (e.g. 
by being the application's script directory, by being on +PYTHONPATH, or by being added to sys.path by your code.) +""" + eggloc = dist.location + name = dist.project_name + version = dist.version + extras = '' # TODO: self.report_extras(req, dist) + return msg % locals() + + def report_editable(self, spec, setup_script): + dirname = os.path.dirname(setup_script) + python = sys.executable + return """\nExtracted editable version of %(spec)s to %(dirname)s + +If it uses setuptools in its setup script, you can activate it in +"development" mode by going to that directory and running:: + + %(python)s setup.py develop + +See the setuptools documentation for the "develop" command for more info. +""" % locals() + + def run_setup(self, setup_script, setup_base, args): + sys.modules.setdefault('distutils.command.bdist_egg', bdist_egg) + sys.modules.setdefault('distutils.command.egg_info', egg_info) + + args = list(args) + if self.verbose>2: + v = 'v' * (self.verbose - 1) + args.insert(0,'-'+v) + elif self.verbose<2: + args.insert(0,'-q') + if self.dry_run: + args.insert(0,'-n') + log.info( + "Running %s %s", setup_script[len(setup_base)+1:], ' '.join(args) + ) + try: + run_setup(setup_script, args) + except SystemExit, v: + raise DistutilsError("Setup script exited with %s" % (v.args[0],)) + + def build_and_install(self, setup_script, setup_base): + args = ['bdist_egg', '--dist-dir'] + dist_dir = tempfile.mkdtemp( + prefix='egg-dist-tmp-', dir=os.path.dirname(setup_script) + ) + try: + args.append(dist_dir) + self.run_setup(setup_script, setup_base, args) + all_eggs = Environment([dist_dir]) + eggs = [] + for key in all_eggs: + for dist in all_eggs[key]: + eggs.append(self.install_egg(dist.location, setup_base)) + if not eggs and not self.dry_run: + log.warn("No eggs found in %s (setup script problem?)", + dist_dir) + return eggs + finally: + rmtree(dist_dir) + log.set_verbosity(self.verbose) # restore our log verbosity + + def update_pth(self,dist): + if self.pth_file is None: + return + + for d in self.pth_file[dist.key]: # drop old entries + if self.multi_version or d.location != dist.location: + log.info("Removing %s from easy-install.pth file", d) + self.pth_file.remove(d) + if d.location in self.shadow_path: + self.shadow_path.remove(d.location) + + if not self.multi_version: + if dist.location in self.pth_file.paths: + log.info( + "%s is already the active version in easy-install.pth", + dist + ) + else: + log.info("Adding %s to easy-install.pth file", dist) + self.pth_file.add(dist) # add new entry + if dist.location not in self.shadow_path: + self.shadow_path.append(dist.location) + + if not self.dry_run: + + self.pth_file.save() + + if dist.key=='setuptools': + # Ensure that setuptools itself never becomes unavailable! + # XXX should this check for latest version? 
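+            # A one-line setuptools.pth pins the setuptools egg on sys.path
+            # even if easy-install.pth is later rewritten; the path is
+            # stored relative to the install directory where possible.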
+ filename = os.path.join(self.install_dir,'setuptools.pth') + if os.path.islink(filename): os.unlink(filename) + f = open(filename, 'wt') + f.write(self.pth_file.make_relative(dist.location)+'\n') + f.close() + + def unpack_progress(self, src, dst): + # Progress filter for unpacking + log.debug("Unpacking %s to %s", src, dst) + return dst # only unpack-and-compile skips files for dry run + + def unpack_and_compile(self, egg_path, destination): + to_compile = []; to_chmod = [] + + def pf(src,dst): + if dst.endswith('.py') and not src.startswith('EGG-INFO/'): + to_compile.append(dst) + elif dst.endswith('.dll') or dst.endswith('.so'): + to_chmod.append(dst) + self.unpack_progress(src,dst) + return not self.dry_run and dst or None + + unpack_archive(egg_path, destination, pf) + self.byte_compile(to_compile) + if not self.dry_run: + for f in to_chmod: + mode = ((os.stat(f)[stat.ST_MODE]) | 0555) & 07755 + chmod(f, mode) + + def byte_compile(self, to_compile): + if _dont_write_bytecode: + self.warn('byte-compiling is disabled, skipping.') + return + from distutils.util import byte_compile + try: + # try to make the byte compile messages quieter + log.set_verbosity(self.verbose - 1) + + byte_compile(to_compile, optimize=0, force=1, dry_run=self.dry_run) + if self.optimize: + byte_compile( + to_compile, optimize=self.optimize, force=1, + dry_run=self.dry_run + ) + finally: + log.set_verbosity(self.verbose) # restore original verbosity + + + + + + + + + + def no_default_version_msg(self): + return """bad install directory or PYTHONPATH + +You are attempting to install a package to a directory that is not +on PYTHONPATH and which Python does not read ".pth" files from. The +installation directory you specified (via --install-dir, --prefix, or +the distutils default setting) was: + + %s + +and your PYTHONPATH environment variable currently contains: + + %r + +Here are some of your options for correcting the problem: + +* You can choose a different installation directory, i.e., one that is + on PYTHONPATH or supports .pth files + +* You can add the installation directory to the PYTHONPATH environment + variable. (It must then also be on PYTHONPATH whenever you run + Python and want to use the package(s) you are installing.) + +* You can set up the installation directory to support ".pth" files by + using one of the approaches described here: + + http://peak.telecommunity.com/EasyInstall.html#custom-installation-locations + +Proceeding to install. Please remember that unless you make one of +these changes you will not be able to run the installed code. +""" % ( + self.install_dir, os.environ.get('PYTHONPATH','') + ) + + + + + + + + + + + def install_site_py(self): + """Make sure there's a site.py in the target dir, if needed""" + + if self.sitepy_installed: + return # already did it, or don't need to + + sitepy = os.path.join(self.install_dir, "site.py") + source = resource_string("setuptools", "site-patch.py") + current = "" + + if os.path.exists(sitepy): + log.debug("Checking existing site.py in %s", self.install_dir) + current = open(sitepy,'rb').read() + if not current.startswith('def __boot():'): + print ("\n" + "***********************************************************************\n" + "Warning: %s is not a\n" + "setuptools-generated site.py. 
It will not be overwritten.\n" + "***********************************************************************\n" + ) % (sitepy,) + self.sitepy_installed = True + return + + if current != source: + log.info("Creating %s", sitepy) + if not self.dry_run: + ensure_directory(sitepy) + f = open(sitepy,'wb') + f.write(source) + f.close() + self.byte_compile([sitepy]) + + self.sitepy_installed = True + + + + + + + + + + + + + INSTALL_SCHEMES = dict( + posix = dict( + install_dir = '$base/lib/python$py_version_short/site-packages', + script_dir = '$base/bin', + ), + ) + + DEFAULT_SCHEME = dict( + install_dir = '$base/Lib/site-packages', + script_dir = '$base/Scripts', + ) + + def _expand(self, *attrs): + config_vars = self.get_finalized_command('install').config_vars + + if self.prefix: + # Set default install_dir/scripts from --prefix + config_vars = config_vars.copy() + config_vars['base'] = self.prefix + scheme = self.INSTALL_SCHEMES.get(os.name,self.DEFAULT_SCHEME) + for attr,val in scheme.items(): + if getattr(self,attr,None) is None: + setattr(self,attr,val) + + from distutils.util import subst_vars + for attr in attrs: + val = getattr(self, attr) + if val is not None: + val = subst_vars(val, config_vars) + if os.name == 'posix': + val = os.path.expanduser(val) + setattr(self, attr, val) + + + + + + + + + +def get_site_dirs(): + # return a list of 'site' dirs + sitedirs = filter(None,os.environ.get('PYTHONPATH','').split(os.pathsep)) + prefixes = [sys.prefix] + if sys.exec_prefix != sys.prefix: + prefixes.append(sys.exec_prefix) + for prefix in prefixes: + if prefix: + if sys.platform in ('os2emx', 'riscos'): + sitedirs.append(os.path.join(prefix, "Lib", "site-packages")) + elif os.sep == '/': + sitedirs.extend([os.path.join(prefix, + "lib", + "python" + sys.version[:3], + "site-packages"), + os.path.join(prefix, "lib", "site-python")]) + else: + sitedirs.extend( + [prefix, os.path.join(prefix, "lib", "site-packages")] + ) + if sys.platform == 'darwin': + # for framework builds *only* we add the standard Apple + # locations. 
Currently only per-user, but /Library and
+            # /Network/Library could be added too
+            if 'Python.framework' in prefix:
+                home = os.environ.get('HOME')
+                if home:
+                    sitedirs.append(
+                        os.path.join(home,
+                                     'Library',
+                                     'Python',
+                                     sys.version[:3],
+                                     'site-packages'))
+
+    for plat_specific in (0,1):
+        site_lib = get_python_lib(plat_specific)
+        if site_lib not in sitedirs: sitedirs.append(site_lib)
+
+    sitedirs = map(normalize_path, sitedirs)
+    return sitedirs
+
+
+def expand_paths(inputs):
+    """Yield sys.path directories that might contain "old-style" packages"""
+
+    seen = {}
+
+    for dirname in inputs:
+        dirname = normalize_path(dirname)
+        if dirname in seen:
+            continue
+
+        seen[dirname] = 1
+        if not os.path.isdir(dirname):
+            continue
+
+        files = os.listdir(dirname)
+        yield dirname, files
+
+        for name in files:
+            if not name.endswith('.pth'):
+                # We only care about the .pth files
+                continue
+            if name in ('easy-install.pth','setuptools.pth'):
+                # Ignore .pth files that we control
+                continue
+
+            # Read the .pth file
+            f = open(os.path.join(dirname,name))
+            lines = list(yield_lines(f))
+            f.close()
+
+            # Yield existing non-dupe, non-import directory lines from it
+            for line in lines:
+                if not line.startswith("import"):
+                    line = normalize_path(line.rstrip())
+                    if line not in seen:
+                        seen[line] = 1
+                        if not os.path.isdir(line):
+                            continue
+                        yield line, os.listdir(line)
+
+
+def extract_wininst_cfg(dist_filename):
+    """Extract configuration data from a bdist_wininst .exe
+
+    Returns a ConfigParser.RawConfigParser, or None
+    """
+    f = open(dist_filename,'rb')
+    try:
+        endrec = zipfile._EndRecData(f)
+        if endrec is None:
+            return None
+
+        prepended = (endrec[9] - endrec[5]) - endrec[6]
+        if prepended < 12:  # no wininst data here
+            return None
+        f.seek(prepended-12)
+
+        import struct, StringIO, ConfigParser
+        tag, cfglen, bmlen = struct.unpack("<iii",f.read(12))
+        if tag not in (0x1234567A, 0x1234567B):
+            return None     # not a valid tag
+
+        f.seek(prepended-(12+cfglen))
+        cfg = ConfigParser.RawConfigParser({'version':'','target_version':''})
+        try:
+            cfg.readfp(StringIO.StringIO(f.read(cfglen).split(chr(0),1)[0]))
+        except ConfigParser.Error:
+            return None
+        if not cfg.has_section('metadata') or not cfg.has_section('Setup'):
+            return None
+        return cfg
+    finally:
+        f.close()
+
+
+def get_exe_prefixes(exe_filename):
+    """Get exclusion prefixes, tag files, and prefix-to->egg path translations for a given .exe file"""
+
+    prefixes = [
+        ('PURELIB/', ''), ('PLATLIB/pywin32_system32', ''),
+        ('PLATLIB/', ''),
+        ('SCRIPTS/', 'EGG-INFO/scripts/')
+    ]
+    z = zipfile.ZipFile(exe_filename)
+    try:
+        for info in z.infolist():
+            name = info.filename
+            parts = name.split('/')
+            if len(parts)==3 and parts[2]=='PKG-INFO':
+                if parts[1].endswith('.egg-info'):
+                    prefixes.insert(0,('/'.join(parts[:2]), 'EGG-INFO/'))
+                    break
+            if len(parts)!=2 or not name.endswith('.pth'):
+                continue
+            if name.endswith('-nspkg.pth'):
+                continue
+            if parts[0].upper() in ('PURELIB','PLATLIB'):
+                for pth in yield_lines(z.read(name)):
+                    pth = pth.strip().replace('\\','/')
+                    if not pth.startswith('import'):
+                        prefixes.append((('%s/%s/' % (parts[0],pth)), ''))
+    finally:
+        z.close()
+    prefixes = [(x.lower(),y) for x, y in prefixes]
+    prefixes.sort(); prefixes.reverse()
+    return prefixes
+
+
+def parse_requirement_arg(spec):
+    try:
+        return Requirement.parse(spec)
+    except ValueError:
+        raise DistutilsError(
+            "Not a URL, existing file, or requirement spec: %r" % (spec,)
+        )
+
+class PthDistributions(Environment):
+    """A .pth file with Distribution paths in it"""
+
+    dirty = False
+
+    def __init__(self, filename, sitedirs=()):
+        self.filename = filename; self.sitedirs=map(normalize_path, sitedirs)
+        self.basedir = normalize_path(os.path.dirname(self.filename))
+        self._load(); Environment.__init__(self, [], None, None)
+        for path in yield_lines(self.paths):
+            map(self.add, find_distributions(path, True))
+
+    def _load(self):
+        self.paths = []
+        saw_import = False
+        seen = dict.fromkeys(self.sitedirs)
+        if os.path.isfile(self.filename):
+            for line in open(self.filename,'rt'):
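+                # each line is either bootstrap 'import ...' code or a
+                # (possibly relative) site-dir path, validated just below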
+ if line.startswith('import'): + saw_import = True + continue + path = line.rstrip() + self.paths.append(path) + if not path.strip() or path.strip().startswith('#'): + continue + # skip non-existent paths, in case somebody deleted a package + # manually, and duplicate paths as well + path = self.paths[-1] = normalize_path( + os.path.join(self.basedir,path) + ) + if not os.path.exists(path) or path in seen: + self.paths.pop() # skip it + self.dirty = True # we cleaned up, so we're dirty now :) + continue + seen[path] = 1 + + if self.paths and not saw_import: + self.dirty = True # ensure anything we touch has import wrappers + while self.paths and not self.paths[-1].strip(): + self.paths.pop() + + def save(self): + """Write changed .pth file back to disk""" + if not self.dirty: + return + + data = '\n'.join(map(self.make_relative,self.paths)) + if data: + log.debug("Saving %s", self.filename) + data = ( + "import sys; sys.__plen = len(sys.path)\n" + "%s\n" + "import sys; new=sys.path[sys.__plen:];" + " del sys.path[sys.__plen:];" + " p=getattr(sys,'__egginsert',len(os.environ.get('PYTHONPATH','').split(os.pathsep))); sys.path[p:p]=new;" + " sys.__egginsert = p+len(new)\n" + ) % data + + if os.path.islink(self.filename): + os.unlink(self.filename) + f = open(self.filename,'wb') + f.write(data); f.close() + + elif os.path.exists(self.filename): + log.debug("Deleting empty %s", self.filename) + os.unlink(self.filename) + + self.dirty = False + + def add(self,dist): + """Add `dist` to the distribution map""" + if dist.location not in self.paths and dist.location not in self.sitedirs: + self.paths.append(dist.location); self.dirty = True + Environment.add(self,dist) + + def remove(self,dist): + """Remove `dist` from the distribution map""" + while dist.location in self.paths: + self.paths.remove(dist.location); self.dirty = True + Environment.remove(self,dist) + + + def make_relative(self,path): + npath, last = os.path.split(normalize_path(path)) + baselen = len(self.basedir) + parts = [last] + sep = os.altsep=='/' and '/' or os.sep + while len(npath)>=baselen: + if npath==self.basedir: + parts.append(os.curdir) + parts.reverse() + return sep.join(parts) + npath, last = os.path.split(npath) + parts.append(last) + else: + return path + +def get_script_header(script_text, executable=sys_executable, wininst=False): + """Create a #! 
line, getting options (if any) from script_text""" + from distutils.command.build_scripts import first_line_re + first = (script_text+'\n').splitlines()[0] + match = first_line_re.match(first) + options = '' + if match: + options = match.group(1) or '' + if options: options = ' '+options + if wininst: + executable = "python.exe" + else: + executable = nt_quote_arg(executable) + hdr = "#!%(executable)s%(options)s\n" % locals() + if unicode(hdr,'ascii','ignore').encode('ascii') != hdr: + # Non-ascii path to sys.executable, use -x to prevent warnings + if options: + if options.strip().startswith('-'): + options = ' -x'+options.strip()[1:] + # else: punt, we can't do it, let the warning happen anyway + else: + options = ' -x' + executable = fix_jython_executable(executable, options) + hdr = "#!%(executable)s%(options)s\n" % locals() + return hdr + +def auto_chmod(func, arg, exc): + if func is os.remove and os.name=='nt': + chmod(arg, stat.S_IWRITE) + return func(arg) + exc = sys.exc_info() + raise exc[0], (exc[1][0], exc[1][1] + (" %s %s" % (func,arg))) + +def uncache_zipdir(path): + """Ensure that the importer caches dont have stale info for `path`""" + from zipimport import _zip_directory_cache as zdc + _uncache(path, zdc) + _uncache(path, sys.path_importer_cache) + +def _uncache(path, cache): + if path in cache: + del cache[path] + else: + path = normalize_path(path) + for p in cache: + if normalize_path(p)==path: + del cache[p] + return + +def is_python(text, filename=''): + "Is this string a valid Python script?" + try: + compile(text, filename, 'exec') + except (SyntaxError, TypeError): + return False + else: + return True + +def is_sh(executable): + """Determine if the specified executable is a .sh (contains a #! line)""" + try: + fp = open(executable) + magic = fp.read(2) + fp.close() + except (OSError,IOError): return executable + return magic == '#!' + +def nt_quote_arg(arg): + """Quote a command line argument according to Windows parsing rules""" + + result = [] + needquote = False + nb = 0 + + needquote = (" " in arg) or ("\t" in arg) + if needquote: + result.append('"') + + for c in arg: + if c == '\\': + nb += 1 + elif c == '"': + # double preceding backslashes, then add a \" + result.append('\\' * (nb*2) + '\\"') + nb = 0 + else: + if nb: + result.append('\\' * nb) + nb = 0 + result.append(c) + + if nb: + result.append('\\' * nb) + + if needquote: + result.append('\\' * nb) # double the trailing backslashes + result.append('"') + + return ''.join(result) + + + + + + + + + +def is_python_script(script_text, filename): + """Is this text, as a whole, a Python script? (as opposed to shell/bat/etc. + """ + if filename.endswith('.py') or filename.endswith('.pyw'): + return True # extension says it's Python + if is_python(script_text, filename): + return True # it's syntactically valid Python + if script_text.startswith('#!'): + # It begins with a '#!' 
line, so check if 'python' is in it somewhere + return 'python' in script_text.splitlines()[0].lower() + + return False # Not any Python I can recognize + +try: + from os import chmod as _chmod +except ImportError: + # Jython compatibility + def _chmod(*args): pass + +def chmod(path, mode): + log.debug("changing mode of %s to %o", path, mode) + try: + _chmod(path, mode) + except os.error, e: + log.debug("chmod failed: %s", e) + +def fix_jython_executable(executable, options): + if sys.platform.startswith('java') and is_sh(executable): + # Workaround Jython's sys.executable being a .sh (an invalid + # shebang line interpreter) + if options: + # Can't apply the workaround, leave it broken + log.warn("WARNING: Unable to adapt shebang line for Jython," + " the following script is NOT executable\n" + " see http://bugs.jython.org/issue1112 for" + " more information.") + else: + return '/usr/bin/env %s' % executable + return executable + + +def get_script_args(dist, executable=sys_executable, wininst=False, script_dir=None): + """Yield write_script() argument tuples for a distribution's entrypoints""" + spec = str(dist.as_requirement()) + requires = [spec] + [str(r) for r in dist.requires()] + header = get_script_header("", executable, wininst) + generated_by = "# generated by zetuptoolz %s" % (setuptools_version,) + + for group in 'console_scripts', 'gui_scripts': + for name, ep in dist.get_entry_map(group).items(): + script_head, script_tail = (( + "# EASY-INSTALL-ENTRY-SCRIPT: %(spec)r,%(group)r,%(name)r\n" + "%(generated_by)s\n" + "__requires__ = %(requires)r\n" + "import sys\n" + "from pkg_resources import load_entry_point\n" + "\n" + ) % locals(), ( + "sys.exit(\n" + " load_entry_point(%(spec)r, %(group)r, %(name)r)()\n" + ")\n" + ) % locals()) + + if wininst or sys.platform == "win32": + # On Windows/wininst, add a .py[w] extension. Delete any existing + # -script.py[w], .exe, and .exe.manifest. + if group=='gui_scripts': + ext = '.pyw' + old = ['','.pyw','-script.pyw','.exe','.exe.manifest'] + which_python = 'pythonw.exe' + new_header = re.sub('(?i)python.exe', which_python, header) + else: + ext = '.pyscript' + old = ['','.pyscript','.py','.pyc','.pyo','-script.py','.exe','.exe.manifest'] + which_python = 'python.exe' + new_header = re.sub('(?i)pythonw.exe', which_python, header) + + len_ext = len(ext) + script_head += ( + "# If this script doesn't work for you, make sure that the %(ext)s\n" + "# extension is included in the PATHEXT environment variable, and is\n" + "# associated with %(which_python)s in the registry.\n" + "\n" + "if sys.argv[0].endswith(%(ext)r):\n" + " sys.argv[0] = sys.argv[0][:-%(len_ext)r]\n" + "\n" + ) % locals() + + if os.path.exists(new_header[2:-1]) or sys.platform != 'win32': + hdr = new_header + else: + hdr = header + yield (name+ext, hdr + script_head + script_tail, 't', [name+x for x in old]) + + # Also write a shell script that runs the .pyscript, for cygwin. + # + # We can't use a Python script, because the Python interpreter that we want + # to use is the native Windows one, which won't understand a cygwin path. + # Windows paths written with forward slashes are universally understood + # (by native Python, cygwin Python, and bash), so we'll use 'cygpath -m' to + # get the directory from which the script was run in that form. This makes + # the cygwin script and .pyscript position-independent, provided they are + # in the same directory. 
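+            # e.g. `cygpath -m "/cygdrive/c/Python25/Scripts/."` prints
+            # `C:/Python25/Scripts/.`, a form that native Python, cygwin
+            # Python, and bash all accept (the path shown is illustrative).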
+ + def quote_path(s): + return "\\'".join("'" + p.replace('\\', '/') + "'" for p in s.split("'")) + + pyscript = quote_path("/"+name+ext) + python_path = quote_path(sys.executable) + shell_script_text = ( + '#!/bin/sh\n' + '%(generated_by)s\n' + '\n' + 'ScriptDir=`cygpath -m "$0/.."`\n' + '%(python_path)s "${ScriptDir}"%(pyscript)s "$@"\n' + ) % locals() + yield (name, shell_script_text, 'b') + else: + # On other platforms, we assume the right thing to do is to + # just write the stub with no extension. + yield (name, header + script_head + script_tail) + + +def rmtree(path, ignore_errors=False, onerror=auto_chmod): + """Recursively delete a directory tree. + + This code is taken from the Python 2.4 version of 'shutil', because + the 2.3 version doesn't really work right. + """ + if ignore_errors: + def onerror(*args): + pass + elif onerror is None: + def onerror(*args): + raise + names = [] + try: + names = os.listdir(path) + except os.error, err: + onerror(os.listdir, path, sys.exc_info()) + for name in names: + fullname = os.path.join(path, name) + try: + mode = os.lstat(fullname).st_mode + except os.error: + mode = 0 + if stat.S_ISDIR(mode): + rmtree(fullname, ignore_errors, onerror) + else: + try: + os.remove(fullname) + except os.error, err: + onerror(os.remove, fullname, sys.exc_info()) + try: + os.rmdir(path) + except os.error: + onerror(os.rmdir, path, sys.exc_info()) + +def bootstrap(): + # This function is called when setuptools*.egg is run using /bin/sh + import setuptools; argv0 = os.path.dirname(setuptools.__path__[0]) + sys.argv[0] = argv0; sys.argv.append(argv0); main() + + +def main(argv=None, **kw): + from setuptools import setup + from setuptools.dist import Distribution + import distutils.core + + USAGE = """\ +usage: %(script)s [options] requirement_or_url ... 
+ or: %(script)s --help +""" + + def gen_usage (script_name): + script = os.path.basename(script_name) + return USAGE % vars() + + def with_ei_usage(f): + old_gen_usage = distutils.core.gen_usage + try: + distutils.core.gen_usage = gen_usage + return f() + finally: + distutils.core.gen_usage = old_gen_usage + + class DistributionWithoutHelpCommands(Distribution): + common_usage = "" + def _show_help(self,*args,**kw): + with_ei_usage(lambda: Distribution._show_help(self,*args,**kw)) + + if argv is None: + argv = sys.argv[1:] + + with_ei_usage(lambda: + setup( + script_args = ['-q','easy_install', '-v']+argv, + script_name = sys.argv[0] or 'easy_install', + distclass=DistributionWithoutHelpCommands, **kw + ) + ) + + + + diff --git a/setuptools-0.6c16dev3.egg/setuptools/command/egg_info.py b/setuptools-0.6c16dev3.egg/setuptools/command/egg_info.py new file mode 100644 index 0000000..5a8b2db --- /dev/null +++ b/setuptools-0.6c16dev3.egg/setuptools/command/egg_info.py @@ -0,0 +1,451 @@ +"""setuptools.command.egg_info + +Create a distribution's .egg-info directory and contents""" + +# This module should be kept compatible with Python 2.3 +import os, re +from setuptools import Command +from distutils.errors import * +from distutils import log +from setuptools.command.sdist import sdist +from distutils.util import convert_path +from distutils.filelist import FileList +from pkg_resources import parse_requirements, safe_name, parse_version, \ + safe_version, yield_lines, EntryPoint, iter_entry_points, to_filename +from sdist import walk_revctrl + +class egg_info(Command): + description = "create a distribution's .egg-info directory" + + user_options = [ + ('egg-base=', 'e', "directory containing .egg-info directories" + " (default: top of the source tree)"), + ('tag-svn-revision', 'r', + "Add subversion revision ID to version number"), + ('tag-date', 'd', "Add date stamp (e.g. 
20050528) to version number"), + ('tag-build=', 'b', "Specify explicit tag to add to version number"), + ('no-svn-revision', 'R', + "Don't add subversion revision ID [default]"), + ('no-date', 'D', "Don't include date stamp [default]"), + ] + + boolean_options = ['tag-date', 'tag-svn-revision'] + negative_opt = {'no-svn-revision': 'tag-svn-revision', + 'no-date': 'tag-date'} + + + + + + + + def initialize_options(self): + self.egg_name = None + self.egg_version = None + self.egg_base = None + self.egg_info = None + self.tag_build = None + self.tag_svn_revision = 0 + self.tag_date = 0 + self.broken_egg_info = False + self.vtags = None + + def save_version_info(self, filename): + from setopt import edit_config + edit_config( + filename, + {'egg_info': + {'tag_svn_revision':0, 'tag_date': 0, 'tag_build': self.tags()} + } + ) + + + + + + + + + + + + + + + + + + + + + + + def finalize_options (self): + self.egg_name = safe_name(self.distribution.get_name()) + self.vtags = self.tags() + self.egg_version = self.tagged_version() + + try: + list( + parse_requirements('%s==%s' % (self.egg_name,self.egg_version)) + ) + except ValueError: + raise DistutilsOptionError( + "Invalid distribution name or version syntax: %s-%s" % + (self.egg_name,self.egg_version) + ) + + if self.egg_base is None: + dirs = self.distribution.package_dir + self.egg_base = (dirs or {}).get('',os.curdir) + + self.ensure_dirname('egg_base') + self.egg_info = to_filename(self.egg_name)+'.egg-info' + if self.egg_base != os.curdir: + self.egg_info = os.path.join(self.egg_base, self.egg_info) + if '-' in self.egg_name: self.check_broken_egg_info() + + # Set package version for the benefit of dumber commands + # (e.g. sdist, bdist_wininst, etc.) + # + self.distribution.metadata.version = self.egg_version + + # If we bootstrapped around the lack of a PKG-INFO, as might be the + # case in a fresh checkout, make sure that any special tags get added + # to the version info + # + pd = self.distribution._patched_dist + if pd is not None and pd.key==self.egg_name.lower(): + pd._version = self.egg_version + pd._parsed_version = parse_version(self.egg_version) + self.distribution._patched_dist = None + + + def write_or_delete_file(self, what, filename, data, force=False): + """Write `data` to `filename` or delete if empty + + If `data` is non-empty, this routine is the same as ``write_file()``. + If `data` is empty but not ``None``, this is the same as calling + ``delete_file(filename)`. If `data` is ``None``, then this is a no-op + unless `filename` exists, in which case a warning is issued about the + orphaned file (if `force` is false), or deleted (if `force` is true). + """ + if data: + self.write_file(what, filename, data) + elif os.path.exists(filename): + if data is None and not force: + log.warn( + "%s not set in setup(), but %s exists", what, filename + ) + return + else: + self.delete_file(filename) + + def write_file(self, what, filename, data): + """Write `data` to `filename` (if not a dry run) after announcing it + + `what` is used in a log message to identify what is being written + to the file. 
+ """ + log.info("writing %s to %s", what, filename) + if not self.dry_run: + f = open(filename, 'wb') + f.write(data) + f.close() + + def delete_file(self, filename): + """Delete `filename` (if not a dry run) after announcing it""" + log.info("deleting %s", filename) + if not self.dry_run: + os.unlink(filename) + + def tagged_version(self): + return safe_version(self.distribution.get_version() + self.vtags) + + def run(self): + self.mkpath(self.egg_info) + installer = self.distribution.fetch_build_egg + for ep in iter_entry_points('egg_info.writers'): + writer = ep.load(installer=installer) + writer(self, ep.name, os.path.join(self.egg_info,ep.name)) + + # Get rid of native_libs.txt if it was put there by older bdist_egg + nl = os.path.join(self.egg_info, "native_libs.txt") + if os.path.exists(nl): + self.delete_file(nl) + + self.find_sources() + + def tags(self): + version = '' + if self.tag_build: + version+=self.tag_build + if self.tag_svn_revision and ( + os.path.exists('.svn') or os.path.exists('PKG-INFO') + ): version += '-r%s' % self.get_svn_revision() + if self.tag_date: + import time; version += time.strftime("-%Y%m%d") + return version + + + + + + + + + + + + + + + + + + def get_svn_revision(self): + revision = 0 + urlre = re.compile('url="([^"]+)"') + revre = re.compile('committed-rev="(\d+)"') + + for base,dirs,files in os.walk(os.curdir): + if '.svn' not in dirs: + dirs[:] = [] + continue # no sense walking uncontrolled subdirs + dirs.remove('.svn') + f = open(os.path.join(base,'.svn','entries')) + data = f.read() + f.close() + + if data.startswith('9 and d[9]]+[0]) + if base==os.curdir: + base_url = dirurl+'/' # save the root url + elif not dirurl.startswith(base_url): + dirs[:] = [] + continue # not part of the same svn tree, skip it + revision = max(revision, localrev) + + return str(revision or get_pkg_info_revision()) + + + + + def find_sources(self): + """Generate SOURCES.txt manifest file""" + manifest_filename = os.path.join(self.egg_info,"SOURCES.txt") + mm = manifest_maker(self.distribution) + mm.manifest = manifest_filename + mm.run() + self.filelist = mm.filelist + + def check_broken_egg_info(self): + bei = self.egg_name+'.egg-info' + if self.egg_base != os.curdir: + bei = os.path.join(self.egg_base, bei) + if os.path.exists(bei): + log.warn( + "-"*78+'\n' + "Note: Your current .egg-info directory has a '-' in its name;" + '\nthis will not work correctly with "setup.py develop".\n\n' + 'Please rename %s to %s to correct this problem.\n'+'-'*78, + bei, self.egg_info + ) + self.broken_egg_info = self.egg_info + self.egg_info = bei # make it work for now + +class FileList(FileList): + """File list that accepts only existing, platform-independent paths""" + + def append(self, item): + if item.endswith('\r'): # Fix older sdists built on Windows + item = item[:-1] + path = convert_path(item) + if os.path.exists(path): + self.files.append(path) + + + + + + + + + +class manifest_maker(sdist): + + template = "MANIFEST.in" + + def initialize_options (self): + self.use_defaults = 1 + self.prune = 1 + self.manifest_only = 1 + self.force_manifest = 1 + + def finalize_options(self): + pass + + def run(self): + self.filelist = FileList() + if not os.path.exists(self.manifest): + self.write_manifest() # it must exist so it'll get in the list + self.filelist.findall() + self.add_defaults() + if os.path.exists(self.template): + self.read_template() + self.prune_file_list() + self.filelist.sort() + self.filelist.remove_duplicates() + self.write_manifest() + + def write_manifest 
(self): + """Write the file list in 'self.filelist' (presumably as filled in + by 'add_defaults()' and 'read_template()') to the manifest file + named by 'self.manifest'. + """ + files = self.filelist.files + if os.sep!='/': + files = [f.replace(os.sep,'/') for f in files] + self.execute(write_file, (self.manifest, files), + "writing manifest file '%s'" % self.manifest) + + def warn(self, msg): # suppress missing-file warnings from sdist + if not msg.startswith("standard file not found:"): + sdist.warn(self, msg) + + def add_defaults(self): + sdist.add_defaults(self) + self.filelist.append(self.template) + self.filelist.append(self.manifest) + rcfiles = list(walk_revctrl()) + if rcfiles: + self.filelist.extend(rcfiles) + elif os.path.exists(self.manifest): + self.read_manifest() + ei_cmd = self.get_finalized_command('egg_info') + self.filelist.include_pattern("*", prefix=ei_cmd.egg_info) + + def prune_file_list (self): + build = self.get_finalized_command('build') + base_dir = self.distribution.get_fullname() + self.filelist.exclude_pattern(None, prefix=build.build_base) + self.filelist.exclude_pattern(None, prefix=base_dir) + sep = re.escape(os.sep) + self.filelist.exclude_pattern(sep+r'(RCS|CVS|\.svn)'+sep, is_regex=1) + + +def write_file (filename, contents): + """Create a file with the specified name and write 'contents' (a + sequence of strings without line terminators) to it. + """ + f = open(filename, "wb") # always write POSIX-style manifest + f.write("\n".join(contents)) + f.close() + + + + + + + + + + + + + +def write_pkg_info(cmd, basename, filename): + log.info("writing %s", filename) + if not cmd.dry_run: + metadata = cmd.distribution.metadata + metadata.version, oldver = cmd.egg_version, metadata.version + metadata.name, oldname = cmd.egg_name, metadata.name + try: + # write unescaped data to PKG-INFO, so older pkg_resources + # can still parse it + metadata.write_pkg_info(cmd.egg_info) + finally: + metadata.name, metadata.version = oldname, oldver + + safe = getattr(cmd.distribution,'zip_safe',None) + import bdist_egg; bdist_egg.write_safety_flag(cmd.egg_info, safe) + +def warn_depends_obsolete(cmd, basename, filename): + if os.path.exists(filename): + log.warn( + "WARNING: 'depends.txt' is not used by setuptools 0.6!\n" + "Use the install_requires/extras_require setup() args instead." 
+ ) + + +def write_requirements(cmd, basename, filename): + dist = cmd.distribution + data = ['\n'.join(yield_lines(dist.install_requires or ()))] + for extra,reqs in (dist.extras_require or {}).items(): + data.append('\n\n[%s]\n%s' % (extra, '\n'.join(yield_lines(reqs)))) + cmd.write_or_delete_file("requirements", filename, ''.join(data)) + +def write_toplevel_names(cmd, basename, filename): + pkgs = dict.fromkeys( + [k.split('.',1)[0] + for k in cmd.distribution.iter_distribution_names() + ] + ) + cmd.write_file("top-level names", filename, '\n'.join(pkgs)+'\n') + + + +def overwrite_arg(cmd, basename, filename): + write_arg(cmd, basename, filename, True) + +def write_arg(cmd, basename, filename, force=False): + argname = os.path.splitext(basename)[0] + value = getattr(cmd.distribution, argname, None) + if value is not None: + value = '\n'.join(value)+'\n' + cmd.write_or_delete_file(argname, filename, value, force) + +def write_entries(cmd, basename, filename): + ep = cmd.distribution.entry_points + + if isinstance(ep,basestring) or ep is None: + data = ep + elif ep is not None: + data = [] + for section, contents in ep.items(): + if not isinstance(contents,basestring): + contents = EntryPoint.parse_group(section, contents) + contents = '\n'.join(map(str,contents.values())) + data.append('[%s]\n%s\n\n' % (section,contents)) + data = ''.join(data) + + cmd.write_or_delete_file('entry points', filename, data, True) + +def get_pkg_info_revision(): + # See if we can get a -r### off of PKG-INFO, in case this is an sdist of + # a subversion revision + # + if os.path.exists('PKG-INFO'): + f = open('PKG-INFO','rU') + for line in f: + match = re.match(r"Version:.*-r(\d+)\s*$", line) + if match: + return int(match.group(1)) + return 0 + + + +# diff --git a/setuptools-0.6c16dev3.egg/setuptools/command/install.py b/setuptools-0.6c16dev3.egg/setuptools/command/install.py new file mode 100644 index 0000000..adaaeca --- /dev/null +++ b/setuptools-0.6c16dev3.egg/setuptools/command/install.py @@ -0,0 +1,123 @@ +import setuptools, sys, glob +from distutils.command.install import install as _install +from distutils.errors import DistutilsArgError + +class install(_install): + """Use easy_install to install the package, w/dependencies""" + + user_options = _install.user_options + [ + ('old-and-unmanageable', None, "Try not to use this!"), + ('single-version-externally-managed', None, + "used by system package builders to create 'flat' eggs"), + ] + boolean_options = _install.boolean_options + [ + 'old-and-unmanageable', 'single-version-externally-managed', + ] + new_commands = [ + ('install_egg_info', lambda self: True), + ('install_scripts', lambda self: True), + ] + _nc = dict(new_commands) + sub_commands = [ + cmd for cmd in _install.sub_commands if cmd[0] not in _nc + ] + new_commands + + def initialize_options(self): + _install.initialize_options(self) + self.old_and_unmanageable = None + self.single_version_externally_managed = None + self.no_compile = None # make DISTUTILS_DEBUG work right! 
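The writer functions at the end of egg_info.py above (write_pkg_info, write_requirements, write_toplevel_names, write_arg, write_entries) are not called directly: egg_info.run() iterates the 'egg_info.writers' entry-point group and invokes each one as writer(cmd, ep.name, os.path.join(cmd.egg_info, ep.name)). A minimal sketch of a third-party writer, with hypothetical names throughout:

    # mypkg/writers.py (hypothetical module)
    import time

    def write_build_stamp(cmd, basename, filename):
        # cmd is the running egg_info command; write_or_delete_file
        # honours --dry-run and removes a stale file if the data is empty.
        cmd.write_or_delete_file("build stamp", filename,
                                 time.strftime("%Y%m%d\n"))

    # registered from that package's setup.py:
    # setup(...,
    #       entry_points={'egg_info.writers':
    #           ['build_stamp.txt = mypkg.writers:write_build_stamp']})

Because each output file is named after its entry point (ep.name), this stamp would land in the .egg-info directory alongside PKG-INFO and requires.txt.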
+ + def finalize_options(self): + _install.finalize_options(self) + if self.root: + self.single_version_externally_managed = True + elif self.single_version_externally_managed: + if not self.root and not self.record: + raise DistutilsArgError( + "You must specify --record or --root when building system" + " packages" + ) + + def handle_extra_path(self): + if self.root or self.single_version_externally_managed: + # explicit backward-compatibility mode, allow extra_path to work + return _install.handle_extra_path(self) + + # Ignore extra_path when installing an egg (or being run by another + # command without --root or --single-version-externally-managed + self.path_file = None + self.extra_dirs = '' + + def run(self): + self.old_run() + if sys.platform == "win32": + from setuptools.command.scriptsetup import do_scriptsetup + do_scriptsetup() + + def old_run(self): + # Explicit request for old-style install? Just do it + if self.old_and_unmanageable or self.single_version_externally_managed: + return _install.run(self) + + # Attempt to detect whether we were called from setup() or by another + # command. If we were called by setup(), our caller will be the + # 'run_command' method in 'distutils.dist', and *its* caller will be + # the 'run_commands' method. If we were called any other way, our + # immediate caller *might* be 'run_command', but it won't have been + # called by 'run_commands'. This is slightly kludgy, but seems to + # work. + # + caller = sys._getframe(2) + caller_module = caller.f_globals.get('__name__','') + caller_name = caller.f_code.co_name + + if caller_module != 'distutils.dist' or caller_name!='run_commands': + # We weren't called from the command line or setup(), so we + # should run in backward-compatibility mode to support bdist_* + # commands. + _install.run(self) + else: + self.do_egg_install() + + def do_egg_install(self): + + easy_install = self.distribution.get_command_class('easy_install') + + cmd = easy_install( + self.distribution, args="x", root=self.root, record=self.record, + ) + cmd.ensure_finalized() # finalize before bdist_egg munges install cmd + cmd.always_copy_from = '.' 
# make sure local-dir eggs get installed + + # pick up setup-dir .egg files only: no .egg-info + cmd.package_index.scan(glob.glob('*.egg')) + + self.run_command('bdist_egg') + args = [self.distribution.get_command_obj('bdist_egg').egg_output] + + if setuptools.bootstrap_install_from: + # Bootstrap self-installation of setuptools + args.insert(0, setuptools.bootstrap_install_from) + + cmd.args = args + cmd.run() + setuptools.bootstrap_install_from = None + + + + + + + + + + + + + + + + + +# diff --git a/setuptools-0.6c16dev3.egg/setuptools/command/install_egg_info.py b/setuptools-0.6c16dev3.egg/setuptools/command/install_egg_info.py new file mode 100644 index 0000000..939340c --- /dev/null +++ b/setuptools-0.6c16dev3.egg/setuptools/command/install_egg_info.py @@ -0,0 +1,123 @@ +from setuptools import Command +from setuptools.archive_util import unpack_archive +from distutils import log, dir_util +import os, shutil, pkg_resources + +class install_egg_info(Command): + """Install an .egg-info directory for the package""" + + description = "Install an .egg-info directory for the package" + + user_options = [ + ('install-dir=', 'd', "directory to install to"), + ] + + def initialize_options(self): + self.install_dir = None + + def finalize_options(self): + self.set_undefined_options('install_lib',('install_dir','install_dir')) + ei_cmd = self.get_finalized_command("egg_info") + basename = pkg_resources.Distribution( + None, None, ei_cmd.egg_name, ei_cmd.egg_version + ).egg_name()+'.egg-info' + self.source = ei_cmd.egg_info + self.target = os.path.join(self.install_dir, basename) + self.outputs = [self.target] + + def run(self): + self.run_command('egg_info') + target = self.target + if os.path.isdir(self.target) and not os.path.islink(self.target): + dir_util.remove_tree(self.target, dry_run=self.dry_run) + elif os.path.exists(self.target): + self.execute(os.unlink,(self.target,),"Removing "+self.target) + if not self.dry_run: + pkg_resources.ensure_directory(self.target) + self.execute(self.copytree, (), + "Copying %s to %s" % (self.source, self.target) + ) + self.install_namespaces() + + def get_outputs(self): + return self.outputs + + def copytree(self): + # Copy the .egg-info tree to site-packages + def skimmer(src,dst): + # filter out source-control directories; note that 'src' is always + # a '/'-separated path, regardless of platform. 'dst' is a + # platform-specific path. + for skip in '.svn/','CVS/': + if src.startswith(skip) or '/'+skip in src: + return None + self.outputs.append(dst) + log.debug("Copying %s to %s", src, dst) + return dst + unpack_archive(self.source, self.target, skimmer) + + + + + + + + + + + + + + + + + + + + + + + + + + def install_namespaces(self): + nsp = self._get_all_ns_packages() + if not nsp: return + filename,ext = os.path.splitext(self.target) + filename += '-nspkg.pth'; self.outputs.append(filename) + log.info("Installing %s",filename) + if not self.dry_run: + f = open(filename,'wb') + for pkg in nsp: + pth = tuple(pkg.split('.')) + trailer = '\n' + if '.' 
in pkg: + trailer = ( + "; m and setattr(sys.modules[%r], %r, m)\n" + % ('.'.join(pth[:-1]), pth[-1]) + ) + f.write( + "import sys,new,os; " + "p = os.path.join(sys._getframe(1).f_locals['sitedir'], " + "*%(pth)r); " + "ie = os.path.exists(os.path.join(p,'__init__.py')); " + "m = not ie and " + "sys.modules.setdefault(%(pkg)r,new.module(%(pkg)r)); " + "mp = (m or []) and m.__dict__.setdefault('__path__',[]); " + "(p not in mp) and mp.append(p)%(trailer)s" + % locals() + ) + f.close() + + def _get_all_ns_packages(self): + nsp = {} + for pkg in self.distribution.namespace_packages or []: + pkg = pkg.split('.') + while pkg: + nsp['.'.join(pkg)] = 1 + pkg.pop() + nsp=list(nsp) + nsp.sort() # set up shorter names first + return nsp + + diff --git a/setuptools-0.6c16dev3.egg/setuptools/command/install_lib.py b/setuptools-0.6c16dev3.egg/setuptools/command/install_lib.py new file mode 100644 index 0000000..96c8dfe --- /dev/null +++ b/setuptools-0.6c16dev3.egg/setuptools/command/install_lib.py @@ -0,0 +1,76 @@ +from distutils.command.install_lib import install_lib as _install_lib +import os + +class install_lib(_install_lib): + """Don't add compiled flags to filenames of non-Python files""" + + def _bytecode_filenames (self, py_filenames): + bytecode_files = [] + for py_file in py_filenames: + if not py_file.endswith('.py'): + continue + if self.compile: + bytecode_files.append(py_file + "c") + if self.optimize > 0: + bytecode_files.append(py_file + "o") + + return bytecode_files + + def run(self): + self.build() + outfiles = self.install() + if outfiles is not None: + # always compile, in case we have any extension stubs to deal with + self.byte_compile(outfiles) + + def get_exclusions(self): + exclude = {} + nsp = self.distribution.namespace_packages + + if (nsp and self.get_finalized_command('install') + .single_version_externally_managed + ): + for pkg in nsp: + parts = pkg.split('.') + while parts: + pkgdir = os.path.join(self.install_dir, *parts) + for f in '__init__.py', '__init__.pyc', '__init__.pyo': + exclude[os.path.join(pkgdir,f)] = 1 + parts.pop() + return exclude + + def copy_tree( + self, infile, outfile, + preserve_mode=1, preserve_times=1, preserve_symlinks=0, level=1 + ): + assert preserve_mode and preserve_times and not preserve_symlinks + exclude = self.get_exclusions() + + if not exclude: + return _install_lib.copy_tree(self, infile, outfile) + + # Exclude namespace package __init__.py* files from the output + + from setuptools.archive_util import unpack_directory + from distutils import log + + outfiles = [] + + def pf(src, dst): + if dst in exclude: + log.warn("Skipping installation of %s (namespace package)",dst) + return False + + log.info("copying %s -> %s", src, os.path.dirname(dst)) + outfiles.append(dst) + return dst + + unpack_directory(infile, outfile, pf) + return outfiles + + def get_outputs(self): + outputs = _install_lib.get_outputs(self) + exclude = self.get_exclusions() + if exclude: + return [f for f in outputs if f not in exclude] + return outputs diff --git a/setuptools-0.6c16dev3.egg/setuptools/command/install_scripts.py b/setuptools-0.6c16dev3.egg/setuptools/command/install_scripts.py new file mode 100644 index 0000000..79fa375 --- /dev/null +++ b/setuptools-0.6c16dev3.egg/setuptools/command/install_scripts.py @@ -0,0 +1,82 @@ +from distutils.command.install_scripts import install_scripts \ + as _install_scripts +from easy_install import get_script_args, sys_executable, chmod +from pkg_resources import Distribution, PathMetadata, ensure_directory +import os 
+from distutils import log + +class install_scripts(_install_scripts): + """Do normal script install, plus any egg_info wrapper scripts""" + + def initialize_options(self): + _install_scripts.initialize_options(self) + self.no_ep = False + + def run(self): + self.run_command("egg_info") + if self.distribution.scripts: + _install_scripts.run(self) # run first to set up self.outfiles + else: + self.outfiles = [] + if self.no_ep: + # don't install entry point scripts into .egg file! + return + + ei_cmd = self.get_finalized_command("egg_info") + dist = Distribution( + ei_cmd.egg_base, PathMetadata(ei_cmd.egg_base, ei_cmd.egg_info), + ei_cmd.egg_name, ei_cmd.egg_version, + ) + bs_cmd = self.get_finalized_command('build_scripts') + executable = getattr(bs_cmd,'executable',sys_executable) + is_wininst = getattr( + self.get_finalized_command("bdist_wininst"), '_is_running', False + ) + for args in get_script_args(dist, executable, is_wininst): + self.write_script(*args) + + + + + + def write_script(self, script_name, contents, mode="t", *ignored): + """Write an executable file to the scripts directory""" + log.info("Installing %s script to %s", script_name, self.install_dir) + target = os.path.join(self.install_dir, script_name) + self.outfiles.append(target) + + if not self.dry_run: + ensure_directory(target) + f = open(target,"w"+mode) + f.write(contents) + f.close() + chmod(target,0755) + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/setuptools-0.6c16dev3.egg/setuptools/command/register.py b/setuptools-0.6c16dev3.egg/setuptools/command/register.py new file mode 100644 index 0000000..3b2e085 --- /dev/null +++ b/setuptools-0.6c16dev3.egg/setuptools/command/register.py @@ -0,0 +1,10 @@ +from distutils.command.register import register as _register + +class register(_register): + __doc__ = _register.__doc__ + + def run(self): + # Make sure that we are using valid current name/version info + self.run_command('egg_info') + _register.run(self) + diff --git a/setuptools-0.6c16dev3.egg/setuptools/command/rotate.py b/setuptools-0.6c16dev3.egg/setuptools/command/rotate.py new file mode 100644 index 0000000..8aab312 --- /dev/null +++ b/setuptools-0.6c16dev3.egg/setuptools/command/rotate.py @@ -0,0 +1,57 @@ +import distutils, os +from setuptools import Command +from distutils.util import convert_path +from distutils import log +from distutils.errors import * + +class rotate(Command): + """Delete older distributions""" + + description = "delete older distributions, keeping N newest files" + user_options = [ + ('match=', 'm', "patterns to match (required)"), + ('dist-dir=', 'd', "directory where the distributions are"), + ('keep=', 'k', "number of matching distributions to keep"), + ] + + boolean_options = [] + + def initialize_options(self): + self.match = None + self.dist_dir = None + self.keep = None + + def finalize_options(self): + if self.match is None: + raise DistutilsOptionError( + "Must specify one or more (comma-separated) match patterns " + "(e.g. 
'.zip' or '.egg')" + ) + if self.keep is None: + raise DistutilsOptionError("Must specify number of files to keep") + try: + self.keep = int(self.keep) + except ValueError: + raise DistutilsOptionError("--keep must be an integer") + if isinstance(self.match, basestring): + self.match = [ + convert_path(p.strip()) for p in self.match.split(',') + ] + self.set_undefined_options('bdist',('dist_dir', 'dist_dir')) + + def run(self): + self.run_command("egg_info") + from glob import glob + for pattern in self.match: + pattern = self.distribution.get_name()+'*'+pattern + files = glob(os.path.join(self.dist_dir,pattern)) + files = [(os.path.getmtime(f),f) for f in files] + files.sort() + files.reverse() + + log.info("%d file(s) matching %s", len(files), pattern) + files = files[self.keep:] + for (t,f) in files: + log.info("Deleting %s", f) + if not self.dry_run: + os.unlink(f) diff --git a/setuptools-0.6c16dev3.egg/setuptools/command/saveopts.py b/setuptools-0.6c16dev3.egg/setuptools/command/saveopts.py new file mode 100644 index 0000000..9c58d72 --- /dev/null +++ b/setuptools-0.6c16dev3.egg/setuptools/command/saveopts.py @@ -0,0 +1,24 @@ +import distutils, os +from setuptools import Command +from setuptools.command.setopt import edit_config, option_base + +class saveopts(option_base): + """Save command-line options to a file""" + + description = "save supplied options to setup.cfg or other config file" + + def run(self): + dist = self.distribution + commands = dist.command_options.keys() + settings = {} + + for cmd in commands: + + if cmd=='saveopts': + continue # don't save our own options! + + for opt,(src,val) in dist.get_option_dict(cmd).items(): + if src=="command line": + settings.setdefault(cmd,{})[opt] = val + + edit_config(self.filename, settings, self.dry_run) diff --git a/setuptools-0.6c16dev3.egg/setuptools/command/scriptsetup.py b/setuptools-0.6c16dev3.egg/setuptools/command/scriptsetup.py new file mode 100644 index 0000000..db68c07 --- /dev/null +++ b/setuptools-0.6c16dev3.egg/setuptools/command/scriptsetup.py @@ -0,0 +1,284 @@ +from distutils.errors import DistutilsSetupError +from setuptools import Command +import sys + +class scriptsetup(Command): + action = (sys.platform == "win32" + and "set up .pyscript association and PATHEXT variable to run scripts" + or "this does nothing on non-Windows platforms") + + user_options = [ + ('allusers', 'a', + 'make changes for all users of this Windows installation (requires Administrator privileges)'), + ] + boolean_options = ['allusers'] + + def initialize_options(self): + self.allusers = False + + def finalize_options(self): + pass + + def run(self): + if sys.platform != "win32": + print "\n'scriptsetup' isn't needed on non-Windows platforms." + else: + do_scriptsetup(self.allusers) + + +def do_scriptsetup(allusers=False): + print "\nSetting up environment to run scripts for %s..." 
% (allusers and "all users" or "the current user") + + from _winreg import HKEY_CURRENT_USER, HKEY_LOCAL_MACHINE, HKEY_CLASSES_ROOT, \ + REG_SZ, REG_EXPAND_SZ, KEY_QUERY_VALUE, KEY_SET_VALUE, \ + OpenKey, CreateKey, QueryValueEx, SetValueEx, FlushKey, CloseKey + + USER_ENV = "Environment" + try: + user_env = OpenKey(HKEY_CURRENT_USER, USER_ENV, 0, KEY_QUERY_VALUE) + except WindowsError, e: + raise DistutilsSetupError("I could not read the user environment from the registry.\n%r" % (e,)) + + SYSTEM_ENV = "SYSTEM\\CurrentControlSet\\Control\\Session Manager\\Environment" + try: + system_env = OpenKey(HKEY_LOCAL_MACHINE, SYSTEM_ENV, 0, KEY_QUERY_VALUE) + except WindowsError, e: + raise DistutilsSetupError("I could not read the system environment from the registry.\n%r" % (e,)) + + + # HKEY_CLASSES_ROOT is a merged view that would only confuse us. + # + + USER_CLASSES = "SOFTWARE\\Classes" + try: + user_classes = OpenKey(HKEY_CURRENT_USER, USER_CLASSES, 0, KEY_QUERY_VALUE) + except WindowsError, e: + raise DistutilsSetupError("I could not read the user filetype associations from the registry.\n%r" % (e,)) + + SYSTEM_CLASSES = "SOFTWARE\\Classes" + try: + system_classes = OpenKey(HKEY_LOCAL_MACHINE, SYSTEM_CLASSES, 0, KEY_QUERY_VALUE) + except WindowsError, e: + raise DistutilsSetupError("I could not read the system filetype associations from the registry.\n%r" % (e,)) + + + def query(key, subkey, what): + try: + (value, type) = QueryValueEx(key, subkey) + except WindowsError, e: + if e.winerror == 2: # not found + return None + raise DistutilsSetupError("I could not read %s from the registry.\n%r" % (what, e)) + + # It does not matter that we don't expand environment strings, in fact it's better not to. + + if type != REG_SZ and type != REG_EXPAND_SZ: + raise DistutilsSetupError("I expected the registry entry for %s to have a string type (REG_SZ or REG_EXPAND_SZ), " + "and was flummoxed by it having type code %r." % (what, type)) + return (value, type) + + + def open_and_query(key, path, subkey, what): + try: + read_key = OpenKey(key, path, 0, KEY_QUERY_VALUE) + except WindowsError, e: + if e.winerror == 2: # not found + return None + raise DistutilsSetupError("I could not read %s from the registry because I could not open " + "the parent key.\n%r" % (what, e)) + + try: + return query(read_key, subkey, what) + finally: + CloseKey(read_key) + + + def update(key_name_path, subkey, desired_value, desired_type, goal, what): + (key, name, path) = key_name_path + + (old_value, old_type) = open_and_query(key, path, subkey, what) or (None, None) + if (old_value, old_type) == (desired_value, desired_type): + print "Already done: %s." 
% (goal,) + return False + + try: + update_key = OpenKey(key, path, 0, KEY_SET_VALUE|KEY_QUERY_VALUE) + except WindowsError, e: + if e.winerror != 2: + raise DistutilsSetupError("I tried to %s, but was not successful because I could not open " + "the registry key %s\\%s for writing.\n%r" + % (goal, name, path, e)) + try: + update_key = CreateKey(key, path) + except WindowsError, e: + raise DistutilsSetupError("I tried to %s, but was not successful because the registry key %s\\%s " + "did not exist, and I was unable to create it.\n%r" + % (goal, name, path, e)) + + (new_value, new_type) = (None, None) + try: + SetValueEx(update_key, subkey, 0, desired_type, desired_value) + except WindowsError, e: + raise DistutilsSetupError("I tried to %s, but was not able to set the subkey %r under %s\\%s to be %r.\n%r" + % (goal, subkey, name, path, desired_value)) + else: + (new_value, new_type) = query(update_key, subkey, what) or (None, None) + finally: + FlushKey(update_key) + CloseKey(update_key) + + if (new_value, new_type) != (desired_value, desired_type): + raise DistutilsSetupError("I tried to %s by setting the subkey %r under %s\\%s to be %r, " + "and the call to SetValueEx succeeded, but the value ended up as " + "%r instead (it was previously %r). Maybe the update was unexpectedly virtualized?" + % (goal, subkey, name, path, desired_value, new_value, old_value)) + + print "Done: %s." % (goal,) + return True + + + # Maintenance hazard: 'add_to_environment' and 'associate' use very similar, but not identical logic. + + def add_to_environment(varname, addition, change_allusers): + changed = False + what = "the %s environment variable %s" % (change_allusers and "system" or "user", varname) + goal = "add %s to %s" % (addition, what) + + system_valueandtype = query(system_env, varname, "the system environment variable %s" % (varname,)) + user_valueandtype = query(user_env, varname, "the user environment variable %s" % (varname,)) + + if change_allusers: + (value, type) = system_valueandtype or (u'', REG_SZ) + key_name_path = (HKEY_LOCAL_MACHINE, "HKEY_LOCAL_MACHINE", SYSTEM_ENV) + else: + (value, type) = user_valueandtype or system_valueandtype or (u'', REG_SZ) + key_name_path = (HKEY_CURRENT_USER, "HKEY_CURRENT_USER", USER_ENV) + + if addition.lower() in value.lower().split(u';'): + print "Already done: %s." % (goal,) + else: + changed |= update(key_name_path, varname, value + u';' + addition, type, goal, what) + + if change_allusers: + # Also change any overriding environment entry for the current user. 
+ (user_value, user_type) = user_valueandtype or (u'', REG_SZ) + split_value = user_value.lower().split(u';') + + if not (addition.lower() in split_value or u'%'+varname.lower()+u'%' in split_value): + now_what = "the overriding user environment variable %s" % (varname,) + changed |= update((HKEY_CURRENT_USER, "HKEY_CURRENT_USER", USER_ENV), + varname, user_value + u';' + addition, user_type, + "add %s to %s" % (addition, now_what), now_what) + + return changed + + + def associate(ext, target, change_allusers): + changed = False + what = "the %s association for %s" % (change_allusers and "system" or "user", ext) + goal = "associate the filetype %s with %s for %s" % (ext, target, change_allusers and "all users" or "the current user") + + try: + if change_allusers: + target_key = OpenKey(HKEY_LOCAL_MACHINE, "%s\\%s" % (SYSTEM_CLASSES, target), 0, KEY_QUERY_VALUE) + else: + target_key = OpenKey(HKEY_CLASSES_ROOT, target, 0, KEY_QUERY_VALUE) + except WindowsError, e: + raise DistutilsSetupError("I was going to %s, but that won't work because the %s class does not exist in the registry, " + "as far as I can tell.\n%r" % (goal, target, e)) + CloseKey(target_key) + + system_key_name_path = (HKEY_LOCAL_MACHINE, "HKEY_LOCAL_MACHINE", "%s\\%s" % (SYSTEM_CLASSES, ext)) + user_key_name_path = (HKEY_CURRENT_USER, "HKEY_CURRENT_USER", "%s\\%s" % (USER_CLASSES, ext)) + + system_valueandtype = open_and_query(system_classes, ext, "", "the system association for %s" % (ext,)) + user_valueandtype = open_and_query(user_classes, ext, "", "the user association for %s" % (ext,)) + + if change_allusers: + (value, type) = system_valueandtype or (u'', REG_SZ) + key_name_path = system_key_name_path + else: + (value, type) = user_valueandtype or system_valueandtype or (u'', REG_SZ) + key_name_path = user_key_name_path + + if value == target: + print "Already done: %s." % (goal,) + else: + changed |= update(key_name_path, "", unicode(target), REG_SZ, goal, what) + + if change_allusers: + # Also change any overriding association for the current user. + (user_value, user_type) = user_valueandtype or (u'', REG_SZ) + + if user_value != target: + changed |= update(user_key_name_path, "", unicode(target), REG_SZ, + "associate the filetype %s with %s for the current user " \ + "(because the system association is overridden)" % (ext, target), + "the overriding user association for %s" % (ext,)) + + return changed + + + def broadcast_settingchange(change_allusers): + print "Broadcasting that the environment has changed, please wait..." 
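A quick illustration of the duplicate check that add_to_environment, shown above, performs before touching the registry (the starting value here is hypothetical):

    # Python 2 sketch of the membership test used above.
    value = u'.COM;.EXE;.BAT;.CMD;.pyscript'   # pretend current PATHEXT
    addition = u'.PYSCRIPT'
    # case-insensitive, exact-entry match on the ';'-separated list:
    print addition.lower() in value.lower().split(u';')   # -> True, so no update

When the test is false, update() appends u';' + addition and writes the value back under its original registry type (REG_SZ or REG_EXPAND_SZ).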
+ + # + # + # LRESULT WINAPI SendMessageTimeoutW(HWND hWnd, UINT msg, WPARAM wParam, LPARAM lParam, + # UINT fuFlags, UINT uTimeout, PDWORD_PTR lpdwResult); + + try: + from ctypes import WINFUNCTYPE, POINTER, windll, addressof, c_wchar_p + from ctypes.wintypes import LONG, HWND, UINT, WPARAM, LPARAM, DWORD + + SendMessageTimeout = WINFUNCTYPE(POINTER(LONG), HWND, UINT, WPARAM, LPARAM, UINT, UINT, POINTER(POINTER(DWORD))) \ + (("SendMessageTimeoutW", windll.user32)) + HWND_BROADCAST = 0xFFFF + WM_SETTINGCHANGE = 0x001A + SMTO_ABORTIFHUNG = 0x0002 + SendMessageTimeout(HWND_BROADCAST, WM_SETTINGCHANGE, change_allusers and 1 or 0, + addressof(c_wchar_p(u"Environment")), SMTO_ABORTIFHUNG, 5000, None); + except Exception, e: + print "Warning: %r" % (e,) + + + changed_assoc = associate(".pyscript", "Python.File", allusers) + + changed_env = False + try: + changed_env |= add_to_environment("PATHEXT", ".pyscript", allusers) + changed_env |= add_to_environment("PATHEXT", ".pyw", allusers) + finally: + CloseKey(user_env) + CloseKey(system_env) + + if changed_assoc or changed_env: + broadcast_settingchange(allusers) + + if changed_env: + # whether logout is needed seems to randomly differ between installations + # of XP, but it is not needed in Vista or later. + try: + import platform, re + need_logout = not re.search(r'^[6-9]|([1-9][0-9]+)\.', platform.version()) + except Exception, e: + e # hush pyflakes + need_logout = True + + if need_logout: + print """ +*********************************************************************** +Changes have been made to the persistent environment, but they may not +take effect in this Windows session. Running installed Python scripts +from a Command Prompt may only work after you have logged out and back +in again, or rebooted. +*********************************************************************** +""" + else: + print """ +*********************************************************************** +Changes have been made to the persistent environment, but not in this +Command Prompt. Running installed Python scripts will only work from +new Command Prompts opened from now on. 
+***********************************************************************
+"""
diff --git a/setuptools-0.6c16dev3.egg/setuptools/command/sdist.py b/setuptools-0.6c16dev3.egg/setuptools/command/sdist.py
new file mode 100644
index 0000000..d84afdb
--- /dev/null
+++ b/setuptools-0.6c16dev3.egg/setuptools/command/sdist.py
@@ -0,0 +1,246 @@
+from distutils.command.sdist import sdist as _sdist
+from distutils.util import convert_path
+from distutils import log
+from glob import glob
+import os, re, sys, pkg_resources
+
+entities = [
+    ("&lt;","<"), ("&gt;", ">"), ("&quot;", '"'), ("&apos;", "'"),
+    ("&amp;", "&")
+]
+
+def unescape(data):
+    for old,new in entities:
+        data = data.replace(old,new)
+    return data
+
+def re_finder(pattern, postproc=None):
+    def find(dirname, filename):
+        f = open(filename,'rU')
+        data = f.read()
+        f.close()
+        for match in pattern.finditer(data):
+            path = match.group(1)
+            if postproc:
+                path = postproc(path)
+            yield joinpath(dirname,path)
+    return find
+
+def joinpath(prefix,suffix):
+    if not prefix:
+        return suffix
+    return os.path.join(prefix,suffix)
+
+def walk_revctrl(dirname=''):
+    """Find all files under revision control"""
+    for ep in pkg_resources.iter_entry_points('setuptools.file_finders'):
+        for item in ep.load()(dirname):
+            yield item
+
+def _default_revctrl(dirname=''):
+    for path, finder in finders:
+        path = joinpath(dirname,path)
+        if os.path.isfile(path):
+            for path in finder(dirname,path):
+                if os.path.isfile(path):
+                    yield path
+                elif os.path.isdir(path):
+                    for item in _default_revctrl(path):
+                        yield item
+
+def externals_finder(dirname, filename):
+    """Find any 'svn:externals' directories"""
+    found = False
+    f = open(filename,'rb')
+    for line in iter(f.readline, ''):    # can't use direct iter!
+        parts = line.split()
+        if len(parts)==2:
+            kind,length = parts
+            data = f.read(int(length))
+            if kind=='K' and data=='svn:externals':
+                found = True
+            elif kind=='V' and found:
+                f.close()
+                break
+    else:
+        f.close()
+        return
+
+    for line in data.splitlines():
+        parts = line.split()
+        if parts:
+            yield joinpath(dirname, parts[0])
+
+entries_pattern = re.compile(r'name="([^"]+)"(?![^>]+deleted="true")', re.I)
+
+def entries_finder(dirname, filename):
+    f = open(filename,'rU')
+    data = f.read()
+    f.close()
+    if data.startswith('<?xml'):
+        for match in entries_pattern.finditer(data):
+            yield joinpath(dirname, unescape(match.group(1)))
+    else:
+        svnver = -1
+        try: svnver = int(data.splitlines()[0])
+        except: pass
+        if svnver < 8:
+            log.warn("unrecognized .svn/entries format in %s", dirname)
+            return
+        for record in map(str.splitlines, data.split('\n\x0c\n')[1:]):
+            if not record or len(record)>=6 and record[5]=="delete":
+                continue    # skip deleted
+            yield joinpath(dirname, record[0])
+
+finders = [
+    (convert_path('CVS/Entries'),
+        re_finder(re.compile(r"^\w?/([^/]+)/", re.M))),
+    (convert_path('.svn/entries'), entries_finder),
+    (convert_path('.svn/dir-props'), externals_finder),
+    (convert_path('.svn/dir-prop-base'), externals_finder),  # svn 1.4
+]
+
+class sdist(_sdist):
+    """Smart sdist that finds anything supported by revision control"""
+
+    user_options = [
+        ('formats=', None,
+         "formats for source distribution (comma-separated list)"),
+        ('keep-temp', 'k',
+         "keep the distribution tree around after creating " +
+         "archive file(s)"),
+        ('dist-dir=', 'd',
+         "directory to put the source distribution archive(s) in "
+         "[default: dist]"),
+    ]
+
+    negative_opt = {}
+
+    def run(self):
+        self.run_command('egg_info')
+        ei_cmd = self.get_finalized_command('egg_info')
+        self.filelist = ei_cmd.filelist
+        self.filelist.append(os.path.join(ei_cmd.egg_info,'SOURCES.txt'))
+        self.check_readme()
+        self.check_metadata()
+        self.make_distribution()
+
+        dist_files = getattr(self.distribution,'dist_files',[])
+        for file in self.archive_files:
+            data = ('sdist', '', file)
+            if data not in dist_files:
dist_files.append(data) + + def read_template(self): + try: + _sdist.read_template(self) + except: + # grody hack to close the template file (MANIFEST.in) + # this prevents easy_install's attempt at deleting the file from + # dying and thus masking the real error + sys.exc_info()[2].tb_next.tb_frame.f_locals['template'].close() + raise + + # Cribbed from old distutils code, to work around new distutils code + # that tries to do some of the same stuff as we do, in a way that makes + # us loop. + + def add_defaults (self): + standards = [('README', 'README.txt'), self.distribution.script_name] + + for fn in standards: + if type(fn) is tuple: + alts = fn + got_it = 0 + for fn in alts: + if os.path.exists(fn): + got_it = 1 + self.filelist.append(fn) + break + + if not got_it: + self.warn("standard file not found: should have one of " + + ', '.join(alts)) + else: + if os.path.exists(fn): + self.filelist.append(fn) + else: + self.warn("standard file '%s' not found" % fn) + + optional = ['test/test*.py', 'setup.cfg'] + + for pattern in optional: + files = filter(os.path.isfile, glob(pattern)) + if files: + self.filelist.extend(files) + + if self.distribution.has_pure_modules(): + build_py = self.get_finalized_command('build_py') + self.filelist.extend(build_py.get_source_files()) + + if self.distribution.has_ext_modules(): + build_ext = self.get_finalized_command('build_ext') + self.filelist.extend(build_ext.get_source_files()) + + if self.distribution.has_c_libraries(): + build_clib = self.get_finalized_command('build_clib') + self.filelist.extend(build_clib.get_source_files()) + + if self.distribution.has_scripts(): + build_scripts = self.get_finalized_command('build_scripts') + self.filelist.extend(build_scripts.get_source_files()) + + + def check_readme(self): + alts = ("README", "README.txt") + for f in alts: + if os.path.exists(f): + return + else: + self.warn( + "standard file not found: should have one of " +', '.join(alts) + ) + + + def make_release_tree(self, base_dir, files): + _sdist.make_release_tree(self, base_dir, files) + + # Save any egg_info command line options used to create this sdist + dest = os.path.join(base_dir, 'setup.cfg') + if hasattr(os,'link') and os.path.exists(dest): + # unlink and re-copy, since it might be hard-linked, and + # we don't want to change the source version + os.unlink(dest) + self.copy_file('setup.cfg', dest) + + self.get_finalized_command('egg_info').save_version_info(dest) + + + + + + + + +# diff --git a/setuptools-0.6c16dev3.egg/setuptools/command/setopt.py b/setuptools-0.6c16dev3.egg/setuptools/command/setopt.py new file mode 100644 index 0000000..e0c1058 --- /dev/null +++ b/setuptools-0.6c16dev3.egg/setuptools/command/setopt.py @@ -0,0 +1,158 @@ +import distutils, os +from setuptools import Command +from distutils.util import convert_path +from distutils import log +from distutils.errors import * + +__all__ = ['config_file', 'edit_config', 'option_base', 'setopt'] + + +def config_file(kind="local"): + """Get the filename of the distutils, local, global, or per-user config + + `kind` must be one of "local", "global", or "user" + """ + if kind=='local': + return 'setup.cfg' + if kind=='global': + return os.path.join( + os.path.dirname(distutils.__file__),'distutils.cfg' + ) + if kind=='user': + dot = os.name=='posix' and '.' 
or '' + return os.path.expanduser(convert_path("~/%spydistutils.cfg" % dot)) + raise ValueError( + "config_file() type must be 'local', 'global', or 'user'", kind + ) + + + + + + + + + + + + + + + +def edit_config(filename, settings, dry_run=False): + """Edit a configuration file to include `settings` + + `settings` is a dictionary of dictionaries or ``None`` values, keyed by + command/section name. A ``None`` value means to delete the entire section, + while a dictionary lists settings to be changed or deleted in that section. + A setting of ``None`` means to delete that setting. + """ + from ConfigParser import RawConfigParser + log.debug("Reading configuration from %s", filename) + opts = RawConfigParser() + opts.read([filename]) + for section, options in settings.items(): + if options is None: + log.info("Deleting section [%s] from %s", section, filename) + opts.remove_section(section) + else: + if not opts.has_section(section): + log.debug("Adding new section [%s] to %s", section, filename) + opts.add_section(section) + for option,value in options.items(): + if value is None: + log.debug("Deleting %s.%s from %s", + section, option, filename + ) + opts.remove_option(section,option) + if not opts.options(section): + log.info("Deleting empty [%s] section from %s", + section, filename) + opts.remove_section(section) + else: + log.debug( + "Setting %s.%s to %r in %s", + section, option, value, filename + ) + opts.set(section,option,value) + + log.info("Writing %s", filename) + if not dry_run: + f = open(filename,'w'); opts.write(f); f.close() + +class option_base(Command): + """Abstract base class for commands that mess with config files""" + + user_options = [ + ('global-config', 'g', + "save options to the site-wide distutils.cfg file"), + ('user-config', 'u', + "save options to the current user's pydistutils.cfg file"), + ('filename=', 'f', + "configuration file to use (default=setup.cfg)"), + ] + + boolean_options = [ + 'global-config', 'user-config', + ] + + def initialize_options(self): + self.global_config = None + self.user_config = None + self.filename = None + + def finalize_options(self): + filenames = [] + if self.global_config: + filenames.append(config_file('global')) + if self.user_config: + filenames.append(config_file('user')) + if self.filename is not None: + filenames.append(self.filename) + if not filenames: + filenames.append(config_file('local')) + if len(filenames)>1: + raise DistutilsOptionError( + "Must specify only one configuration file option", + filenames + ) + self.filename, = filenames + + + + +class setopt(option_base): + """Save command-line options to a file""" + + description = "set an option in setup.cfg or another config file" + + user_options = [ + ('command=', 'c', 'command to set an option for'), + ('option=', 'o', 'option to set'), + ('set-value=', 's', 'value of the option'), + ('remove', 'r', 'remove (unset) the value'), + ] + option_base.user_options + + boolean_options = option_base.boolean_options + ['remove'] + + def initialize_options(self): + option_base.initialize_options(self) + self.command = None + self.option = None + self.set_value = None + self.remove = None + + def finalize_options(self): + option_base.finalize_options(self) + if self.command is None or self.option is None: + raise DistutilsOptionError("Must specify --command *and* --option") + if self.set_value is None and not self.remove: + raise DistutilsOptionError("Must specify --set-value or --remove") + + def run(self): + edit_config( + self.filename, { + self.command: 
{self.option.replace('-','_'):self.set_value} + }, + self.dry_run + ) diff --git a/setuptools-0.6c16dev3.egg/setuptools/command/test.py b/setuptools-0.6c16dev3.egg/setuptools/command/test.py new file mode 100644 index 0000000..df5add5 --- /dev/null +++ b/setuptools-0.6c16dev3.egg/setuptools/command/test.py @@ -0,0 +1,164 @@ +from setuptools import Command +from distutils.errors import DistutilsOptionError +import sys +from pkg_resources import * +from unittest import TestLoader, main + +class ScanningLoader(TestLoader): + + def loadTestsFromModule(self, module): + """Return a suite of all tests cases contained in the given module + + If the module is a package, load tests from all the modules in it. + If the module has an ``additional_tests`` function, call it and add + the return value to the tests. + """ + tests = [] + if module.__name__!='setuptools.tests.doctest': # ugh + tests.append(TestLoader.loadTestsFromModule(self,module)) + + if hasattr(module, "additional_tests"): + tests.append(module.additional_tests()) + + if hasattr(module, '__path__'): + for file in resource_listdir(module.__name__, ''): + if file.endswith('.py') and file!='__init__.py': + submodule = module.__name__+'.'+file[:-3] + else: + if resource_exists( + module.__name__, file+'/__init__.py' + ): + submodule = module.__name__+'.'+file + else: + continue + tests.append(self.loadTestsFromName(submodule)) + + if len(tests)!=1: + return self.suiteClass(tests) + else: + return tests[0] # don't create a nested suite for only one return + + +class test(Command): + """Command to run unit tests after in-place build""" + + description = "run unit tests after in-place build" + + user_options = [ + ('test-module=','m', "Run 'test_suite' in specified module"), + ('test-suite=','s', + "Test suite to run (e.g. 
'some_module.test_suite')"), + ('test-runner=','r', "Test runner to use"), + ] + + def initialize_options(self): + self.test_runner = None + self.test_suite = None + self.test_module = None + self.test_loader = None + + def finalize_options(self): + if self.test_suite is None: + if self.test_module is None: + self.test_suite = self.distribution.test_suite + else: + self.test_suite = self.test_module+".test_suite" + elif self.test_module: + raise DistutilsOptionError( + "You may specify a module or a suite, but not both" + ) + + self.test_args = [self.test_suite] + + if self.verbose: + self.test_args.insert(0,'--verbose') + if self.test_loader is None: + self.test_loader = getattr(self.distribution,'test_loader',None) + if self.test_loader is None: + self.test_loader = "setuptools.command.test:ScanningLoader" + if self.test_runner is None: + self.test_runner = getattr(self.distribution,'test_runner',None) + + + def with_project_on_sys_path(self, func): + # Ensure metadata is up-to-date + self.run_command('egg_info') + + # Build extensions in-place + self.reinitialize_command('build_ext', inplace=1) + self.run_command('build_ext') + + ei_cmd = self.get_finalized_command("egg_info") + + old_path = sys.path[:] + old_modules = sys.modules.copy() + + try: + sys.path.insert(0, normalize_path(ei_cmd.egg_base)) + working_set.__init__() + add_activation_listener(lambda dist: dist.activate()) + require('%s==%s' % (ei_cmd.egg_name, ei_cmd.egg_version)) + func() + finally: + sys.path[:] = old_path + sys.modules.clear() + sys.modules.update(old_modules) + working_set.__init__() + + + def run(self): + if self.distribution.install_requires: + self.distribution.fetch_build_eggs(self.distribution.install_requires) + if self.distribution.tests_require: + self.distribution.fetch_build_eggs(self.distribution.tests_require) + + if self.test_suite: + cmd = ' '.join(self.test_args) + if self.dry_run: + self.announce('skipping "unittest %s" (dry run)' % cmd) + else: + self.announce('running "unittest %s"' % cmd) + self.with_project_on_sys_path(self.run_tests) + + + def run_tests(self): + import unittest + loader_ep = EntryPoint.parse("x="+self.test_loader) + loader_class = loader_ep.load(require=False) + kw = {} + if self.test_runner is not None: + runner_ep = EntryPoint.parse("x="+self.test_runner) + runner_class = runner_ep.load(require=False) + kw['testRunner'] = runner_class() + unittest.main( + None, None, [unittest.__file__]+self.test_args, + testLoader = loader_class(), **kw + ) + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/setuptools-0.6c16dev3.egg/setuptools/command/upload.py b/setuptools-0.6c16dev3.egg/setuptools/command/upload.py new file mode 100644 index 0000000..7ac08c2 --- /dev/null +++ b/setuptools-0.6c16dev3.egg/setuptools/command/upload.py @@ -0,0 +1,181 @@ +"""distutils.command.upload + +Implements the Distutils 'upload' subcommand (upload package to PyPI).""" + +from distutils.errors import * +from distutils.core import Command +from distutils.spawn import spawn +from distutils import log +try: + from hashlib import md5 +except ImportError: + from md5 import md5 +import os +import socket +import platform +import ConfigParser +import httplib +import base64 +import urlparse +import cStringIO as StringIO + +class upload(Command): + + description = "upload binary package to PyPI" + + DEFAULT_REPOSITORY = 'http://pypi.python.org/pypi' + + user_options = [ + ('repository=', 'r', + "url of repository [default: %s]" % DEFAULT_REPOSITORY), + ('show-response', None, + 'display full 
response text from server'), + ('sign', 's', + 'sign files to upload using gpg'), + ('identity=', 'i', 'GPG identity used to sign files'), + ] + boolean_options = ['show-response', 'sign'] + + def initialize_options(self): + self.username = '' + self.password = '' + self.repository = '' + self.show_response = 0 + self.sign = False + self.identity = None + + def finalize_options(self): + if self.identity and not self.sign: + raise DistutilsOptionError( + "Must use --sign for --identity to have meaning" + ) + if os.environ.has_key('HOME'): + rc = os.path.join(os.environ['HOME'], '.pypirc') + if os.path.exists(rc): + self.announce('Using PyPI login from %s' % rc) + config = ConfigParser.ConfigParser({ + 'username':'', + 'password':'', + 'repository':''}) + config.read(rc) + if not self.repository: + self.repository = config.get('server-login', 'repository') + if not self.username: + self.username = config.get('server-login', 'username') + if not self.password: + self.password = config.get('server-login', 'password') + if not self.repository: + self.repository = self.DEFAULT_REPOSITORY + + def run(self): + if not self.distribution.dist_files: + raise DistutilsOptionError("No dist file created in earlier command") + for command, pyversion, filename in self.distribution.dist_files: + self.upload_file(command, pyversion, filename) + + def upload_file(self, command, pyversion, filename): + # Sign if requested + if self.sign: + gpg_args = ["gpg", "--detach-sign", "-a", filename] + if self.identity: + gpg_args[2:2] = ["--local-user", self.identity] + spawn(gpg_args, + dry_run=self.dry_run) + + # Fill in the data + content = open(filename,'rb').read() + basename = os.path.basename(filename) + comment = '' + if command=='bdist_egg' and self.distribution.has_ext_modules(): + comment = "built on %s" % platform.platform(terse=1) + data = { + ':action':'file_upload', + 'protcol_version':'1', + 'name':self.distribution.get_name(), + 'version':self.distribution.get_version(), + 'content':(basename,content), + 'filetype':command, + 'pyversion':pyversion, + 'md5_digest':md5(content).hexdigest(), + } + if command == 'bdist_rpm': + dist, version, id = platform.dist() + if dist: + comment = 'built for %s %s' % (dist, version) + elif command == 'bdist_dumb': + comment = 'built for %s' % platform.platform(terse=1) + data['comment'] = comment + + if self.sign: + data['gpg_signature'] = (os.path.basename(filename) + ".asc", + open(filename+".asc").read()) + + # set up the authentication + auth = "Basic " + base64.encodestring(self.username + ":" + self.password).strip() + + # Build up the MIME payload for the POST data + boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254' + sep_boundary = '\n--' + boundary + end_boundary = sep_boundary + '--' + body = StringIO.StringIO() + for key, value in data.items(): + # handle multiple entries for the same name + if type(value) != type([]): + value = [value] + for value in value: + if type(value) is tuple: + fn = ';filename="%s"' % value[0] + value = value[1] + else: + fn = "" + value = str(value) + body.write(sep_boundary) + body.write('\nContent-Disposition: form-data; name="%s"'%key) + body.write(fn) + body.write("\n\n") + body.write(value) + if value and value[-1] == '\r': + body.write('\n') # write an extra newline (lurve Macs) + body.write(end_boundary) + body.write("\n") + body = body.getvalue() + + self.announce("Submitting %s to %s" % (filename, self.repository), log.INFO) + + # build the Request + # We can't use urllib2 since we need to send the Basic + # 
auth right with the first request + schema, netloc, url, params, query, fragments = \ + urlparse.urlparse(self.repository) + assert not params and not query and not fragments + if schema == 'http': + http = httplib.HTTPConnection(netloc) + elif schema == 'https': + http = httplib.HTTPSConnection(netloc) + else: + raise AssertionError, "unsupported schema "+schema + + data = '' + loglevel = log.INFO + try: + http.connect() + http.putrequest("POST", url) + http.putheader('Content-type', + 'multipart/form-data; boundary=%s'%boundary) + http.putheader('Content-length', str(len(body))) + http.putheader('Authorization', auth) + http.endheaders() + http.send(body) + except socket.error, e: + self.announce(str(e), log.ERROR) + return + + r = http.getresponse() + if r.status == 200: + self.announce('Server response (%s): %s' % (r.status, r.reason), + log.INFO) + else: + self.announce('Upload failed (%s): %s' % (r.status, r.reason), + log.ERROR) + if self.show_response: + print '-'*75, r.read(), '-'*75 diff --git a/setuptools-0.6c16dev3.egg/setuptools/depends.py b/setuptools-0.6c16dev3.egg/setuptools/depends.py new file mode 100644 index 0000000..5fdf2d7 --- /dev/null +++ b/setuptools-0.6c16dev3.egg/setuptools/depends.py @@ -0,0 +1,246 @@ +from __future__ import generators +import sys, imp, marshal +from imp import PKG_DIRECTORY, PY_COMPILED, PY_SOURCE, PY_FROZEN +from distutils.version import StrictVersion, LooseVersion + +__all__ = [ + 'Require', 'find_module', 'get_module_constant', 'extract_constant' +] + +class Require: + """A prerequisite to building or installing a distribution""" + + def __init__(self,name,requested_version,module,homepage='', + attribute=None,format=None + ): + + if format is None and requested_version is not None: + format = StrictVersion + + if format is not None: + requested_version = format(requested_version) + if attribute is None: + attribute = '__version__' + + self.__dict__.update(locals()) + del self.self + + + def full_name(self): + """Return full package/distribution name, w/version""" + if self.requested_version is not None: + return '%s-%s' % (self.name,self.requested_version) + return self.name + + + def version_ok(self,version): + """Is 'version' sufficiently up-to-date?""" + return self.attribute is None or self.format is None or \ + str(version)!="unknown" and version >= self.requested_version + + + def get_version(self, paths=None, default="unknown"): + + """Get version number of installed module, 'None', or 'default' + + Search 'paths' for module. If not found, return 'None'. If found, + return the extracted version attribute, or 'default' if no version + attribute was specified, or the value cannot be determined without + importing the module. The version is formatted according to the + requirement's version format (if any), unless it is 'None' or the + supplied 'default'. 
+ """ + + if self.attribute is None: + try: + f,p,i = find_module(self.module,paths) + if f: f.close() + return default + except ImportError: + return None + + v = get_module_constant(self.module,self.attribute,default,paths) + + if v is not None and v is not default and self.format is not None: + return self.format(v) + + return v + + + def is_present(self,paths=None): + """Return true if dependency is present on 'paths'""" + return self.get_version(paths) is not None + + + def is_current(self,paths=None): + """Return true if dependency is present and up-to-date on 'paths'""" + version = self.get_version(paths) + if version is None: + return False + return self.version_ok(version) + + +def _iter_code(code): + + """Yield '(op,arg)' pair for each operation in code object 'code'""" + + from array import array + from dis import HAVE_ARGUMENT, EXTENDED_ARG + + bytes = array('b',code.co_code) + eof = len(code.co_code) + + ptr = 0 + extended_arg = 0 + + while ptr=HAVE_ARGUMENT: + + arg = bytes[ptr+1] + bytes[ptr+2]*256 + extended_arg + ptr += 3 + + if op==EXTENDED_ARG: + extended_arg = arg * 65536L + continue + + else: + arg = None + ptr += 1 + + yield op,arg + + + + + + + + + + +def find_module(module, paths=None): + """Just like 'imp.find_module()', but with package support""" + + parts = module.split('.') + + while parts: + part = parts.pop(0) + f, path, (suffix,mode,kind) = info = imp.find_module(part, paths) + + if kind==PKG_DIRECTORY: + parts = parts or ['__init__'] + paths = [path] + + elif parts: + raise ImportError("Can't find %r in %s" % (parts,module)) + + return info + + + + + + + + + + + + + + + + + + + + + + + + +def get_module_constant(module, symbol, default=-1, paths=None): + + """Find 'module' by searching 'paths', and extract 'symbol' + + Return 'None' if 'module' does not exist on 'paths', or it does not define + 'symbol'. If the module defines 'symbol' as a constant, return the + constant. Otherwise, return 'default'.""" + + try: + f, path, (suffix,mode,kind) = find_module(module,paths) + except ImportError: + # Module doesn't exist + return None + + try: + if kind==PY_COMPILED: + f.read(8) # skip magic & date + code = marshal.load(f) + elif kind==PY_FROZEN: + code = imp.get_frozen_object(module) + elif kind==PY_SOURCE: + code = compile(f.read(), path, 'exec') + else: + # Not something we can parse; we'll have to import it. :( + if module not in sys.modules: + imp.load_module(module,f,path,(suffix,mode,kind)) + return getattr(sys.modules[module],symbol,None) + + finally: + if f: + f.close() + + return extract_constant(code,symbol,default) + + + + + + + + +def extract_constant(code,symbol,default=-1): + """Extract the constant value of 'symbol' from 'code' + + If the name 'symbol' is bound to a constant value by the Python code + object 'code', return that value. If 'symbol' is bound to an expression, + return 'default'. Otherwise, return 'None'. + + Return value is based on the first assignment to 'symbol'. 'symbol' must + be a global, or at least a non-"fast" local in the code block. That is, + only 'STORE_NAME' and 'STORE_GLOBAL' opcodes are checked, and 'symbol' + must be present in 'code.co_names'. 
+ """ + + if symbol not in code.co_names: + # name's not there, can't possibly be an assigment + return None + + name_idx = list(code.co_names).index(symbol) + + STORE_NAME = 90 + STORE_GLOBAL = 97 + LOAD_CONST = 100 + + const = default + + for op, arg in _iter_code(code): + + if op==LOAD_CONST: + const = code.co_consts[arg] + elif arg==name_idx and (op==STORE_NAME or op==STORE_GLOBAL): + return const + else: + const = default + +if sys.platform.startswith('java') or sys.platform == 'cli': + # XXX it'd be better to test assertions about bytecode instead... + del extract_constant, get_module_constant + __all__.remove('extract_constant') + __all__.remove('get_module_constant') + + diff --git a/setuptools-0.6c16dev3.egg/setuptools/dist.py b/setuptools-0.6c16dev3.egg/setuptools/dist.py new file mode 100644 index 0000000..482c6bf --- /dev/null +++ b/setuptools-0.6c16dev3.egg/setuptools/dist.py @@ -0,0 +1,861 @@ +__all__ = ['Distribution'] + +from distutils.core import Distribution as _Distribution +from setuptools.depends import Require +from setuptools.command.install import install +from setuptools.command.sdist import sdist +from setuptools.command.install_lib import install_lib +from distutils.errors import DistutilsOptionError, DistutilsPlatformError +from distutils.errors import DistutilsSetupError +import setuptools, pkg_resources, distutils.core, distutils.dist, distutils.cmd +import os, distutils.log, re + +def _get_unpatched(cls): + """Protect against re-patching the distutils if reloaded + + Also ensures that no other distutils extension monkeypatched the distutils + first. + """ + while cls.__module__.startswith('setuptools'): + cls, = cls.__bases__ + if not cls.__module__.startswith('distutils'): + raise AssertionError( + "distutils has already been patched by %r" % cls + ) + return cls + +_Distribution = _get_unpatched(_Distribution) + +sequence = tuple, list + +def check_importable(dist, attr, value): + try: + ep = pkg_resources.EntryPoint.parse('x='+value) + assert not ep.extras + except (TypeError,ValueError,AttributeError,AssertionError): + raise DistutilsSetupError( + "%r must be importable 'module:attrs' string (got %r)" + % (attr,value) + ) + + +def assert_string_list(dist, attr, value): + """Verify that value is a string list or None""" + try: + assert ''.join(value)!=value + except (TypeError,ValueError,AttributeError,AssertionError): + raise DistutilsSetupError( + "%r must be a list of strings (got %r)" % (attr,value) + ) + +def check_nsp(dist, attr, value): + """Verify that namespace packages are valid""" + assert_string_list(dist,attr,value) + for nsp in value: + if not dist.has_contents_for(nsp): + raise DistutilsSetupError( + "Distribution contains no modules or packages for " + + "namespace package %r" % nsp + ) + if '.' in nsp: + parent = '.'.join(nsp.split('.')[:-1]) + if parent not in value: + distutils.log.warn( + "WARNING: %r is declared as a package namespace, but %r" + " is not: please correct this in setup.py", nsp, parent + ) + +def check_extras(dist, attr, value): + """Verify that extras_require mapping is valid""" + try: + for k,v in value.items(): + list(pkg_resources.parse_requirements(v)) + except (TypeError,ValueError,AttributeError): + raise DistutilsSetupError( + "'extras_require' must be a dictionary whose values are " + "strings or lists of strings containing valid project/version " + "requirement specifiers." 
+ ) + + + + +def assert_bool(dist, attr, value): + """Verify that value is True, False, 0, or 1""" + if bool(value) != value: + raise DistutilsSetupError( + "%r must be a boolean value (got %r)" % (attr,value) + ) +def check_requirements(dist, attr, value): + """Verify that install_requires is a valid requirements list""" + try: + list(pkg_resources.parse_requirements(value)) + except (TypeError,ValueError): + raise DistutilsSetupError( + "%r must be a string or list of strings " + "containing valid project/version requirement specifiers" % (attr,) + ) +def check_entry_points(dist, attr, value): + """Verify that entry_points map is parseable""" + try: + pkg_resources.EntryPoint.parse_map(value) + except ValueError, e: + raise DistutilsSetupError(e) + +def check_test_suite(dist, attr, value): + if not isinstance(value,basestring): + raise DistutilsSetupError("test_suite must be a string") + +def check_package_data(dist, attr, value): + """Verify that value is a dictionary of package names to glob lists""" + if isinstance(value,dict): + for k,v in value.items(): + if not isinstance(k,str): break + try: iter(v) + except TypeError: + break + else: + return + raise DistutilsSetupError( + attr+" must be a dictionary mapping package names to lists of " + "wildcard patterns" + ) + +def check_packages(dist, attr, value): + for pkgname in value: + if not re.match(r'\w+(\.\w+)*', pkgname): + distutils.log.warn( + "WARNING: %r not a valid package name; please use only" + ".-separated package names in setup.py", pkgname + ) + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +class Distribution(_Distribution): + """Distribution with support for features, tests, and package data + + This is an enhanced version of 'distutils.dist.Distribution' that + effectively adds the following new optional keyword arguments to 'setup()': + + 'install_requires' -- a string or sequence of strings specifying project + versions that the distribution requires when installed, in the format + used by 'pkg_resources.require()'. They will be installed + automatically when the package is installed. If you wish to use + packages that are not available in PyPI, or want to give your users an + alternate download location, you can add a 'find_links' option to the + '[easy_install]' section of your project's 'setup.cfg' file, and then + setuptools will scan the listed web pages for links that satisfy the + requirements. + + 'extras_require' -- a dictionary mapping names of optional "extras" to the + additional requirement(s) that using those extras incurs. For example, + this:: + + extras_require = dict(reST = ["docutils>=0.3", "reSTedit"]) + + indicates that the distribution can optionally provide an extra + capability called "reST", but it can only be used if docutils and + reSTedit are installed. If the user installs your package using + EasyInstall and requests one of your extras, the corresponding + additional requirements will be installed if needed. + + 'features' -- a dictionary mapping option names to 'setuptools.Feature' + objects. Features are a portion of the distribution that can be + included or excluded based on user options, inter-feature dependencies, + and availability on the current system. Excluded features are omitted + from all setup commands, including source and binary distributions, so + you can create multiple distributions from the same source tree. + Feature names should be valid Python identifiers, except that they may + contain the '-' (minus) sign. 
Features can be included or excluded + via the command line options '--with-X' and '--without-X', where 'X' is + the name of the feature. Whether a feature is included by default, and + whether you are allowed to control this from the command line, is + determined by the Feature object. See the 'Feature' class for more + information. + + 'test_suite' -- the name of a test suite to run for the 'test' command. + If the user runs 'python setup.py test', the package will be installed, + and the named test suite will be run. The format is the same as + would be used on a 'unittest.py' command line. That is, it is the + dotted name of an object to import and call to generate a test suite. + + 'package_data' -- a dictionary mapping package names to lists of filenames + or globs to use to find data files contained in the named packages. + If the dictionary has filenames or globs listed under '""' (the empty + string), those names will be searched for in every package, in addition + to any names for the specific package. Data files found using these + names/globs will be installed along with the package, in the same + location as the package. Note that globs are allowed to reference + the contents of non-package subdirectories, as long as you use '/' as + a path separator. (Globs are automatically converted to + platform-specific paths at runtime.) + + In addition to these new keywords, this class also has several new methods + for manipulating the distribution's contents. For example, the 'include()' + and 'exclude()' methods can be thought of as in-place add and subtract + commands that add or remove packages, modules, extensions, and so on from + the distribution. They are used by the feature subsystem to configure the + distribution for the included and excluded features. + """ + + _patched_dist = None + + def patch_missing_pkg_info(self, attrs): + # Fake up a replacement for the data that would normally come from + # PKG-INFO, but which might not yet be built if this is a fresh + # checkout. 
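+        # Illustrative sketch: for attrs like {'name': 'zfec', 'version':
+        # '1.4.2'} (version hypothetical), the lookup key below is
+        # pkg_resources.safe_name('zfec').lower() == 'zfec', and a matching
+        # already-known dist that lacks PKG-INFO gets its _version patched
+        # to pkg_resources.safe_version('1.4.2').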
+ # + if not attrs or 'name' not in attrs or 'version' not in attrs: + return + key = pkg_resources.safe_name(str(attrs['name'])).lower() + dist = pkg_resources.working_set.by_key.get(key) + if dist is not None and not dist.has_metadata('PKG-INFO'): + dist._version = pkg_resources.safe_version(str(attrs['version'])) + self._patched_dist = dist + + def __init__ (self, attrs=None): + have_package_data = hasattr(self, "package_data") + if not have_package_data: + self.package_data = {} + self.require_features = [] + self.features = {} + self.dist_files = [] + self.patch_missing_pkg_info(attrs) + # Make sure we have any eggs needed to interpret 'attrs' + if attrs is not None: + self.dependency_links = attrs.pop('dependency_links', []) + assert_string_list(self,'dependency_links',self.dependency_links) + if attrs and 'setup_requires' in attrs: + self.fetch_build_eggs(attrs.pop('setup_requires')) + for ep in pkg_resources.iter_entry_points('distutils.setup_keywords'): + if not hasattr(self,ep.name): + setattr(self,ep.name,None) + _Distribution.__init__(self,attrs) + if isinstance(self.metadata.version, (int,long,float)): + # Some people apparently take "version number" too literally :) + self.metadata.version = str(self.metadata.version) + + def parse_command_line(self): + """Process features after parsing command line options""" + result = _Distribution.parse_command_line(self) + if self.features: + self._finalize_features() + return result + + def _feature_attrname(self,name): + """Convert feature name to corresponding option attribute name""" + return 'with_'+name.replace('-','_') + + def fetch_build_eggs(self, requires): + """Resolve pre-setup requirements""" + from pkg_resources import working_set, parse_requirements + for dist in working_set.resolve( + parse_requirements(requires), installer=self.fetch_build_egg + ): + working_set.add(dist) + + def finalize_options(self): + _Distribution.finalize_options(self) + if self.features: + self._set_global_opts_from_features() + + for ep in pkg_resources.iter_entry_points('distutils.setup_keywords'): + value = getattr(self,ep.name,None) + if value is not None: + ep.require(installer=self.fetch_build_egg) + ep.load()(self, ep.name, value) + + def fetch_build_egg(self, req): + """Fetch an egg needed for building""" + try: + cmd = self._egg_fetcher + except AttributeError: + from setuptools.command.easy_install import easy_install + dist = self.__class__({'script_args':['easy_install']}) + dist.parse_config_files() + opts = dist.get_option_dict('easy_install') + keep = ( + 'find_links', 'site_dirs', 'index_url', 'optimize', + 'site_dirs', 'allow_hosts' + ) + for key in opts.keys(): + if key not in keep: + del opts[key] # don't use any other settings + if self.dependency_links: + links = self.dependency_links[:] + if 'find_links' in opts: + links = opts['find_links'][1].split() + links + opts['find_links'] = ('setup', links) + cmd = easy_install( + dist, args=["x"], install_dir=os.curdir, exclude_scripts=True, + always_copy=False, build_directory=None, editable=False, + upgrade=False, multi_version=True, no_report = True + ) + cmd.ensure_finalized() + self._egg_fetcher = cmd + return cmd.easy_install(req) + + def _set_global_opts_from_features(self): + """Add --with-X/--without-X options based on optional features""" + + go = [] + no = self.negative_opt.copy() + + for name,feature in self.features.items(): + self._set_feature(name,None) + feature.validate(self) + + if feature.optional: + descr = feature.description + incdef = ' (default)' + excdef='' 
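+                # Illustrative sketch (hypothetical feature name 'speed'):
+                # the two go.append() calls below produce option help like
+                #   --with-speed     include speed support (default)
+                #   --without-speed  exclude speed support
+                # and the swap just below moves the ' (default)' marker to
+                # the exclude side when include_by_default() is false.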
+ if not feature.include_by_default(): + excdef, incdef = incdef, excdef + + go.append(('with-'+name, None, 'include '+descr+incdef)) + go.append(('without-'+name, None, 'exclude '+descr+excdef)) + no['without-'+name] = 'with-'+name + + self.global_options = self.feature_options = go + self.global_options + self.negative_opt = self.feature_negopt = no + + + + + + + + + + + + + + + + + + + def _finalize_features(self): + """Add/remove features and resolve dependencies between them""" + + # First, flag all the enabled items (and thus their dependencies) + for name,feature in self.features.items(): + enabled = self.feature_is_included(name) + if enabled or (enabled is None and feature.include_by_default()): + feature.include_in(self) + self._set_feature(name,1) + + # Then disable the rest, so that off-by-default features don't + # get flagged as errors when they're required by an enabled feature + for name,feature in self.features.items(): + if not self.feature_is_included(name): + feature.exclude_from(self) + self._set_feature(name,0) + + + def get_command_class(self, command): + """Pluggable version of get_command_class()""" + if command in self.cmdclass: + return self.cmdclass[command] + + for ep in pkg_resources.iter_entry_points('distutils.commands',command): + ep.require(installer=self.fetch_build_egg) + self.cmdclass[command] = cmdclass = ep.load() + return cmdclass + else: + return _Distribution.get_command_class(self, command) + + def print_commands(self): + for ep in pkg_resources.iter_entry_points('distutils.commands'): + if ep.name not in self.cmdclass: + cmdclass = ep.load(False) # don't require extras, we're not running + self.cmdclass[ep.name] = cmdclass + return _Distribution.print_commands(self) + + + + + + def _set_feature(self,name,status): + """Set feature's inclusion status""" + setattr(self,self._feature_attrname(name),status) + + def feature_is_included(self,name): + """Return 1 if feature is included, 0 if excluded, 'None' if unknown""" + return getattr(self,self._feature_attrname(name)) + + def include_feature(self,name): + """Request inclusion of feature named 'name'""" + + if self.feature_is_included(name)==0: + descr = self.features[name].description + raise DistutilsOptionError( + descr + " is required, but was excluded or is not available" + ) + self.features[name].include_in(self) + self._set_feature(name,1) + + def include(self,**attrs): + """Add items to distribution that are named in keyword arguments + + For example, 'dist.exclude(py_modules=["x"])' would add 'x' to + the distribution's 'py_modules' attribute, if it was not already + there. + + Currently, this method only supports inclusion for attributes that are + lists or tuples. If you need to add support for adding to other + attributes in this or a subclass, you can add an '_include_X' method, + where 'X' is the name of the attribute. The method will be called with + the value passed to 'include()'. So, 'dist.include(foo={"bar":"baz"})' + will try to call 'dist._include_foo({"bar":"baz"})', which can then + handle whatever special inclusion logic is needed. + """ + for k,v in attrs.items(): + include = getattr(self, '_include_'+k, None) + if include: + include(v) + else: + self._include_misc(k,v) + + def exclude_package(self,package): + """Remove packages, modules, and extensions in named package""" + + pfx = package+'.' 
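+        # Illustrative sketch: exclude_package('tests') removes 'tests'
+        # itself plus any dotted name matching the 'tests.' prefix (e.g.
+        # 'tests.util') from packages, py_modules, and ext_modules below.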
+ if self.packages: + self.packages = [ + p for p in self.packages + if p!=package and not p.startswith(pfx) + ] + + if self.py_modules: + self.py_modules = [ + p for p in self.py_modules + if p!=package and not p.startswith(pfx) + ] + + if self.ext_modules: + self.ext_modules = [ + p for p in self.ext_modules + if p.name!=package and not p.name.startswith(pfx) + ] + + + def has_contents_for(self,package): + """Return true if 'exclude_package(package)' would do something""" + + pfx = package+'.' + + for p in self.iter_distribution_names(): + if p==package or p.startswith(pfx): + return True + + + + + + + + + + + def _exclude_misc(self,name,value): + """Handle 'exclude()' for list/tuple attrs without a special handler""" + if not isinstance(value,sequence): + raise DistutilsSetupError( + "%s: setting must be a list or tuple (%r)" % (name, value) + ) + try: + old = getattr(self,name) + except AttributeError: + raise DistutilsSetupError( + "%s: No such distribution setting" % name + ) + if old is not None and not isinstance(old,sequence): + raise DistutilsSetupError( + name+": this setting cannot be changed via include/exclude" + ) + elif old: + setattr(self,name,[item for item in old if item not in value]) + + def _include_misc(self,name,value): + """Handle 'include()' for list/tuple attrs without a special handler""" + + if not isinstance(value,sequence): + raise DistutilsSetupError( + "%s: setting must be a list (%r)" % (name, value) + ) + try: + old = getattr(self,name) + except AttributeError: + raise DistutilsSetupError( + "%s: No such distribution setting" % name + ) + if old is None: + setattr(self,name,value) + elif not isinstance(old,sequence): + raise DistutilsSetupError( + name+": this setting cannot be changed via include/exclude" + ) + else: + setattr(self,name,old+[item for item in value if item not in old]) + + def exclude(self,**attrs): + """Remove items from distribution that are named in keyword arguments + + For example, 'dist.exclude(py_modules=["x"])' would remove 'x' from + the distribution's 'py_modules' attribute. Excluding packages uses + the 'exclude_package()' method, so all of the package's contained + packages, modules, and extensions are also excluded. + + Currently, this method only supports exclusion from attributes that are + lists or tuples. If you need to add support for excluding from other + attributes in this or a subclass, you can add an '_exclude_X' method, + where 'X' is the name of the attribute. The method will be called with + the value passed to 'exclude()'. So, 'dist.exclude(foo={"bar":"baz"})' + will try to call 'dist._exclude_foo({"bar":"baz"})', which can then + handle whatever special exclusion logic is needed. + """ + for k,v in attrs.items(): + exclude = getattr(self, '_exclude_'+k, None) + if exclude: + exclude(v) + else: + self._exclude_misc(k,v) + + def _exclude_packages(self,packages): + if not isinstance(packages,sequence): + raise DistutilsSetupError( + "packages: setting must be a list or tuple (%r)" % (packages,) + ) + map(self.exclude_package, packages) + + + + + + + + + + + + + def _parse_command_opts(self, parser, args): + # Remove --with-X/--without-X options when processing command args + self.global_options = self.__class__.global_options + self.negative_opt = self.__class__.negative_opt + + # First, expand any aliases + command = args[0] + aliases = self.get_option_dict('aliases') + while command in aliases: + src,alias = aliases[command] + del aliases[command] # ensure each alias can expand only once! 
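+            # Illustrative sketch (hypothetical alias): given
+            #   [aliases]
+            #   rebuild = clean --all build
+            # in setup.cfg, 'setup.py rebuild' expands via
+            # shlex.split('clean --all build', True) to
+            # ['clean', '--all', 'build'], and the loop re-examines the new
+            # head command in case it is itself an alias.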
+ import shlex + args[:1] = shlex.split(alias,True) + command = args[0] + + nargs = _Distribution._parse_command_opts(self, parser, args) + + # Handle commands that want to consume all remaining arguments + cmd_class = self.get_command_class(command) + if getattr(cmd_class,'command_consumes_arguments',None): + self.get_option_dict(command)['args'] = ("command line", nargs) + if nargs is not None: + return [] + + return nargs + + + + + + + + + + + + + + + + + def get_cmdline_options(self): + """Return a '{cmd: {opt:val}}' map of all command-line options + + Option names are all long, but do not include the leading '--', and + contain dashes rather than underscores. If the option doesn't take + an argument (e.g. '--quiet'), the 'val' is 'None'. + + Note that options provided by config files are intentionally excluded. + """ + + d = {} + + for cmd,opts in self.command_options.items(): + + for opt,(src,val) in opts.items(): + + if src != "command line": + continue + + opt = opt.replace('_','-') + + if val==0: + cmdobj = self.get_command_obj(cmd) + neg_opt = self.negative_opt.copy() + neg_opt.update(getattr(cmdobj,'negative_opt',{})) + for neg,pos in neg_opt.items(): + if pos==opt: + opt=neg + val=None + break + else: + raise AssertionError("Shouldn't be able to get here") + + elif val==1: + val = None + + d.setdefault(cmd,{})[opt] = val + + return d + + + def iter_distribution_names(self): + """Yield all packages, modules, and extension names in distribution""" + + for pkg in self.packages or (): + yield pkg + + for module in self.py_modules or (): + yield module + + for ext in self.ext_modules or (): + if isinstance(ext,tuple): + name, buildinfo = ext + else: + name = ext.name + if name.endswith('module'): + name = name[:-6] + yield name + +# Install it throughout the distutils +for module in distutils.dist, distutils.core, distutils.cmd: + module.Distribution = Distribution + + + + + + + + + + + + + + + + + + + + +class Feature: + """A subset of the distribution that can be excluded if unneeded/wanted + + Features are created using these keyword arguments: + + 'description' -- a short, human readable description of the feature, to + be used in error messages, and option help messages. + + 'standard' -- if true, the feature is included by default if it is + available on the current system. Otherwise, the feature is only + included if requested via a command line '--with-X' option, or if + another included feature requires it. The default setting is 'False'. + + 'available' -- if true, the feature is available for installation on the + current system. The default setting is 'True'. + + 'optional' -- if true, the feature's inclusion can be controlled from the + command line, using the '--with-X' or '--without-X' options. If + false, the feature's inclusion status is determined automatically, + based on 'availabile', 'standard', and whether any other feature + requires it. The default setting is 'True'. + + 'require_features' -- a string or sequence of strings naming features + that should also be included if this feature is included. Defaults to + empty list. May also contain 'Require' objects that should be + added/removed from the distribution. + + 'remove' -- a string or list of strings naming packages to be removed + from the distribution if this feature is *not* included. If the + feature *is* included, this argument is ignored. 
This argument exists + to support removing features that "crosscut" a distribution, such as + defining a 'tests' feature that removes all the 'tests' subpackages + provided by other features. The default for this argument is an empty + list. (Note: the named package(s) or modules must exist in the base + distribution when the 'setup()' function is initially called.) + + other keywords -- any other keyword arguments are saved, and passed to + the distribution's 'include()' and 'exclude()' methods when the + feature is included or excluded, respectively. So, for example, you + could pass 'packages=["a","b"]' to cause packages 'a' and 'b' to be + added or removed from the distribution as appropriate. + + A feature must include at least one 'requires', 'remove', or other + keyword argument. Otherwise, it can't affect the distribution in any way. + Note also that you can subclass 'Feature' to create your own specialized + feature types that modify the distribution in other ways when included or + excluded. See the docstrings for the various methods here for more detail. + Aside from the methods, the only feature attributes that distributions look + at are 'description' and 'optional'. + """ + def __init__(self, description, standard=False, available=True, + optional=True, require_features=(), remove=(), **extras + ): + + self.description = description + self.standard = standard + self.available = available + self.optional = optional + if isinstance(require_features,(str,Require)): + require_features = require_features, + + self.require_features = [ + r for r in require_features if isinstance(r,str) + ] + er = [r for r in require_features if not isinstance(r,str)] + if er: extras['require_features'] = er + + if isinstance(remove,str): + remove = remove, + self.remove = remove + self.extras = extras + + if not remove and not require_features and not extras: + raise DistutilsSetupError( + "Feature %s: must define 'require_features', 'remove', or at least one" + " of 'packages', 'py_modules', etc." + ) + + def include_by_default(self): + """Should this feature be included by default?""" + return self.available and self.standard + + def include_in(self,dist): + + """Ensure feature and its requirements are included in distribution + + You may override this in a subclass to perform additional operations on + the distribution. Note that this method may be called more than once + per feature, and so should be idempotent. + + """ + + if not self.available: + raise DistutilsPlatformError( + self.description+" is required," + "but is not available on this platform" + ) + + dist.include(**self.extras) + + for f in self.require_features: + dist.include_feature(f) + + + + def exclude_from(self,dist): + + """Ensure feature is excluded from distribution + + You may override this in a subclass to perform additional operations on + the distribution. This method will be called at most once per + feature, and only after all included features have been asked to + include themselves. + """ + + dist.exclude(**self.extras) + + if self.remove: + for item in self.remove: + dist.exclude_package(item) + + + + def validate(self,dist): + + """Verify that feature makes sense in context of distribution + + This method is called by the distribution just before it parses its + command line. It checks to ensure that the 'remove' attribute, if any, + contains only valid package/module names that are present in the base + distribution when 'setup()' is called. 
You may override it in a
+        subclass to perform any other required validation of the feature
+        against a target distribution.
+        """
+
+        for item in self.remove:
+            if not dist.has_contents_for(item):
+                raise DistutilsSetupError(
+                    "%s wants to be able to remove %s, but the distribution"
+                    " doesn't contain any packages or modules under %s"
+                    % (self.description, item, item)
+                )
+
+
+
+
+
diff --git a/setuptools-0.6c16dev3.egg/setuptools/extension.py b/setuptools-0.6c16dev3.egg/setuptools/extension.py
new file mode 100644
index 0000000..cfcf55b
--- /dev/null
+++ b/setuptools-0.6c16dev3.egg/setuptools/extension.py
@@ -0,0 +1,35 @@
+from distutils.core import Extension as _Extension
+from dist import _get_unpatched
+_Extension = _get_unpatched(_Extension)
+
+try:
+    from Pyrex.Distutils.build_ext import build_ext
+except ImportError:
+    have_pyrex = False
+else:
+    have_pyrex = True
+
+
+class Extension(_Extension):
+    """Extension that uses '.c' files in place of '.pyx' files"""
+
+    if not have_pyrex:
+        # convert .pyx extensions to .c
+        def __init__(self,*args,**kw):
+            _Extension.__init__(self,*args,**kw)
+            sources = []
+            for s in self.sources:
+                if s.endswith('.pyx'):
+                    sources.append(s[:-3]+'c')
+                else:
+                    sources.append(s)
+            self.sources = sources
+
+class Library(Extension):
+    """Just like a regular Extension, but built as a library instead"""
+
+import sys, distutils.core, distutils.extension
+distutils.core.Extension = Extension
+distutils.extension.Extension = Extension
+if 'distutils.command.build_ext' in sys.modules:
+    sys.modules['distutils.command.build_ext'].Extension = Extension
diff --git a/setuptools-0.6c16dev3.egg/setuptools/package_index.py b/setuptools-0.6c16dev3.egg/setuptools/package_index.py
new file mode 100644
index 0000000..3d82d93
--- /dev/null
+++ b/setuptools-0.6c16dev3.egg/setuptools/package_index.py
@@ -0,0 +1,798 @@
+"""PyPI and direct package downloading"""
+import sys, os.path, re, urlparse, urllib2, shutil, random, socket, cStringIO
+import httplib, urllib
+from pkg_resources import *
+from distutils import log
+from distutils.errors import DistutilsError
+try:
+    from hashlib import md5
+except ImportError:
+    from md5 import md5
+from fnmatch import translate
+EGG_FRAGMENT = re.compile(r'^egg=([-A-Za-z0-9_.]+)$')
+HREF = re.compile("""href\\s*=\\s*['"]?([^'"> ]+)""", re.I)
+# this is here to fix emacs' cruddy broken syntax highlighting
+PYPI_MD5 = re.compile(
+    '<a href="([^"#]+)#md5=([0-9a-f]{32})">([^<]+)</a>\n\s+\\(md5\\)'
+)
+URL_SCHEME = re.compile('([-+.a-z0-9]{2,}):',re.I).match
+EXTENSIONS = ".tar.gz .tar.bz2 .tar .zip .tgz".split()
+
+def is_local(url_or_fname):
+    """ Return True if url_or_fname is a "file:" url or if it is a schemaless thing (which is presumably a filename). """
+    mo = URL_SCHEME(url_or_fname)
+    return not (mo and mo.group(1).lower()!='file')
+
+def url_or_fname_to_fname(url_or_fname):
+    """ Assert that is_local(url_or_fname) then if it is a "file:" url, parse it and run url2pathname on it, else just return it.
""" + assert is_local(url_or_fname) + + mo = URL_SCHEME(url_or_fname) + if mo: + return urllib2.url2pathname(urlparse.urlparse(url)[2]) + else: + return url_or_fname + +__all__ = [ + 'PackageIndex', 'distros_for_url', 'parse_bdist_wininst', + 'interpret_distro_name', +] + +def parse_bdist_wininst(name): + """Return (base,pyversion) or (None,None) for possible .exe name""" + + lower = name.lower() + base, py_ver = None, None + + if lower.endswith('.exe'): + if lower.endswith('.win32.exe'): + base = name[:-10] + elif lower.startswith('.win32-py',-16): + py_ver = name[-7:-4] + base = name[:-16] + + return base,py_ver + +def egg_info_for_url(url): + scheme, server, path, parameters, query, fragment = urlparse.urlparse(url) + base = urllib2.unquote(path.split('/')[-1]) + if server=='sourceforge.net' and base=='download': # XXX Yuck + base = urllib2.unquote(path.split('/')[-2]) + if '#' in base: base, fragment = base.split('#',1) + return base,fragment + +def distros_for_url(url, metadata=None): + """Yield egg or source distribution objects that might be found at a URL""" + base, fragment = egg_info_for_url(url) + for dist in distros_for_location(url, base, metadata): yield dist + if fragment: + match = EGG_FRAGMENT.match(fragment) + if match: + for dist in interpret_distro_name( + url, match.group(1), metadata, precedence = CHECKOUT_DIST + ): + yield dist + +def distros_for_location(location, basename, metadata=None): + """Yield egg or source distribution objects based on basename""" + if basename.endswith('.egg.zip'): + basename = basename[:-4] # strip the .zip + if basename.endswith('.egg') and '-' in basename: + # only one, unambiguous interpretation + return [Distribution.from_location(location, basename, metadata)] + if basename.endswith('.exe'): + win_base, py_ver = parse_bdist_wininst(basename) + if win_base is not None: + return interpret_distro_name( + location, win_base, metadata, py_ver, BINARY_DIST, "win32" + ) + # Try source distro extensions (.zip, .tgz, etc.) + # + for ext in EXTENSIONS: + if basename.endswith(ext): + basename = basename[:-len(ext)] + return interpret_distro_name(location, basename, metadata) + return [] # no extension matched + +def distros_for_filename(filename, metadata=None): + """Yield possible egg or source distribution objects based on a filename""" + return distros_for_location( + normalize_path(filename), os.path.basename(filename), metadata + ) + + +def interpret_distro_name(location, basename, metadata, + py_version=None, precedence=SOURCE_DIST, platform=None +): + """Generate alternative interpretations of a source distro name + + Note: if `location` is a filesystem filename, you should call + ``pkg_resources.normalize_path()`` on it before passing it to this + routine! + """ + # Generate alternative interpretations of a source distro name + # Because some packages are ambiguous as to name/versions split + # e.g. "adns-python-1.1.0", "egenix-mx-commercial", etc. + # So, we generate each possible interepretation (e.g. "adns, python-1.1.0" + # "adns-python, 1.1.0", and "adns-python-1.1.0, no version"). In practice, + # the spurious interpretations should be ignored, because in the event + # there's also an "adns" package, the spurious "python-1.1.0" version will + # compare lower than any numeric version number, and is therefore unlikely + # to match a request for it. It's still a potential problem, though, and + # in the long run PyPI and the distutils should go for "safe" names and + # versions in distribution archive names (sdist and bdist). 
+ + parts = basename.split('-') + if not py_version: + for i,p in enumerate(parts[2:]): + if len(p)==5 and p.startswith('py2.'): + return # It's a bdist_dumb, not an sdist -- bail out + + for p in range(1,len(parts)+1): + yield Distribution( + location, metadata, '-'.join(parts[:p]), '-'.join(parts[p:]), + py_version=py_version, precedence = precedence, + platform = platform + ) + +REL = re.compile("""<([^>]*\srel\s*=\s*['"]?([^'">]+)[^>]*)>""", re.I) +# this line is here to fix emacs' cruddy broken syntax highlighting + +def find_external_links(url, page): + """Find rel="homepage" and rel="download" links in `page`, yielding URLs""" + + for match in REL.finditer(page): + tag, rel = match.groups() + rels = map(str.strip, rel.lower().split(',')) + if 'homepage' in rels or 'download' in rels: + for match in HREF.finditer(tag): + yield urlparse.urljoin(url, htmldecode(match.group(1))) + + for tag in ("Home Page", "Download URL"): + pos = page.find(tag) + if pos!=-1: + match = HREF.search(page,pos) + if match: + yield urlparse.urljoin(url, htmldecode(match.group(1))) + +user_agent = "Python-urllib/%s setuptools/%s" % ( + urllib2.__version__, require('setuptools')[0].version +) + + +class PackageIndex(Environment): + """A distribution index that scans web pages for download URLs""" + + def __init__(self, index_url="http://pypi.python.org/simple", hosts=('*',), + *args, **kw + ): + Environment.__init__(self,*args,**kw) + self.index_url = index_url + "/"[:not index_url.endswith('/')] + self.scanned_urls = {} + self.fetched_urls = {} + self.package_pages = {} + self.allows = re.compile('|'.join(map(translate,hosts))).match + self.to_scan = [] + + + + def process_url(self, url, retrieve=False): + """Evaluate a URL as a possible download, and maybe retrieve it""" + if url in self.scanned_urls and not retrieve: + return + self.scanned_urls[url] = True + if not URL_SCHEME(url): + self.process_filename(url) + return + else: + dists = list(distros_for_url(url)) + if dists: + if not self.url_ok(url): + return + self.debug("Found link: %s", url) + + if dists or not retrieve or url in self.fetched_urls: + map(self.add, dists) + return # don't need the actual page + + if not self.url_ok(url): + self.fetched_urls[url] = True + return + + self.info("Reading %s", url) + self.fetched_urls[url] = True # prevent multiple fetch attempts + f = self.open_url(url, "Download error: %s -- Some packages may not be found!") + if f is None: return + self.fetched_urls[f.url] = True + if 'html' not in f.headers.get('content-type', '').lower(): + f.close() # not html, we can't process it + return + + base = f.url # handle redirects + page = f.read() + f.close() + if url.startswith(self.index_url) and getattr(f,'code',None)!=404: + page = self.process_index(url, page) + for match in HREF.finditer(page): + link = urlparse.urljoin(base, htmldecode(match.group(1))) + self.process_url(link) + + def process_filename(self, fn, nested=False): + # process filenames or directories + if not os.path.exists(fn): + self.warn("Not found: %s", fn) + return + + if os.path.isdir(fn) and not nested: + path = os.path.realpath(fn) + for item in os.listdir(path): + self.process_filename(os.path.join(path,item), True) + + dists = distros_for_filename(fn) + if dists: + self.debug("Found: %s", fn) + map(self.add, dists) + + def url_ok(self, url, fatal=False): + s = URL_SCHEME(url) + if (s and s.group(1).lower()=='file') or self.allows(urlparse.urlparse(url)[1]): + return True + msg = "\nLink to % s ***BLOCKED*** by --allow-hosts\n" + if fatal: + 
+            raise DistutilsError(msg % url)
+        else:
+            self.warn(msg, url)
+
+    def scan_egg_links(self, search_path):
+        for item in search_path:
+            if os.path.isdir(item):
+                for entry in os.listdir(item):
+                    if entry.endswith('.egg-link'):
+                        self.scan_egg_link(item, entry)
+
+    def scan_egg_link(self, path, entry):
+        lines = filter(None, map(str.strip, file(os.path.join(path, entry))))
+        if len(lines)==2:
+            for dist in find_distributions(os.path.join(path, lines[0])):
+                dist.location = os.path.join(path, *lines)
+                dist.precedence = SOURCE_DIST
+                self.add(dist)
+
+    def process_index(self,url,page):
+        """Process the contents of a PyPI page"""
+        def scan(link):
+            # Process a URL to see if it's for a package page
+            if link.startswith(self.index_url):
+                parts = map(
+                    urllib2.unquote, link[len(self.index_url):].split('/')
+                )
+                if len(parts)==2 and '#' not in parts[1]:
+                    # it's a package page, sanitize and index it
+                    pkg = safe_name(parts[0])
+                    ver = safe_version(parts[1])
+                    self.package_pages.setdefault(pkg.lower(),{})[link] = True
+                    return to_filename(pkg), to_filename(ver)
+            return None, None
+
+        # process an index page into the package-page index
+        for match in HREF.finditer(page):
+            scan( urlparse.urljoin(url, htmldecode(match.group(1))) )
+
+        pkg, ver = scan(url)   # ensure this page is in the page index
+        if pkg:
+            # process individual package page
+            for new_url in find_external_links(url, page):
+                # Process the found URL
+                base, frag = egg_info_for_url(new_url)
+                if base.endswith('.py') and not frag:
+                    if ver:
+                        new_url+='#egg=%s-%s' % (pkg,ver)
+                    else:
+                        self.need_version_info(url)
+                self.scan_url(new_url)
+
+            return PYPI_MD5.sub(
+                lambda m: '<a href="%s#md5=%s">%s</a>' % m.group(1,3,2), page
+            )
+        else:
+            return ""   # no sense double-scanning non-package pages
+
+
+
+    def need_version_info(self, url):
+        self.scan_all(
+            "Page at %s links to .py file(s) without version info; an index "
+            "scan is required.", url
+        )
+
+    def scan_all(self, msg=None, *args):
+        if self.index_url not in self.fetched_urls:
+            if msg: self.warn(msg,*args)
+            self.info(
+                "Scanning index of all packages (this may take a while)"
+            )
+        self.scan_url(self.index_url)
+
+    def find_packages(self, requirement):
+        self.scan_url(self.index_url + requirement.unsafe_name+'/')
+
+        if not self.package_pages.get(requirement.key):
+            # Fall back to safe version of the name
+            self.scan_url(self.index_url + requirement.project_name+'/')
+
+        if not self.package_pages.get(requirement.key):
+            # We couldn't find the target package, so search the index page too
+            self.not_found_in_index(requirement)
+
+        for url in list(self.package_pages.get(requirement.key,())):
+            # scan each page that might be related to the desired package
+            self.scan_url(url)
+
+    def obtain(self, requirement, installer=None):
+        self.prescan(); self.find_packages(requirement)
+        for dist in self[requirement.key]:
+            if dist in requirement:
+                return dist
+            self.debug("%s does not match %s", requirement, dist)
+        return super(PackageIndex, self).obtain(requirement,installer)
+
+
+
+
+
+    def check_md5(self, cs, info, filename, tfp):
+        if re.match('md5=[0-9a-f]{32}$', info):
+            self.debug("Validating md5 checksum for %s", filename)
+            if cs.hexdigest()!=info[4:]:
+                tfp.close()
+                os.unlink(filename)
+                raise DistutilsError(
+                    "MD5 validation failed for "+os.path.basename(filename)+
+                    "; possible download problem?"
+ ) + + def add_find_links(self, urls): + """Add `urls` to the list that will be prescanned for searches""" + for url in urls: + if ( + self.to_scan is None # if we have already "gone online" + or not URL_SCHEME(url) # or it's a local file/directory + or url.startswith('file:') + or list(distros_for_url(url)) # or a direct package link + ): + # then go ahead and process it now + self.scan_url(url) + else: + # otherwise, defer retrieval till later + self.to_scan.append(url) + + def prescan(self): + """Scan urls scheduled for prescanning (e.g. --find-links)""" + if self.to_scan: + map(self.scan_url, self.to_scan) + self.to_scan = None # from now on, go ahead and process immediately + + def not_found_in_index(self, requirement): + if self[requirement.key]: # we've seen at least one distro + meth, msg = self.info, "Couldn't retrieve index page for %r" + else: # no distros seen for this name, might be misspelled + meth, msg = (self.warn, + "Couldn't find index page for %r (maybe misspelled?)") + meth(msg, requirement.unsafe_name) + self.scan_all() + + def download(self, spec, tmpdir): + """Locate and/or download `spec` to `tmpdir`, returning a local path + + `spec` may be a ``Requirement`` object, or a string containing a URL, + an existing local filename, or a project/version requirement spec + (i.e. the string form of a ``Requirement`` object). If it is the URL + of a .py file with an unambiguous ``#egg=name-version`` tag (i.e., one + that escapes ``-`` as ``_`` throughout), a trivial ``setup.py`` is + automatically created alongside the downloaded file. + + If `spec` is a ``Requirement`` object or a string containing a + project/version requirement spec, this method returns the location of + a matching distribution (possibly after downloading it to `tmpdir`). + If `spec` is a locally existing file or directory name, it is simply + returned unchanged. If `spec` is a URL, it is downloaded to a subpath + of `tmpdir`, and the local filename is returned. Various errors may be + raised if a problem occurs during downloading. + """ + if not isinstance(spec,Requirement): + scheme = URL_SCHEME(spec) + if scheme: + # It's a url, download it to tmpdir + found = self._download_url(scheme.group(1), spec, tmpdir) + base, fragment = egg_info_for_url(spec) + if base.endswith('.py'): + found = self.gen_setup(found,fragment,tmpdir) + return found + elif os.path.exists(spec): + # Existing file or directory, just return it + return spec + else: + try: + spec = Requirement.parse(spec) + except ValueError: + raise DistutilsError( + "Not a URL, existing file, or requirement spec: %r" % + (spec,) + ) + return getattr(self.fetch_distribution(spec, tmpdir),'location',None) + + + def fetch_distribution(self, + requirement, tmpdir, force_scan=False, source=False, develop_ok=False, + local_index=None, + ): + """Obtain a distribution suitable for fulfilling `requirement` + + `requirement` must be a ``pkg_resources.Requirement`` instance. + If necessary, or if the `force_scan` flag is set, the requirement is + searched for in the (online) package index as well as the locally + installed packages. If a distribution matching `requirement` is found, + the returned distribution's ``location`` is the value you would have + gotten from calling the ``download()`` method with the matching + distribution's URL or filename. If no matching distribution is found, + ``None`` is returned. + + If the `source` flag is set, only source distributions and source + checkout links will be considered. 
Unless the `develop_ok` flag is + set, development and system eggs (i.e., those using the ``.egg-info`` + format) will be ignored. + """ + # process a Requirement + self.info("Searching for %s", requirement) + skipped = {} + dist = None + + def find(env, req): + # Find a matching distribution; may be called more than once + + # first try to find a local dist + for allow_remote in (False, True): + # then try to find a platform-dependent dist + for allow_platform_independent in (False, True): + for dist in env[req.key]: + if dist.precedence==DEVELOP_DIST and not develop_ok: + if dist not in skipped: + self.warn("Skipping development or system egg: %s",dist) + skipped[dist] = 1 + continue + + if ((is_local(dist.location) or allow_remote) and + (dist in req) and + ((allow_platform_independent or dist.platform is not None) and + (dist.precedence<=SOURCE_DIST or not source))): + return dist + + if force_scan: + self.prescan() + self.find_packages(requirement) + dist = find(self, requirement) + + if local_index is not None: + dist = dist or find(local_index, requirement) + + if dist is None and self.to_scan is not None: + self.prescan() + dist = find(self, requirement) + + if dist is None and not force_scan: + self.find_packages(requirement) + dist = find(self, requirement) + + if dist is None: + self.warn( + "No local packages or download links found for %s%s", + (source and "a source distribution of " or ""), + requirement, + ) + else: + self.info("Best match: %s", dist) + return dist.clone(location=self.download(dist.location, tmpdir)) + + + def fetch(self, requirement, tmpdir, force_scan=False, source=False): + """Obtain a file suitable for fulfilling `requirement` + + DEPRECATED; use the ``fetch_distribution()`` method now instead. For + backward compatibility, this routine is identical but returns the + ``location`` of the downloaded distribution instead of a distribution + object. + """ + dist = self.fetch_distribution(requirement,tmpdir,force_scan,source) + if dist is not None: + return dist.location + return None + + + def gen_setup(self, filename, fragment, tmpdir): + match = EGG_FRAGMENT.match(fragment) + dists = match and [d for d in + interpret_distro_name(filename, match.group(1), None) if d.version + ] or [] + + if len(dists)==1: # unambiguous ``#egg`` fragment + basename = os.path.basename(filename) + + # Make sure the file has been downloaded to the temp dir. + if os.path.dirname(filename) != tmpdir: + dst = os.path.join(tmpdir, basename) + from setuptools.command.easy_install import samefile + if not samefile(filename, dst): + shutil.copy2(filename, dst) + filename=dst + + file = open(os.path.join(tmpdir, 'setup.py'), 'w') + file.write( + "from setuptools import setup\n" + "setup(name=%r, version=%r, py_modules=[%r])\n" + % ( + dists[0].project_name, dists[0].version, + os.path.splitext(basename)[0] + ) + ) + file.close() + return filename + + elif match: + raise DistutilsError( + "Can't unambiguously interpret project/version identifier %r; " + "any dashes in the name or version should be escaped using " + "underscores. %r" % (fragment,dists) + ) + else: + raise DistutilsError( + "Can't process plain .py files without an '#egg=name-version'" + " suffix to enable automatic setup script generation." 
+ ) + + dl_blocksize = 8192 + def _download_to(self, url, filename): + self.info("Downloading %s", url) + # Download the file + fp, tfp, info = None, None, None + try: + if '#' in url: + url, info = url.split('#', 1) + fp = self.open_url(url) + if isinstance(fp, urllib2.HTTPError): + raise DistutilsError( + "Can't download %s: %s %s" % (url, fp.code,fp.msg) + ) + cs = md5() + headers = fp.info() + blocknum = 0 + bs = self.dl_blocksize + size = -1 + if "content-length" in headers: + size = int(headers["Content-Length"]) + self.reporthook(url, filename, blocknum, bs, size) + tfp = open(filename,'wb') + while True: + block = fp.read(bs) + if block: + cs.update(block) + tfp.write(block) + blocknum += 1 + self.reporthook(url, filename, blocknum, bs, size) + else: + break + if info: self.check_md5(cs, info, filename, tfp) + return headers + finally: + if fp: fp.close() + if tfp: tfp.close() + + def reporthook(self, url, filename, blocknum, blksize, size): + pass # no-op + + + def open_url(self, url, warning=None): + if url.startswith('file:'): return local_open(url) + try: + return open_with_auth(url) + except urllib2.HTTPError, v: + return v + except urllib2.URLError, v: + reason = v.reason + except httplib.HTTPException, v: + reason = "%s: %s" % (v.__doc__ or v.__class__.__name__, v) + if warning: + self.warn(warning, reason) + else: + raise DistutilsError("Download error for %s: %s" % (url, reason)) + + def _download_url(self, scheme, url, tmpdir): + # Determine download filename + # + name, fragment = egg_info_for_url(url) + if name: + while '..' in name: + name = name.replace('..','.').replace('\\','_') + else: + name = "__downloaded__" # default if URL has no path contents + + if name.endswith('.egg.zip'): + name = name[:-4] # strip the extra .zip before download + + filename = os.path.join(tmpdir,name) + + # Download the file + # + if scheme=='svn' or scheme.startswith('svn+'): + return self._download_svn(url, filename) + elif scheme=='file': + return urllib2.url2pathname(urlparse.urlparse(url)[2]) + else: + self.url_ok(url, True) # raises error if not allowed + return self._attempt_download(url, filename) + + + def scan_url(self, url): + self.process_url(url, True) + + + def _attempt_download(self, url, filename): + headers = self._download_to(url, filename) + if 'html' in headers.get('content-type','').lower(): + return self._download_html(url, headers, filename) + else: + return filename + + def _download_html(self, url, headers, filename): + file = open(filename) + for line in file: + if line.strip(): + # Check for a subversion index page + if re.search(r'([^- ]+ - )?Revision \d+:', line): + # it's a subversion index page: + file.close() + os.unlink(filename) + return self._download_svn(url, filename) + break # not an index page + file.close() + os.unlink(filename) + raise DistutilsError("Unexpected HTML page found at "+url) + + def _download_svn(self, url, filename): + url = url.split('#',1)[0] # remove any fragment for svn's sake + self.info("Doing subversion checkout from %s to %s", url, filename) + os.system("svn checkout -q %s %s" % (url, filename)) + return filename + + def debug(self, msg, *args): + log.debug(msg, *args) + + def info(self, msg, *args): + log.info(msg, *args) + + def warn(self, msg, *args): + log.warn(msg, *args) + +# This pattern matches a character entity reference (a decimal numeric +# references, a hexadecimal numeric reference, or a named reference). 
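+# Illustrative sketch: htmldecode('&lt;name&gt; &#65; &#x41;') returns
+# '<name> A A' -- the named references go through
+# htmlentitydefs.name2codepoint, the numeric ones through int() and uchr().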
+entity_sub = re.compile(r'&(#(\d+|x[\da-fA-F]+)|[\w.:-]+);?').sub

+def uchr(c):
+    if not isinstance(c, int):
+        return c
+    if c>255: return unichr(c)
+    return chr(c)
+
+def decode_entity(match):
+    what = match.group(1)
+    if what.startswith('#x'):
+        what = int(what[2:], 16)
+    elif what.startswith('#'):
+        what = int(what[1:])
+    else:
+        from htmlentitydefs import name2codepoint
+        what = name2codepoint.get(what, match.group(0))
+    return uchr(what)
+
+def htmldecode(text):
+    """Decode HTML entities in the given text."""
+    return entity_sub(decode_entity, text)
+
+
+
+
+def open_with_auth(url):
+    """Open a urllib2 request, handling HTTP authentication"""
+
+    scheme, netloc, path, params, query, frag = urlparse.urlparse(url)
+
+    if scheme in ('http', 'https'):
+        auth, host = urllib.splituser(netloc)
+    else:
+        auth = None
+
+    if auth:
+        auth = "Basic " + urllib2.unquote(auth).encode('base64').strip()
+        new_url = urlparse.urlunparse((scheme,host,path,params,query,frag))
+        request = urllib2.Request(new_url)
+        request.add_header("Authorization", auth)
+    else:
+        request = urllib2.Request(url)
+
+    request.add_header('User-Agent', user_agent)
+    fp = urllib2.urlopen(request)
+
+    if auth:
+        # Put authentication info back into request URL if same host,
+        # so that links found on the page will work
+        s2, h2, path2, param2, query2, frag2 = urlparse.urlparse(fp.url)
+        if s2==scheme and h2==host:
+            fp.url = urlparse.urlunparse((s2,netloc,path2,param2,query2,frag2))
+
+    return fp
+
+
+
+
+def fix_sf_url(url):
+    return url      # backward compatibility
+
+def local_open(url):
+    """Read a local path, with special support for directories"""
+    scheme, server, path, param, query, frag = urlparse.urlparse(url)
+    filename = urllib2.url2pathname(path)
+    if os.path.isfile(filename):
+        return urllib2.urlopen(url)
+    elif path.endswith('/') and os.path.isdir(filename):
+        files = []
+        for f in os.listdir(filename):
+            if f=='index.html':
+                body = open(os.path.join(filename,f),'rb').read()
+                break
+            elif os.path.isdir(os.path.join(filename,f)):
+                f+='/'
+            files.append("<a href=%r>%s</a>" % (f,f))
+        else:
+            body = ("<html><head><title>%s</title>" % url) + \
+                "</head><body>%s</body></html>" % '\n'.join(files)
+        status, message = 200, "OK"
+    else:
+        status, message, body = 404, "Path not found", "Not found"
+
+    return urllib2.HTTPError(url, status, message,
+            {'content-type':'text/html'}, cStringIO.StringIO(body))
+
+
+
+
+# this line is a kludge to keep the trailing blank lines for pje's editor
diff --git a/setuptools-0.6c16dev3.egg/setuptools/sandbox.py b/setuptools-0.6c16dev3.egg/setuptools/sandbox.py
new file mode 100644
index 0000000..4c5e712
--- /dev/null
+++ b/setuptools-0.6c16dev3.egg/setuptools/sandbox.py
@@ -0,0 +1,287 @@
+import os, sys, __builtin__, tempfile, operator, pkg_resources
+_os = sys.modules[os.name]
+_open = open
+_file = file
+
+from distutils.errors import DistutilsError
+from pkg_resources import working_set
+
+__all__ = [
+    "AbstractSandbox", "DirectorySandbox", "SandboxViolation", "run_setup",
+]
+
+
+
+
+def run_setup(setup_script, args):
+    """Run a distutils setup script, sandboxed in its directory"""
+    old_dir = os.getcwd()
+    save_argv = sys.argv[:]
+    save_path = sys.path[:]
+    setup_dir = os.path.abspath(os.path.dirname(setup_script))
+    temp_dir = os.path.join(setup_dir,'temp')
+    if not os.path.isdir(temp_dir): os.makedirs(temp_dir)
+    save_tmp = tempfile.tempdir
+    save_modules = sys.modules.copy()
+    pr_state =
pkg_resources.__getstate__() + try: + tempfile.tempdir = temp_dir; os.chdir(setup_dir) + try: + sys.argv[:] = [setup_script]+list(args) + sys.path.insert(0, setup_dir) + # reset to include setup dir, w/clean callback list + working_set.__init__() + working_set.callbacks.append(lambda dist:dist.activate()) + DirectorySandbox(setup_dir).run( + lambda: execfile( + "setup.py", + {'__file__':setup_script, '__name__':'__main__'} + ) + ) + except SystemExit, v: + if v.args and v.args[0]: + raise + # Normal exit, just return + finally: + pkg_resources.__setstate__(pr_state) + sys.modules.update(save_modules) + for key in list(sys.modules): + if key not in save_modules: del sys.modules[key] + os.chdir(old_dir) + sys.path[:] = save_path + sys.argv[:] = save_argv + tempfile.tempdir = save_tmp + + + +class AbstractSandbox: + """Wrap 'os' module and 'open()' builtin for virtualizing setup scripts""" + + _active = False + + def __init__(self): + self._attrs = [ + name for name in dir(_os) + if not name.startswith('_') and hasattr(self,name) + ] + + def _copy(self, source): + for name in self._attrs: + setattr(os, name, getattr(source,name)) + + def run(self, func): + """Run 'func' under os sandboxing""" + try: + self._copy(self) + __builtin__.file = self._file + __builtin__.open = self._open + self._active = True + return func() + finally: + self._active = False + __builtin__.open = _open + __builtin__.file = _file + self._copy(_os) + + def _mk_dual_path_wrapper(name): + original = getattr(_os,name) + def wrap(self,src,dst,*args,**kw): + if self._active: + src,dst = self._remap_pair(name,src,dst,*args,**kw) + return original(src,dst,*args,**kw) + return wrap + + for name in ["rename", "link", "symlink"]: + if hasattr(_os,name): locals()[name] = _mk_dual_path_wrapper(name) + + + def _mk_single_path_wrapper(name, original=None): + original = original or getattr(_os,name) + def wrap(self,path,*args,**kw): + if self._active: + path = self._remap_input(name,path,*args,**kw) + return original(path,*args,**kw) + return wrap + + _open = _mk_single_path_wrapper('open', _open) + _file = _mk_single_path_wrapper('file', _file) + for name in [ + "stat", "listdir", "chdir", "open", "chmod", "chown", "mkdir", + "remove", "unlink", "rmdir", "utime", "lchown", "chroot", "lstat", + "startfile", "mkfifo", "mknod", "pathconf", "access" + ]: + if hasattr(_os,name): locals()[name] = _mk_single_path_wrapper(name) + + def _mk_single_with_return(name): + original = getattr(_os,name) + def wrap(self,path,*args,**kw): + if self._active: + path = self._remap_input(name,path,*args,**kw) + return self._remap_output(name, original(path,*args,**kw)) + return original(path,*args,**kw) + return wrap + + for name in ['readlink', 'tempnam']: + if hasattr(_os,name): locals()[name] = _mk_single_with_return(name) + + def _mk_query(name): + original = getattr(_os,name) + def wrap(self,*args,**kw): + retval = original(*args,**kw) + if self._active: + return self._remap_output(name, retval) + return retval + return wrap + + for name in ['getcwd', 'tmpnam']: + if hasattr(_os,name): locals()[name] = _mk_query(name) + + def _validate_path(self,path): + """Called to remap or validate any path, whether input or output""" + return path + + def _remap_input(self,operation,path,*args,**kw): + """Called for path inputs""" + return self._validate_path(path) + + def _remap_output(self,operation,path): + """Called for path outputs""" + return self._validate_path(path) + + def _remap_pair(self,operation,src,dst,*args,**kw): + """Called for path pairs like 
rename, link, and symlink operations""" + return ( + self._remap_input(operation+'-from',src,*args,**kw), + self._remap_input(operation+'-to',dst,*args,**kw) + ) + + +class DirectorySandbox(AbstractSandbox): + """Restrict operations to a single subdirectory - pseudo-chroot""" + + write_ops = dict.fromkeys([ + "open", "chmod", "chown", "mkdir", "remove", "unlink", "rmdir", + "utime", "lchown", "chroot", "mkfifo", "mknod", "tempnam", + ]) + + def __init__(self,sandbox): + self._sandbox = os.path.normcase(os.path.realpath(sandbox)) + self._prefix = os.path.join(self._sandbox,'') + AbstractSandbox.__init__(self) + + def _violation(self, operation, *args, **kw): + raise SandboxViolation(operation, args, kw) + + def _open(self, path, mode='r', *args, **kw): + if mode not in ('r', 'rt', 'rb', 'rU', 'U') and not self._ok(path): + self._violation("open", path, mode, *args, **kw) + return _open(path,mode,*args,**kw) + + def tmpnam(self): self._violation("tmpnam") + + def _ok(self,path): + if hasattr(_os,'devnull') and path==_os.devnull: return True + active = self._active + try: + self._active = False + realpath = os.path.normcase(os.path.realpath(path)) + if realpath==self._sandbox or realpath.startswith(self._prefix): + return True + finally: + self._active = active + + def _remap_input(self,operation,path,*args,**kw): + """Called for path inputs""" + if operation in self.write_ops and not self._ok(path): + self._violation(operation, os.path.realpath(path), *args, **kw) + return path + + def _remap_pair(self,operation,src,dst,*args,**kw): + """Called for path pairs like rename, link, and symlink operations""" + if not self._ok(src) or not self._ok(dst): + self._violation(operation, src, dst, *args, **kw) + return (src,dst) + + def _file(self, path, mode='r', *args, **kw): + if mode not in ('r', 'rt', 'rb', 'rU', 'U') and not self._ok(path): + self._violation("file", path, mode, *args, **kw) + return _file(path,mode,*args,**kw) + + def open(self, file, flags, mode=0777): + """Called for low-level os.open()""" + if flags & WRITE_FLAGS and not self._ok(file): + self._violation("os.open", file, flags, mode) + return _os.open(file,flags,mode) + +WRITE_FLAGS = reduce( + operator.or_, [getattr(_os, a, 0) for a in + "O_WRONLY O_RDWR O_APPEND O_CREAT O_TRUNC O_TEMPORARY".split()] +) + +class SandboxViolation(DistutilsError): + """A setup script attempted to modify the filesystem outside the sandbox""" + + def __str__(self): + return """SandboxViolation: %s%r %s + +The package setup script has attempted to modify files on your system +that are not within the EasyInstall build area, and has been aborted. + +This package cannot be safely installed by EasyInstall, and may not +support alternate installation locations even if you run its setup +script by hand. 
Please inform the package's author and the EasyInstall +maintainers to find out if a fix or workaround is available.""" % self.args + + + + + + + + + + + + + + + + + + + + + + + + + + + +# diff --git a/setuptools-0.6c16dev3.egg/setuptools/site-patch.py b/setuptools-0.6c16dev3.egg/setuptools/site-patch.py new file mode 100644 index 0000000..b1b27b9 --- /dev/null +++ b/setuptools-0.6c16dev3.egg/setuptools/site-patch.py @@ -0,0 +1,74 @@ +def __boot(): + import sys, imp, os, os.path + PYTHONPATH = os.environ.get('PYTHONPATH') + if PYTHONPATH is None or (sys.platform=='win32' and not PYTHONPATH): + PYTHONPATH = [] + else: + PYTHONPATH = PYTHONPATH.split(os.pathsep) + + pic = getattr(sys,'path_importer_cache',{}) + stdpath = sys.path[len(PYTHONPATH):] + mydir = os.path.dirname(__file__) + #print "searching",stdpath,sys.path + + for item in stdpath: + if item==mydir or not item: + continue # skip if current dir. on Windows, or my own directory + importer = pic.get(item) + if importer is not None: + loader = importer.find_module('site') + if loader is not None: + # This should actually reload the current module + loader.load_module('site') + break + else: + try: + stream, path, descr = imp.find_module('site',[item]) + except ImportError: + continue + if stream is None: + continue + try: + # This should actually reload the current module + imp.load_module('site',stream,path,descr) + finally: + stream.close() + break + else: + raise ImportError("Couldn't find the real 'site' module") + + #print "loaded", __file__ + + known_paths = dict([(makepath(item)[1],1) for item in sys.path]) # 2.2 comp + + oldpos = getattr(sys,'__egginsert',0) # save old insertion position + sys.__egginsert = 0 # and reset the current one + + for item in PYTHONPATH: + addsitedir(item) + + sys.__egginsert += oldpos # restore effective old position + + d,nd = makepath(stdpath[0]) + insert_at = None + new_path = [] + + for item in sys.path: + p,np = makepath(item) + + if np==nd and insert_at is None: + # We've hit the first 'system' path entry, so added entries go here + insert_at = len(new_path) + + if np in known_paths or insert_at is None: + new_path.append(item) + else: + # new path after the insert point, back-insert it + new_path.insert(insert_at, item) + insert_at += 1 + + sys.path[:] = new_path + +if __name__=='site': + __boot() + del __boot diff --git a/setuptools-0.6c16dev3.egg/setuptools/tests/__init__.py b/setuptools-0.6c16dev3.egg/setuptools/tests/__init__.py new file mode 100644 index 0000000..3c4f3cb --- /dev/null +++ b/setuptools-0.6c16dev3.egg/setuptools/tests/__init__.py @@ -0,0 +1,369 @@ +"""Tests for the 'setuptools' package""" +from unittest import TestSuite, TestCase, makeSuite, defaultTestLoader +import distutils.core, distutils.cmd +from distutils.errors import DistutilsOptionError, DistutilsPlatformError +from distutils.errors import DistutilsSetupError +import setuptools, setuptools.dist +from setuptools import Feature +from distutils.core import Extension +extract_constant, get_module_constant = None, None +from setuptools.depends import * +from distutils.version import StrictVersion, LooseVersion +from distutils.util import convert_path +import sys, os.path + +def additional_tests(): + import doctest, unittest + suite = unittest.TestSuite(( + doctest.DocFileSuite('api_tests.txt', + optionflags=doctest.ELLIPSIS, package=__name__, + ), + )) + if sys.platform == 'win32': + suite.addTest(doctest.DocFileSuite('win_script_wrapper.txt')) + return suite + +def makeSetup(**args): + """Return distribution from 
'setup(**args)', without executing commands"""
+
+    distutils.core._setup_stop_after = "commandline"
+
+    # Don't let system command line leak into tests!
+    args.setdefault('script_args',['install'])
+
+    try:
+        return setuptools.setup(**args)
+    finally:
+        distutils.core._setup_stop_after = None
+
+
+
+
+class DependsTests(TestCase):
+
+    def testExtractConst(self):
+        if not extract_constant: return  # skip on non-bytecode platforms
+
+        def f1():
+            global x,y,z
+            x = "test"
+            y = z
+
+        # unrecognized name
+        self.assertEqual(extract_constant(f1.func_code,'q', -1), None)
+
+        # constant assigned
+        self.assertEqual(extract_constant(f1.func_code,'x', -1), "test")
+
+        # expression assigned
+        self.assertEqual(extract_constant(f1.func_code,'y', -1), -1)
+
+        # recognized name, not assigned
+        self.assertEqual(extract_constant(f1.func_code,'z', -1), None)
+
+
+    def testFindModule(self):
+        self.assertRaises(ImportError, find_module, 'no-such.-thing')
+        self.assertRaises(ImportError, find_module, 'setuptools.non-existent')
+        f,p,i = find_module('setuptools.tests'); f.close()
+
+    def testModuleExtract(self):
+        if not get_module_constant: return  # skip on non-bytecode platforms
+        from distutils import __version__
+        self.assertEqual(
+            get_module_constant('distutils','__version__'), __version__
+        )
+        self.assertEqual(
+            get_module_constant('sys','version'), sys.version
+        )
+        self.assertEqual(
+            get_module_constant('setuptools.tests','__doc__'),__doc__
+        )
+
+    def testRequire(self):
+        if not extract_constant: return  # skip on non-bytecode platforms
+
+        req = Require('Distutils','1.0.3','distutils')
+
+        self.assertEqual(req.name, 'Distutils')
+        self.assertEqual(req.module, 'distutils')
+        self.assertEqual(req.requested_version, '1.0.3')
+        self.assertEqual(req.attribute, '__version__')
+        self.assertEqual(req.full_name(), 'Distutils-1.0.3')
+
+        from distutils import __version__
+        self.assertEqual(req.get_version(), __version__)
+        self.failUnless(req.version_ok('1.0.9'))
+        self.failIf(req.version_ok('0.9.1'))
+        self.failIf(req.version_ok('unknown'))
+
+        self.failUnless(req.is_present())
+        self.failUnless(req.is_current())
+
+        req = Require('Distutils 3000','03000','distutils',format=LooseVersion)
+        self.failUnless(req.is_present())
+        self.failIf(req.is_current())
+        self.failIf(req.version_ok('unknown'))
+
+        req = Require('Do-what-I-mean','1.0','d-w-i-m')
+        self.failIf(req.is_present())
+        self.failIf(req.is_current())
+
+        req = Require('Tests', None, 'tests', homepage="http://example.com")
+        self.assertEqual(req.format, None)
+        self.assertEqual(req.attribute, None)
+        self.assertEqual(req.requested_version, None)
+        self.assertEqual(req.full_name(), 'Tests')
+        self.assertEqual(req.homepage, 'http://example.com')
+
+        paths = [os.path.dirname(p) for p in __path__]
+        self.failUnless(req.is_present(paths))
+        self.failUnless(req.is_current(paths))
+
+
+class DistroTests(TestCase):
+
+    def setUp(self):
+        self.e1 = Extension('bar.ext',['bar.c'])
+        self.e2 = Extension('c.y', ['y.c'])
+
+        self.dist = makeSetup(
+            packages=['a', 'a.b', 'a.b.c', 'b', 'c'],
+            py_modules=['b.d','x'],
+            ext_modules = (self.e1, self.e2),
+            package_dir = {},
+        )
+
+
+    def testDistroType(self):
+        self.failUnless(isinstance(self.dist,setuptools.dist.Distribution))
+
+
+    def testExcludePackage(self):
+        self.dist.exclude_package('a')
+        self.assertEqual(self.dist.packages, ['b','c'])
+
+        self.dist.exclude_package('b')
+        self.assertEqual(self.dist.packages, ['c'])
+        self.assertEqual(self.dist.py_modules, ['x'])
+        self.assertEqual(self.dist.ext_modules, 
[self.e1, self.e2]) + + self.dist.exclude_package('c') + self.assertEqual(self.dist.packages, []) + self.assertEqual(self.dist.py_modules, ['x']) + self.assertEqual(self.dist.ext_modules, [self.e1]) + + # test removals from unspecified options + makeSetup().exclude_package('x') + + + + + + + + def testIncludeExclude(self): + # remove an extension + self.dist.exclude(ext_modules=[self.e1]) + self.assertEqual(self.dist.ext_modules, [self.e2]) + + # add it back in + self.dist.include(ext_modules=[self.e1]) + self.assertEqual(self.dist.ext_modules, [self.e2, self.e1]) + + # should not add duplicate + self.dist.include(ext_modules=[self.e1]) + self.assertEqual(self.dist.ext_modules, [self.e2, self.e1]) + + def testExcludePackages(self): + self.dist.exclude(packages=['c','b','a']) + self.assertEqual(self.dist.packages, []) + self.assertEqual(self.dist.py_modules, ['x']) + self.assertEqual(self.dist.ext_modules, [self.e1]) + + def testEmpty(self): + dist = makeSetup() + dist.include(packages=['a'], py_modules=['b'], ext_modules=[self.e2]) + dist = makeSetup() + dist.exclude(packages=['a'], py_modules=['b'], ext_modules=[self.e2]) + + def testContents(self): + self.failUnless(self.dist.has_contents_for('a')) + self.dist.exclude_package('a') + self.failIf(self.dist.has_contents_for('a')) + + self.failUnless(self.dist.has_contents_for('b')) + self.dist.exclude_package('b') + self.failIf(self.dist.has_contents_for('b')) + + self.failUnless(self.dist.has_contents_for('c')) + self.dist.exclude_package('c') + self.failIf(self.dist.has_contents_for('c')) + + + + + def testInvalidIncludeExclude(self): + self.assertRaises(DistutilsSetupError, + self.dist.include, nonexistent_option='x' + ) + self.assertRaises(DistutilsSetupError, + self.dist.exclude, nonexistent_option='x' + ) + self.assertRaises(DistutilsSetupError, + self.dist.include, packages={'x':'y'} + ) + self.assertRaises(DistutilsSetupError, + self.dist.exclude, packages={'x':'y'} + ) + self.assertRaises(DistutilsSetupError, + self.dist.include, ext_modules={'x':'y'} + ) + self.assertRaises(DistutilsSetupError, + self.dist.exclude, ext_modules={'x':'y'} + ) + + self.assertRaises(DistutilsSetupError, + self.dist.include, package_dir=['q'] + ) + self.assertRaises(DistutilsSetupError, + self.dist.exclude, package_dir=['q'] + ) + + + + + + + + + + + + + + + +class FeatureTests(TestCase): + + def setUp(self): + self.req = Require('Distutils','1.0.3','distutils') + self.dist = makeSetup( + features={ + 'foo': Feature("foo",standard=True,require_features=['baz',self.req]), + 'bar': Feature("bar", standard=True, packages=['pkg.bar'], + py_modules=['bar_et'], remove=['bar.ext'], + ), + 'baz': Feature( + "baz", optional=False, packages=['pkg.baz'], + scripts = ['scripts/baz_it'], + libraries=[('libfoo','foo/foofoo.c')] + ), + 'dwim': Feature("DWIM", available=False, remove='bazish'), + }, + script_args=['--without-bar', 'install'], + packages = ['pkg.bar', 'pkg.foo'], + py_modules = ['bar_et', 'bazish'], + ext_modules = [Extension('bar.ext',['bar.c'])] + ) + + def testDefaults(self): + self.failIf( + Feature( + "test",standard=True,remove='x',available=False + ).include_by_default() + ) + self.failUnless( + Feature("test",standard=True,remove='x').include_by_default() + ) + # Feature must have either kwargs, removes, or require_features + self.assertRaises(DistutilsSetupError, Feature, "test") + + def testAvailability(self): + self.assertRaises( + DistutilsPlatformError, + self.dist.features['dwim'].include_in, self.dist + ) + + def 
testFeatureOptions(self): + dist = self.dist + self.failUnless( + ('with-dwim',None,'include DWIM') in dist.feature_options + ) + self.failUnless( + ('without-dwim',None,'exclude DWIM (default)') in dist.feature_options + ) + self.failUnless( + ('with-bar',None,'include bar (default)') in dist.feature_options + ) + self.failUnless( + ('without-bar',None,'exclude bar') in dist.feature_options + ) + self.assertEqual(dist.feature_negopt['without-foo'],'with-foo') + self.assertEqual(dist.feature_negopt['without-bar'],'with-bar') + self.assertEqual(dist.feature_negopt['without-dwim'],'with-dwim') + self.failIf('without-baz' in dist.feature_negopt) + + def testUseFeatures(self): + dist = self.dist + self.assertEqual(dist.with_foo,1) + self.assertEqual(dist.with_bar,0) + self.assertEqual(dist.with_baz,1) + self.failIf('bar_et' in dist.py_modules) + self.failIf('pkg.bar' in dist.packages) + self.failUnless('pkg.baz' in dist.packages) + self.failUnless('scripts/baz_it' in dist.scripts) + self.failUnless(('libfoo','foo/foofoo.c') in dist.libraries) + self.assertEqual(dist.ext_modules,[]) + self.assertEqual(dist.require_features, [self.req]) + + # If we ask for bar, it should fail because we explicitly disabled + # it on the command line + self.assertRaises(DistutilsOptionError, dist.include_feature, 'bar') + + def testFeatureWithInvalidRemove(self): + self.assertRaises( + SystemExit, makeSetup, features = {'x':Feature('x', remove='y')} + ) + +class TestCommandTests(TestCase): + + def testTestIsCommand(self): + test_cmd = makeSetup().get_command_obj('test') + self.failUnless(isinstance(test_cmd, distutils.cmd.Command)) + + def testLongOptSuiteWNoDefault(self): + ts1 = makeSetup(script_args=['test','--test-suite=foo.tests.suite']) + ts1 = ts1.get_command_obj('test') + ts1.ensure_finalized() + self.assertEqual(ts1.test_suite, 'foo.tests.suite') + + def testDefaultSuite(self): + ts2 = makeSetup(test_suite='bar.tests.suite').get_command_obj('test') + ts2.ensure_finalized() + self.assertEqual(ts2.test_suite, 'bar.tests.suite') + + def testDefaultWModuleOnCmdLine(self): + ts3 = makeSetup( + test_suite='bar.tests', + script_args=['test','-m','foo.tests'] + ).get_command_obj('test') + ts3.ensure_finalized() + self.assertEqual(ts3.test_module, 'foo.tests') + self.assertEqual(ts3.test_suite, 'foo.tests.test_suite') + + def testConflictingOptions(self): + ts4 = makeSetup( + script_args=['test','-m','bar.tests', '-s','foo.tests.suite'] + ).get_command_obj('test') + self.assertRaises(DistutilsOptionError, ts4.ensure_finalized) + + def testNoSuite(self): + ts5 = makeSetup().get_command_obj('test') + ts5.ensure_finalized() + self.assertEqual(ts5.test_suite, None) + + + + + diff --git a/setuptools-0.6c16dev3.egg/setuptools/tests/test_packageindex.py b/setuptools-0.6c16dev3.egg/setuptools/tests/test_packageindex.py new file mode 100644 index 0000000..0231eda --- /dev/null +++ b/setuptools-0.6c16dev3.egg/setuptools/tests/test_packageindex.py @@ -0,0 +1,27 @@ +"""Package Index Tests +""" +# More would be better! 
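+# For instance (a hypothetical sketch, not part of the original suite),
+# coverage could be extended with a test that url_ok() refuses a host
+# that is not on the allowed list:
+#
+#     def test_url_bad_host(self):
+#         index = setuptools.package_index.PackageIndex(
+#             hosts=('www.example.com',)
+#         )
+#         self.assert_(not index.url_ok('http://elsewhere.example.net/x'))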
+ +import os, shutil, tempfile, unittest, urllib2 +import pkg_resources +import setuptools.package_index + +class TestPackageIndex(unittest.TestCase): + + def test_bad_urls(self): + index = setuptools.package_index.PackageIndex() + url = 'http://127.0.0.1/nonesuch/test_package_index' + try: + v = index.open_url(url) + except Exception, v: + self.assert_(url in str(v)) + else: + self.assert_(isinstance(v,urllib2.HTTPError)) + + def test_url_ok(self): + index = setuptools.package_index.PackageIndex( + hosts=('www.example.com',) + ) + url = 'file:///tmp/test_package_index' + self.assert_(index.url_ok(url, True)) + diff --git a/setuptools-0.6c16dev3.egg/setuptools/tests/test_resources.py b/setuptools-0.6c16dev3.egg/setuptools/tests/test_resources.py new file mode 100755 index 0000000..1c010e7 --- /dev/null +++ b/setuptools-0.6c16dev3.egg/setuptools/tests/test_resources.py @@ -0,0 +1,533 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- +# NOTE: the shebang and encoding lines are for ScriptHeaderTests; do not remove +from unittest import TestCase, makeSuite; from pkg_resources import * +from setuptools.command.easy_install import get_script_header, is_sh +import os, pkg_resources, sys, StringIO +try: frozenset +except NameError: + from sets import ImmutableSet as frozenset + +class Metadata(EmptyProvider): + """Mock object to return metadata as if from an on-disk distribution""" + + def __init__(self,*pairs): + self.metadata = dict(pairs) + + def has_metadata(self,name): + return name in self.metadata + + def get_metadata(self,name): + return self.metadata[name] + + def get_metadata_lines(self,name): + return yield_lines(self.get_metadata(name)) + +class DistroTests(TestCase): + + def testCollection(self): + # empty path should produce no distributions + ad = Environment([], platform=None, python=None) + self.assertEqual(list(ad), []) + self.assertEqual(ad['FooPkg'],[]) + ad.add(Distribution.from_filename("FooPkg-1.3_1.egg")) + ad.add(Distribution.from_filename("FooPkg-1.4-py2.4-win32.egg")) + ad.add(Distribution.from_filename("FooPkg-1.2-py2.4.egg")) + + # Name is in there now + self.failUnless(ad['FooPkg']) + # But only 1 package + self.assertEqual(list(ad), ['foopkg']) + + # Distributions sort by version + self.assertEqual( + [dist.version for dist in ad['FooPkg']], ['1.4','1.3-1','1.2'] + ) + # Removing a distribution leaves sequence alone + ad.remove(ad['FooPkg'][1]) + self.assertEqual( + [dist.version for dist in ad['FooPkg']], ['1.4','1.2'] + ) + # And inserting adds them in order + ad.add(Distribution.from_filename("FooPkg-1.9.egg")) + self.assertEqual( + [dist.version for dist in ad['FooPkg']], ['1.9','1.4','1.2'] + ) + + ws = WorkingSet([]) + foo12 = Distribution.from_filename("FooPkg-1.2-py2.4.egg") + foo14 = Distribution.from_filename("FooPkg-1.4-py2.4-win32.egg") + req, = parse_requirements("FooPkg>=1.3") + + # Nominal case: no distros on path, should yield all applicable + self.assertEqual(ad.best_match(req,ws).version, '1.9') + # If a matching distro is already installed, should return only that + ws.add(foo14); self.assertEqual(ad.best_match(req,ws).version, '1.4') + + # If the first matching distro is unsuitable, it's a version conflict + ws = WorkingSet([]); ws.add(foo12); ws.add(foo14) + self.assertRaises(VersionConflict, ad.best_match, req, ws) + + # If more than one match on the path, the first one takes precedence + ws = WorkingSet([]); ws.add(foo14); ws.add(foo12); ws.add(foo14); + self.assertEqual(ad.best_match(req,ws).version, '1.4') + + def checkFooPkg(self,d): + 
self.assertEqual(d.project_name, "FooPkg") + self.assertEqual(d.key, "foopkg") + self.assertEqual(d.version, "1.3-1") + self.assertEqual(d.py_version, "2.4") + self.assertEqual(d.platform, "win32") + self.assertEqual(d.parsed_version, parse_version("1.3-1")) + + def testDistroBasics(self): + d = Distribution( + "/some/path", + project_name="FooPkg",version="1.3-1",py_version="2.4",platform="win32" + ) + self.checkFooPkg(d) + + d = Distribution("/some/path") + self.assertEqual(d.py_version, sys.version[:3]) + self.assertEqual(d.platform, None) + + def testDistroParse(self): + d = Distribution.from_filename("FooPkg-1.3_1-py2.4-win32.egg") + self.checkFooPkg(d) + d = Distribution.from_filename("FooPkg-1.3_1-py2.4-win32.egg-info") + self.checkFooPkg(d) + + def testDistroMetadata(self): + d = Distribution( + "/some/path", project_name="FooPkg", py_version="2.4", platform="win32", + metadata = Metadata( + ('PKG-INFO',"Metadata-Version: 1.0\nVersion: 1.3-1\n") + ) + ) + self.checkFooPkg(d) + + + def distRequires(self, txt): + return Distribution("/foo", metadata=Metadata(('depends.txt', txt))) + + def checkRequires(self, dist, txt, extras=()): + self.assertEqual( + list(dist.requires(extras)), + list(parse_requirements(txt)) + ) + + def testDistroDependsSimple(self): + for v in "Twisted>=1.5", "Twisted>=1.5\nZConfig>=2.0": + self.checkRequires(self.distRequires(v), v) + + + def testResolve(self): + ad = Environment([]); ws = WorkingSet([]) + # Resolving no requirements -> nothing to install + self.assertEqual( list(ws.resolve([],ad)), [] ) + # Request something not in the collection -> DistributionNotFound + self.assertRaises( + DistributionNotFound, ws.resolve, parse_requirements("Foo"), ad + ) + Foo = Distribution.from_filename( + "/foo_dir/Foo-1.2.egg", + metadata=Metadata(('depends.txt', "[bar]\nBaz>=2.0")) + ) + ad.add(Foo); ad.add(Distribution.from_filename("Foo-0.9.egg")) + + # Request thing(s) that are available -> list to activate + for i in range(3): + targets = list(ws.resolve(parse_requirements("Foo"), ad)) + self.assertEqual(targets, [Foo]) + map(ws.add,targets) + self.assertRaises(VersionConflict, ws.resolve, + parse_requirements("Foo==0.9"), ad) + ws = WorkingSet([]) # reset + + # Request an extra that causes an unresolved dependency for "Baz" + self.assertRaises( + DistributionNotFound, ws.resolve,parse_requirements("Foo[bar]"), ad + ) + Baz = Distribution.from_filename( + "/foo_dir/Baz-2.1.egg", metadata=Metadata(('depends.txt', "Foo")) + ) + ad.add(Baz) + + # Activation list now includes resolved dependency + self.assertEqual( + list(ws.resolve(parse_requirements("Foo[bar]"), ad)), [Foo,Baz] + ) + # Requests for conflicting versions produce VersionConflict + self.assertRaises( VersionConflict, + ws.resolve, parse_requirements("Foo==1.2\nFoo!=1.2"), ad + ) + + def testDistroDependsOptions(self): + d = self.distRequires(""" + Twisted>=1.5 + [docgen] + ZConfig>=2.0 + docutils>=0.3 + [fastcgi] + fcgiapp>=0.1""") + self.checkRequires(d,"Twisted>=1.5") + self.checkRequires( + d,"Twisted>=1.5 ZConfig>=2.0 docutils>=0.3".split(), ["docgen"] + ) + self.checkRequires( + d,"Twisted>=1.5 fcgiapp>=0.1".split(), ["fastcgi"] + ) + self.checkRequires( + d,"Twisted>=1.5 ZConfig>=2.0 docutils>=0.3 fcgiapp>=0.1".split(), + ["docgen","fastcgi"] + ) + self.checkRequires( + d,"Twisted>=1.5 fcgiapp>=0.1 ZConfig>=2.0 docutils>=0.3".split(), + ["fastcgi", "docgen"] + ) + self.assertRaises(UnknownExtra, d.requires, ["foo"]) + + + + + + + + + + + + + + + + + +class EntryPointTests(TestCase): + + def 
assertfields(self, ep): + self.assertEqual(ep.name,"foo") + self.assertEqual(ep.module_name,"setuptools.tests.test_resources") + self.assertEqual(ep.attrs, ("EntryPointTests",)) + self.assertEqual(ep.extras, ("x",)) + self.failUnless(ep.load() is EntryPointTests) + self.assertEqual( + str(ep), + "foo = setuptools.tests.test_resources:EntryPointTests [x]" + ) + + def setUp(self): + self.dist = Distribution.from_filename( + "FooPkg-1.2-py2.4.egg", metadata=Metadata(('requires.txt','[x]'))) + + def testBasics(self): + ep = EntryPoint( + "foo", "setuptools.tests.test_resources", ["EntryPointTests"], + ["x"], self.dist + ) + self.assertfields(ep) + + def testParse(self): + s = "foo = setuptools.tests.test_resources:EntryPointTests [x]" + ep = EntryPoint.parse(s, self.dist) + self.assertfields(ep) + + ep = EntryPoint.parse("bar baz= spammity[PING]") + self.assertEqual(ep.name,"bar baz") + self.assertEqual(ep.module_name,"spammity") + self.assertEqual(ep.attrs, ()) + self.assertEqual(ep.extras, ("ping",)) + + ep = EntryPoint.parse(" fizzly = wocka:foo") + self.assertEqual(ep.name,"fizzly") + self.assertEqual(ep.module_name,"wocka") + self.assertEqual(ep.attrs, ("foo",)) + self.assertEqual(ep.extras, ()) + + def testRejects(self): + for ep in [ + "foo", "x=1=2", "x=a:b:c", "q=x/na", "fez=pish:tush-z", "x=f[a]>2", + ]: + try: EntryPoint.parse(ep) + except ValueError: pass + else: raise AssertionError("Should've been bad", ep) + + def checkSubMap(self, m): + self.assertEqual(len(m), len(self.submap_expect)) + for key, ep in self.submap_expect.iteritems(): + self.assertEqual(repr(m.get(key)), repr(ep)) + + submap_expect = dict( + feature1=EntryPoint('feature1', 'somemodule', ['somefunction']), + feature2=EntryPoint('feature2', 'another.module', ['SomeClass'], ['extra1','extra2']), + feature3=EntryPoint('feature3', 'this.module', extras=['something']) + ) + submap_str = """ + # define features for blah blah + feature1 = somemodule:somefunction + feature2 = another.module:SomeClass [extra1,extra2] + feature3 = this.module [something] + """ + + def testParseList(self): + self.checkSubMap(EntryPoint.parse_group("xyz", self.submap_str)) + self.assertRaises(ValueError, EntryPoint.parse_group, "x a", "foo=bar") + self.assertRaises(ValueError, EntryPoint.parse_group, "x", + ["foo=baz", "foo=bar"]) + + def testParseMap(self): + m = EntryPoint.parse_map({'xyz':self.submap_str}) + self.checkSubMap(m['xyz']) + self.assertEqual(m.keys(),['xyz']) + m = EntryPoint.parse_map("[xyz]\n"+self.submap_str) + self.checkSubMap(m['xyz']) + self.assertEqual(m.keys(),['xyz']) + self.assertRaises(ValueError, EntryPoint.parse_map, ["[xyz]", "[xyz]"]) + self.assertRaises(ValueError, EntryPoint.parse_map, self.submap_str) + +class RequirementsTests(TestCase): + + def testBasics(self): + r = Requirement.parse("Twisted>=1.2") + self.assertEqual(str(r),"Twisted>=1.2") + self.assertEqual(repr(r),"Requirement.parse('Twisted>=1.2')") + self.assertEqual(r, Requirement("Twisted", [('>=','1.2')], ())) + self.assertEqual(r, Requirement("twisTed", [('>=','1.2')], ())) + self.assertNotEqual(r, Requirement("Twisted", [('>=','2.0')], ())) + self.assertNotEqual(r, Requirement("Zope", [('>=','1.2')], ())) + self.assertNotEqual(r, Requirement("Zope", [('>=','3.0')], ())) + self.assertNotEqual(r, Requirement.parse("Twisted[extras]>=1.2")) + + def testOrdering(self): + r1 = Requirement("Twisted", [('==','1.2c1'),('>=','1.2')], ()) + r2 = Requirement("Twisted", [('>=','1.2'),('==','1.2c1')], ()) + self.assertEqual(r1,r2) + 
self.assertEqual(str(r1),str(r2)) + self.assertEqual(str(r2),"Twisted==1.2c1,>=1.2") + + def testBasicContains(self): + r = Requirement("Twisted", [('>=','1.2')], ()) + foo_dist = Distribution.from_filename("FooPkg-1.3_1.egg") + twist11 = Distribution.from_filename("Twisted-1.1.egg") + twist12 = Distribution.from_filename("Twisted-1.2.egg") + self.failUnless(parse_version('1.2') in r) + self.failUnless(parse_version('1.1') not in r) + self.failUnless('1.2' in r) + self.failUnless('1.1' not in r) + self.failUnless(foo_dist not in r) + self.failUnless(twist11 not in r) + self.failUnless(twist12 in r) + + def testAdvancedContains(self): + r, = parse_requirements("Foo>=1.2,<=1.3,==1.9,>2.0,!=2.5,<3.0,==4.5") + for v in ('1.2','1.2.2','1.3','1.9','2.0.1','2.3','2.6','3.0c1','4.5'): + self.failUnless(v in r, (v,r)) + for v in ('1.2c1','1.3.1','1.5','1.9.1','2.0','2.5','3.0','4.0'): + self.failUnless(v not in r, (v,r)) + + + def testOptionsAndHashing(self): + r1 = Requirement.parse("Twisted[foo,bar]>=1.2") + r2 = Requirement.parse("Twisted[bar,FOO]>=1.2") + r3 = Requirement.parse("Twisted[BAR,FOO]>=1.2.0") + self.assertEqual(r1,r2) + self.assertEqual(r1,r3) + self.assertEqual(r1.extras, ("foo","bar")) + self.assertEqual(r2.extras, ("bar","foo")) # extras are normalized + self.assertEqual(hash(r1), hash(r2)) + self.assertEqual( + hash(r1), hash(("twisted", ((">=",parse_version("1.2")),), + frozenset(["foo","bar"]))) + ) + + def testVersionEquality(self): + r1 = Requirement.parse("setuptools==0.3a2") + r2 = Requirement.parse("setuptools!=0.3a4") + d = Distribution.from_filename + + self.failIf(d("setuptools-0.3a4.egg") in r1) + self.failIf(d("setuptools-0.3a1.egg") in r1) + self.failIf(d("setuptools-0.3a4.egg") in r2) + + self.failUnless(d("setuptools-0.3a2.egg") in r1) + self.failUnless(d("setuptools-0.3a2.egg") in r2) + self.failUnless(d("setuptools-0.3a3.egg") in r2) + self.failUnless(d("setuptools-0.3a5.egg") in r2) + + + + + + + + + + + + + + +class ParseTests(TestCase): + + def testEmptyParse(self): + self.assertEqual(list(parse_requirements('')), []) + + def testYielding(self): + for inp,out in [ + ([], []), ('x',['x']), ([[]],[]), (' x\n y', ['x','y']), + (['x\n\n','y'], ['x','y']), + ]: + self.assertEqual(list(pkg_resources.yield_lines(inp)),out) + + def testSplitting(self): + self.assertEqual( + list( + pkg_resources.split_sections(""" + x + [Y] + z + + a + [b ] + # foo + c + [ d] + [q] + v + """ + ) + ), + [(None,["x"]), ("Y",["z","a"]), ("b",["c"]), ("d",[]), ("q",["v"])] + ) + self.assertRaises(ValueError,list,pkg_resources.split_sections("[foo")) + + def testSafeName(self): + self.assertEqual(safe_name("adns-python"), "adns-python") + self.assertEqual(safe_name("WSGI Utils"), "WSGI-Utils") + self.assertEqual(safe_name("WSGI Utils"), "WSGI-Utils") + self.assertEqual(safe_name("Money$$$Maker"), "Money-Maker") + self.assertNotEqual(safe_name("peak.web"), "peak-web") + + def testSafeVersion(self): + self.assertEqual(safe_version("1.2-1"), "1.2-1") + self.assertEqual(safe_version("1.2 alpha"), "1.2.alpha") + self.assertEqual(safe_version("2.3.4 20050521"), "2.3.4.20050521") + self.assertEqual(safe_version("Money$$$Maker"), "Money-Maker") + self.assertEqual(safe_version("peak.web"), "peak.web") + + def testSimpleRequirements(self): + self.assertEqual( + list(parse_requirements('Twis-Ted>=1.2-1')), + [Requirement('Twis-Ted',[('>=','1.2-1')], ())] + ) + self.assertEqual( + list(parse_requirements('Twisted >=1.2, \ # more\n<2.0')), + [Requirement('Twisted',[('>=','1.2'),('<','2.0')], ())] + ) 
+        self.assertEqual(
+            Requirement.parse("FooBar==1.99a3"),
+            Requirement("FooBar", [('==','1.99a3')], ())
+        )
+        self.assertRaises(ValueError,Requirement.parse,">=2.3")
+        self.assertRaises(ValueError,Requirement.parse,"x\\")
+        self.assertRaises(ValueError,Requirement.parse,"x==2 q")
+        self.assertRaises(ValueError,Requirement.parse,"X==1\nY==2")
+        self.assertRaises(ValueError,Requirement.parse,"#")
+
+    def testVersionEquality(self):
+        def c(s1,s2):
+            p1, p2 = parse_version(s1),parse_version(s2)
+            self.assertEqual(p1,p2, (s1,s2,p1,p2))
+
+        c('1.2-rc1', '1.2rc1')
+        c('0.4', '0.4.0')
+        c('0.4.0.0', '0.4.0')
+        c('0.4.0-0', '0.4-0')
+        c('0pl1', '0.0pl1')
+        c('0pre1', '0.0c1')
+        c('0.0.0preview1', '0c1')
+        c('0.0c1', '0-rc1')
+        c('1.2a1', '1.2.a.1'); c('1.2...a', '1.2a')
+
+    def testVersionOrdering(self):
+        def c(s1,s2):
+            p1, p2 = parse_version(s1),parse_version(s2)
+            self.failUnless(p1<p2, (s1,s2,p1,p2))
+
+ * The following patches to setuptools have been applied:
+
+     "easy_install will install a package that is already there"
+
+     "be more like distutils with regard to --prefix="
+
+     "respect the PYTHONPATH"
+
+   (Note: this patch does not work as intended when site.py has been modified.
+   This will be fixed in a future version.)
+
+
+ * The following patch to setuptools introduced bugs, and has been reverted
+   in zetuptoolz:
+
+     $ svn log -r 45514
+     ------------------------------------------------------------------------
+     r45514 | phillip.eby | 2006-04-18 04:03:16 +0100 (Tue, 18 Apr 2006) | 9 lines
+
+     Backport pkgutil, pydoc, and doctest from the 2.5 trunk to setuptools
+     0.7 trunk.  (Sideport?)  Setuptools 0.7 will install these in place of
+     the 2.3/2.4 versions (at least of pydoc and doctest) to let them work
+     properly with eggs.  pkg_resources now depends on the 2.5 pkgutil, which
+     is included here as _pkgutil, to work around the fact that some system
+     packagers will install setuptools without overriding the stdlib modules.
+     But users who install their own setuptools will get them, and the system
+     packaged people probably don't need them.
+     ------------------------------------------------------------------------
+
+
+ * If unpatched setuptools decides that it needs to change an existing site.py
+   file that appears not to have been written by it (because the file does not
+   start with "def __boot():"), it aborts the installation.
+   zetuptoolz leaves the file alone and outputs a warning, but continues with
+   the installation.
+
+
+ * The scripts written by zetuptoolz have the following extra line:
+
+     # generated by zetuptoolz
+
+   after the header.
+
+
+ * Windows-specific changes (native Python):
+
+   Python distributions may have command-line or GUI scripts.
+   On Windows, setuptools creates an executable wrapper to run each
+   script.  zetuptoolz uses a different approach that does not require
+   an .exe wrapper.  It writes approximately the same script file that
+   is used on other platforms, but with a .pyscript extension.
+   It also writes a shell-script wrapper (without any extension) that
+   is only used when the command is run from a Cygwin shell.
+
+   Some of the advantages of this approach are:
+
+     * Unicode arguments are preserved (although the program will
+       need to use some Windows-specific code to get at them in
+       current versions of Python);
+     * it works correctly on 64-bit Windows;
+     * the zetuptoolz distribution need not contain either any
+       binary executables, or any C code that needs to be compiled.
+
+   See setuptools\tests\win_script_wrapper.txt for further details.
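+
+   (For illustration only: a minimal sketch, under stated assumptions, of
+   how the per-user PATHEXT change described below might be performed.
+   This is not the actual zetuptoolz code, and add_pathext_entry is a
+   hypothetical helper name.)
+
+     import _winreg
+
+     def add_pathext_entry(ext='.PYSCRIPT'):
+         # Per-user environment overrides live under HKCU\Environment.
+         key = _winreg.OpenKey(_winreg.HKEY_CURRENT_USER, 'Environment',
+                               0, _winreg.KEY_READ | _winreg.KEY_SET_VALUE)
+         try:
+             try:
+                 pathext, kind = _winreg.QueryValueEx(key, 'PATHEXT')
+             except WindowsError:
+                 # no per-user override yet; create one
+                 pathext, kind = '', _winreg.REG_EXPAND_SZ
+             exts = [e for e in pathext.split(';') if e]
+             if ext.upper() not in [e.upper() for e in exts]:
+                 exts.append(ext)
+                 _winreg.SetValueEx(key, 'PATHEXT', 0, kind, ';'.join(exts))
+         finally:
+             key.Close()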
+
+   Installing or building any distribution on Windows will automatically
+   associate .pyscript with the native Python interpreter for the current
+   user.  It will also add .pyscript and .pyw to the PATHEXT variable for
+   the current user, which is needed to allow scripts to be run without
+   typing any extension.
+
+   There is an additional setup.py command that can be used to perform
+   these steps separately (which isn't normally needed, but might be
+   useful for debugging):
+
+     python setup.py scriptsetup
+
+   Adding the --allusers option, i.e.
+
+     python setup.py scriptsetup --allusers
+
+   will make the .pyscript association and changes to the PATHEXT variable
+   for all users of this Windows installation, except those that have it
+   overridden in their per-user environment.  In this case setup.py must be
+   run with Administrator privileges, e.g. from a Command Prompt whose
+   shortcut has been set to run as Administrator.
diff --git a/stridetune-bench.ba.sh b/stridetune-bench.ba.sh
new file mode 100755
index 0000000..01555ca
--- /dev/null
+++ b/stridetune-bench.ba.sh
@@ -0,0 +1,16 @@
+#!/bin/bash
+
+/bin/rm -rf ./benchresults
+mkdir benchresults
+STRIDE=32
+while [ $STRIDE -lt 32769 ] ; do
+  /bin/rm -rf build
+  rm -f zfec/_fec.so
+  /bin/rm -rf instdir
+  mkdir instdir
+  PYTHONPATH=instdir ./setup.py develop --install-dir=instdir --stride=${STRIDE} >/dev/null
+  echo $STRIDE
+  PYTHONPATH=instdir python -OO ./bench/bench_zfec.py >> benchresults/comp_0-stride_$STRIDE
+  tail -1 benchresults/comp_0-stride_$STRIDE
+  STRIDE=$(( $STRIDE + 32 ))
+done
diff --git a/stridetune-bench.py b/stridetune-bench.py
new file mode 100755
index 0000000..02c13e1
--- /dev/null
+++ b/stridetune-bench.py
@@ -0,0 +1,43 @@
+#!/usr/bin/env python
+
+import bisect, random, os, re
+
+from pyutil import fileutil
+
+assert not os.path.exists("benchresults")
+
+os.mkdir("benchresults")
+
+MIN=512
+MAX=1024
+
+results = {}
+
+R=re.compile("ave rate: ([1-9][0-9]*)")
+
+def measure(stride):
+    fileutil.rm_dir("build")
+    fileutil.rm_dir("instdir")
+    fileutil.remove_if_possible(os.path.join("zfec", "_fec.so"))
+    fileutil.make_dirs("instdir")
+    fname = os.path.join("benchresults", "comp_0-stride_%d"%stride)
+    os.system("PYTHONPATH=instdir ./setup.py develop --install-dir=instdir --stride=%d >/dev/null" % stride)
+    os.system("PYTHONPATH=instdir python -OO ./bench/bench_zfec.py >> %s" % fname)
+    inf = open(fname, "rU")
+    for l in inf:
+        m = R.search(l)
+        if m:
+            result = int(m.group(1))
+            if results.has_key(stride):
+                print "stride: %d, results: %d (dup %d)" % (stride, result, results[stride])
+            else:
+                print "stride: %d, results: %d" % (stride, result)
+            results[stride] = result
+            break
+
+measure(MIN)
+measure(MAX)
+
+while True:
+    stride = random.randrange(MIN, MAX+1)
+    measure(stride)
diff --git a/stridetune-dat.bash b/stridetune-dat.bash
new file mode 100755
index 0000000..b250bff
--- /dev/null
+++ b/stridetune-dat.bash
@@ -0,0 +1,10 @@
+#!/bin/bash
+
+DAT=stridetune.dat
+rm -f $DAT
+for F in benchresults/comp_0-stride_*; do
+  NUM=${F:27}
+  for M in `grep "^N: " benchresults/comp_0-stride_$NUM | cut -d':' -f10-` ; do
+    echo "$NUM $M" >> $DAT
+  done
+done
diff --git a/stridetune-graph.py b/stridetune-graph.py
new file mode 100755
index 0000000..7025989
--- /dev/null
+++ b/stridetune-graph.py
@@ -0,0 +1,10 @@
+#!/usr/bin/env python
+
+from pyx import *
+def g(f):
+    g=graph.graphxy(width=16, x=graph.axis.linear(), y=graph.axis.linear())
+
+    g.plot([graph.data.file(f, x=1, y=2)])
+    g.writeEPSfile(f+'.eps')
+
+g('stridetune.dat')
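+
+# A possible end-to-end sequence for these tuning scripts (inferred from
+# the scripts themselves, not documented in this patch):
+#
+#   ./stridetune-bench.py     # or stridetune-bench.ba.sh; fills benchresults/
+#   ./stridetune-dat.bash     # flattens benchresults/* into stridetune.dat
+#   ./stridetune-graph.py     # plots stridetune.dat to stridetune.dat.eps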
diff --git a/zfec.egg-info/stdeb.cfg b/zfec.egg-info/stdeb.cfg new file mode 100644 index 0000000..69707c1 --- /dev/null +++ b/zfec.egg-info/stdeb.cfg @@ -0,0 +1,2 @@ +[zfec] +Copyright-File: copyright diff --git a/zfec/COPYING.GPL b/zfec/COPYING.GPL deleted file mode 100644 index 5075166..0000000 --- a/zfec/COPYING.GPL +++ /dev/null @@ -1,451 +0,0 @@ -This work also comes with the added permission that you may combine it with a -work licensed under the OpenSSL license (any version) and distribute the -resulting combined work, as long as you follow the requirements of the -licences of this work in regard to all of the resulting combined work -aside from the work licensed under the OpenSSL license. - -This work also comes with the added permission that you may combine it with a -work licensed under the Eclipse Public Licence (any version) and distribute -the resulting combined work, as long as you follow the requirements of the -licences of this work in regard to all of the resulting combined work -aside from the work licensed under the Eclipse Public Licence. - -This work also comes with the added permission that you may combine it with a -work licensed under the Q Public Licence (any version) and distribute the -resulting combined work, as long as you follow the requirements of the -licences of this work in regard to all of the resulting combined work -aside from the work licensed under the Q Public Licence. - -This work also comes with the added permission that you may combine it with a -work licensed under the Apache Licence (any version) and distribute the -resulting combined work, as long as you follow the requirements of the -licences of this work in regard to all of the resulting combined work -aside from the work licensed under the Apache Licence. - -This work also comes with the added permission that you may combine it with a -work licensed under the GNU Lesser General Public License (any version) and -distribute the resulting combined work, as long as you follow the -requirements of the licences of this work in regard to all of the -resulting combined work aside from the work licensed under the GNU Lesser -General Public License. - -This work also comes with the added permission that you may combine it with a -work licensed under the Zope Public License (any version) and distribute the -resulting combined work, as long as you follow the requirements of the -licences of this work in regard to all of the resulting combined work -aside from the work licensed under the Zope Public License. - -This work also comes with the added permission that you may combine it with a -work licensed under the Python Software Foundation License (any version) and -distribute the resulting combined work, as long as you follow the -requirements of the licences of this work in regard to all of the -resulting combined work aside from the work licensed under the Python -Software Foundation License. - -This work also comes with the added permission that you may combine it with a -work licensed under the Academic Free License (any version) and distribute -the resulting combined work, as long as you follow the requirements of the -licences of this work in regard to all of the resulting combined work -aside from the work licensed under the Academic Free License. 
- -This work also comes with the added permission that you may combine it with a -work licensed under the Apple Public Source License (any version) and -distribute the resulting combined work, as long as you follow the -requirements of the licences of this work in regard to all of the -resulting combined work aside from the work licensed under the Apple Public -Source License. - -This work also comes with the added permission that you may combine it with a -work licensed under the BitTorrent Open Source License (any version) and -distribute the resulting combined work, as long as you follow the -requirements of the licences of this work in regard to all of the -resulting combined work aside from the work licensed under the BitTorrent -Open Source License. - -This work also comes with the added permission that you may combine it with a -work licensed under the Lucent Public License (any version) and distribute -the resulting combined work, as long as you follow the requirements of the -licences of this work in regard to all of the resulting combined work -aside from the work licensed under the Lucent Public License. - -This work also comes with the added permission that you may combine it with a -work licensed under the Jabber Open Source License (any version) and -distribute the resulting combined work, as long as you follow the -requirements of the licences of this work in regard to all of the -resulting combined work aside from the work licensed under the Jabber Open -Source License. - -This work also comes with the added permission that you may combine it with a -work licensed under the Common Development and Distribution License (any -version) and distribute the resulting combined work, as long as you follow -the requirements of the licences of this work in regard to all of the -resulting combined work aside from the work licensed under the Common -Development and Distribution License. - -This work also comes with the added permission that you may combine it with a -work licensed under the Microsoft Public License (any version) and distribute -the resulting combined work, as long as you follow the requirements of the -licences of this work in regard to all of the resulting combined work -aside from the work licensed under the Microsoft Public License. - -This work also comes with the added permission that you may combine it with a -work licensed under the Microsoft Reciprocal License (any version) and -distribute the resulting combined work, as long as you follow the -requirements of the licences of this work in regard to all of the -resulting combined work aside from the work licensed under the Microsoft -Reciprocal License. - -This work also comes with the added permission that you may combine it with a -work licensed under the Sun Industry Standards Source License (any version) -and distribute the resulting combined work, as long as you follow the -requirements of the licences of this work in regard to all of the -resulting combined work aside from the work licensed under the Sun Industry -Standards Source License. - -This work also comes with the added permission that you may combine it with a -work licensed under the Open Software License (any version) and distribute -the resulting combined work, as long as you follow the requirements of the -licences of this work in regard to all of the resulting combined work -aside from the work licensed under the Open Software License. 
- - - - GNU GENERAL PUBLIC LICENSE - Version 2, June 1991 - - Copyright (C) 1989, 1991 Free Software Foundation, Inc., - 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA - Everyone is permitted to copy and distribute verbatim copies - of this license document, but changing it is not allowed. - - Preamble - - The licenses for most software are designed to take away your -freedom to share and change it. By contrast, the GNU General Public -License is intended to guarantee your freedom to share and change free -software--to make sure the software is free for all its users. This -General Public License applies to most of the Free Software -Foundation's software and to any other program whose authors commit to -using it. (Some other Free Software Foundation software is covered by -the GNU Lesser General Public License instead.) You can apply it to -your programs, too. - - When we speak of free software, we are referring to freedom, not -price. Our General Public Licenses are designed to make sure that you -have the freedom to distribute copies of free software (and charge for -this service if you wish), that you receive source code or can get it -if you want it, that you can change the software or use pieces of it -in new free programs; and that you know you can do these things. - - To protect your rights, we need to make restrictions that forbid -anyone to deny you these rights or to ask you to surrender the rights. -These restrictions translate to certain responsibilities for you if you -distribute copies of the software, or if you modify it. - - For example, if you distribute copies of such a program, whether -gratis or for a fee, you must give the recipients all the rights that -you have. You must make sure that they, too, receive or can get the -source code. And you must show them these terms so they know their -rights. - - We protect your rights with two steps: (1) copyright the software, and -(2) offer you this license which gives you legal permission to copy, -distribute and/or modify the software. - - Also, for each author's protection and ours, we want to make certain -that everyone understands that there is no warranty for this free -software. If the software is modified by someone else and passed on, we -want its recipients to know that what they have is not the original, so -that any problems introduced by others will not reflect on the original -authors' reputations. - - Finally, any free program is threatened constantly by software -patents. We wish to avoid the danger that redistributors of a free -program will individually obtain patent licenses, in effect making the -program proprietary. To prevent this, we have made it clear that any -patent must be licensed for everyone's free use or not licensed at all. - - The precise terms and conditions for copying, distribution and -modification follow. - - GNU GENERAL PUBLIC LICENSE - TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION - - 0. This License applies to any program or other work which contains -a notice placed by the copyright holder saying it may be distributed -under the terms of this General Public License. The "Program", below, -refers to any such program or work, and a "work based on the Program" -means either the Program or any derivative work under copyright law: -that is to say, a work containing the Program or a portion of it, -either verbatim or with modifications and/or translated into another -language. (Hereinafter, translation is included without limitation in -the term "modification".) 
Each licensee is addressed as "you". - -Activities other than copying, distribution and modification are not -covered by this License; they are outside its scope. The act of -running the Program is not restricted, and the output from the Program -is covered only if its contents constitute a work based on the -Program (independent of having been made by running the Program). -Whether that is true depends on what the Program does. - - 1. You may copy and distribute verbatim copies of the Program's -source code as you receive it, in any medium, provided that you -conspicuously and appropriately publish on each copy an appropriate -copyright notice and disclaimer of warranty; keep intact all the -notices that refer to this License and to the absence of any warranty; -and give any other recipients of the Program a copy of this License -along with the Program. - -You may charge a fee for the physical act of transferring a copy, and -you may at your option offer warranty protection in exchange for a fee. - - 2. You may modify your copy or copies of the Program or any portion -of it, thus forming a work based on the Program, and copy and -distribute such modifications or work under the terms of Section 1 -above, provided that you also meet all of these conditions: - - a) You must cause the modified files to carry prominent notices - stating that you changed the files and the date of any change. - - b) You must cause any work that you distribute or publish, that in - whole or in part contains or is derived from the Program or any - part thereof, to be licensed as a whole at no charge to all third - parties under the terms of this License. - - c) If the modified program normally reads commands interactively - when run, you must cause it, when started running for such - interactive use in the most ordinary way, to print or display an - announcement including an appropriate copyright notice and a - notice that there is no warranty (or else, saying that you provide - a warranty) and that users may redistribute the program under - these conditions, and telling the user how to view a copy of this - License. (Exception: if the Program itself is interactive but - does not normally print such an announcement, your work based on - the Program is not required to print an announcement.) - -These requirements apply to the modified work as a whole. If -identifiable sections of that work are not derived from the Program, -and can be reasonably considered independent and separate works in -themselves, then this License, and its terms, do not apply to those -sections when you distribute them as separate works. But when you -distribute the same sections as part of a whole which is a work based -on the Program, the distribution of the whole must be on the terms of -this License, whose permissions for other licensees extend to the -entire whole, and thus to each and every part regardless of who wrote it. - -Thus, it is not the intent of this section to claim rights or contest -your rights to work written entirely by you; rather, the intent is to -exercise the right to control the distribution of derivative or -collective works based on the Program. - -In addition, mere aggregation of another work not based on the Program -with the Program (or with a work based on the Program) on a volume of -a storage or distribution medium does not bring the other work under -the scope of this License. - - 3. 
You may copy and distribute the Program (or a work based on it, -under Section 2) in object code or executable form under the terms of -Sections 1 and 2 above provided that you also do one of the following: - - a) Accompany it with the complete corresponding machine-readable - source code, which must be distributed under the terms of Sections - 1 and 2 above on a medium customarily used for software interchange; or, - - b) Accompany it with a written offer, valid for at least three - years, to give any third party, for a charge no more than your - cost of physically performing source distribution, a complete - machine-readable copy of the corresponding source code, to be - distributed under the terms of Sections 1 and 2 above on a medium - customarily used for software interchange; or, - - c) Accompany it with the information you received as to the offer - to distribute corresponding source code. (This alternative is - allowed only for noncommercial distribution and only if you - received the program in object code or executable form with such - an offer, in accord with Subsection b above.) - -The source code for a work means the preferred form of the work for -making modifications to it. For an executable work, complete source -code means all the source code for all modules it contains, plus any -associated interface definition files, plus the scripts used to -control compilation and installation of the executable. However, as a -special exception, the source code distributed need not include -anything that is normally distributed (in either source or binary -form) with the major components (compiler, kernel, and so on) of the -operating system on which the executable runs, unless that component -itself accompanies the executable. - -If distribution of executable or object code is made by offering -access to copy from a designated place, then offering equivalent -access to copy the source code from the same place counts as -distribution of the source code, even though third parties are not -compelled to copy the source along with the object code. - - 4. You may not copy, modify, sublicense, or distribute the Program -except as expressly provided under this License. Any attempt -otherwise to copy, modify, sublicense or distribute the Program is -void, and will automatically terminate your rights under this License. -However, parties who have received copies, or rights, from you under -this License will not have their licenses terminated so long as such -parties remain in full compliance. - - 5. You are not required to accept this License, since you have not -signed it. However, nothing else grants you permission to modify or -distribute the Program or its derivative works. These actions are -prohibited by law if you do not accept this License. Therefore, by -modifying or distributing the Program (or any work based on the -Program), you indicate your acceptance of this License to do so, and -all its terms and conditions for copying, distributing or modifying -the Program or works based on it. - - 6. Each time you redistribute the Program (or any work based on the -Program), the recipient automatically receives a license from the -original licensor to copy, distribute or modify the Program subject to -these terms and conditions. You may not impose any further -restrictions on the recipients' exercise of the rights granted herein. -You are not responsible for enforcing compliance by third parties to -this License. - - 7. 
If, as a consequence of a court judgment or allegation of patent -infringement or for any other reason (not limited to patent issues), -conditions are imposed on you (whether by court order, agreement or -otherwise) that contradict the conditions of this License, they do not -excuse you from the conditions of this License. If you cannot -distribute so as to satisfy simultaneously your obligations under this -License and any other pertinent obligations, then as a consequence you -may not distribute the Program at all. For example, if a patent -license would not permit royalty-free redistribution of the Program by -all those who receive copies directly or indirectly through you, then -the only way you could satisfy both it and this License would be to -refrain entirely from distribution of the Program. - -If any portion of this section is held invalid or unenforceable under -any particular circumstance, the balance of the section is intended to -apply and the section as a whole is intended to apply in other -circumstances. - -It is not the purpose of this section to induce you to infringe any -patents or other property right claims or to contest validity of any -such claims; this section has the sole purpose of protecting the -integrity of the free software distribution system, which is -implemented by public license practices. Many people have made -generous contributions to the wide range of software distributed -through that system in reliance on consistent application of that -system; it is up to the author/donor to decide if he or she is willing -to distribute software through any other system and a licensee cannot -impose that choice. - -This section is intended to make thoroughly clear what is believed to -be a consequence of the rest of this License. - - 8. If the distribution and/or use of the Program is restricted in -certain countries either by patents or by copyrighted interfaces, the -original copyright holder who places the Program under this License -may add an explicit geographical distribution limitation excluding -those countries, so that distribution is permitted only in or among -countries not thus excluded. In such case, this License incorporates -the limitation as if written in the body of this License. - - 9. The Free Software Foundation may publish revised and/or new versions -of the General Public License from time to time. Such new versions will -be similar in spirit to the present version, but may differ in detail to -address new problems or concerns. - -Each version is given a distinguishing version number. If the Program -specifies a version number of this License which applies to it and "any -later version", you have the option of following the terms and conditions -either of that version or of any later version published by the Free -Software Foundation. If the Program does not specify a version number of -this License, you may choose any version ever published by the Free Software -Foundation. - - 10. If you wish to incorporate parts of the Program into other free -programs whose distribution conditions are different, write to the author -to ask for permission. For software which is copyrighted by the Free -Software Foundation, write to the Free Software Foundation; we sometimes -make exceptions for this. Our decision will be guided by the two goals -of preserving the free status of all derivatives of our free software and -of promoting the sharing and reuse of software generally. - - NO WARRANTY - - 11. 
BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY -FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN -OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES -PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED -OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS -TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE -PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, -REPAIR OR CORRECTION. - - 12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING -WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR -REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, -INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING -OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED -TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY -YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER -PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE -POSSIBILITY OF SUCH DAMAGES. - - END OF TERMS AND CONDITIONS - - How to Apply These Terms to Your New Programs - - If you develop a new program, and you want it to be of the greatest -possible use to the public, the best way to achieve this is to make it -free software which everyone can redistribute and change under these terms. - - To do so, attach the following notices to the program. It is safest -to attach them to the start of each source file to most effectively -convey the exclusion of warranty; and each file should have at least -the "copyright" line and a pointer to where the full notice is found. - - <one line to give the program's name and a brief idea of what it does.> - Copyright (C) <year> <name of author> - - This program is free software; you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation; either version 2 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License along - with this program; if not, write to the Free Software Foundation, Inc., - 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. - -Also add information on how to contact you by electronic and paper mail. - -If the program is interactive, make it output a short notice like this -when it starts in an interactive mode: - - Gnomovision version 69, Copyright (C) year name of author - Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'. - This is free software, and you are welcome to redistribute it - under certain conditions; type `show c' for details. - -The hypothetical commands `show w' and `show c' should show the appropriate -parts of the General Public License. Of course, the commands you use may -be called something other than `show w' and `show c'; they could even be -mouse-clicks or menu items--whatever suits your program. - -You should also get your employer (if you work as a programmer) or your -school, if any, to sign a "copyright disclaimer" for the program, if -necessary.
Here is a sample; alter the names: - - Yoyodyne, Inc., hereby disclaims all copyright interest in the program - `Gnomovision' (which makes passes at compilers) written by James Hacker. - - <signature of Ty Coon>, 1 April 1989 - Ty Coon, President of Vice - -This General Public License does not permit incorporating your program into -proprietary programs. If your program is a subroutine library, you may -consider it more useful to permit linking proprietary applications with the -library. If this is what you want to do, use the GNU Lesser General -Public License instead of this License. diff --git a/zfec/COPYING.TGPPL.rst b/zfec/COPYING.TGPPL.rst deleted file mode 100644 index e89b079..0000000 --- a/zfec/COPYING.TGPPL.rst +++ /dev/null @@ -1,291 +0,0 @@ -This work also comes with the added permission that you may combine it with a -work licensed under the OpenSSL license (any version) and distribute the -resulting combined work, as long as you follow the requirements of the -licences of this work in regard to all of the resulting combined work -aside from the work licensed under the OpenSSL license. - -This work also comes with the added permission that you may combine it with a -work licensed under the Eclipse Public Licence (any version) and distribute -the resulting combined work, as long as you follow the requirements of the -licences of this work in regard to all of the resulting combined work -aside from the work licensed under the Eclipse Public Licence. - -This work also comes with the added permission that you may combine it with a -work licensed under the Q Public Licence (any version) and distribute the -resulting combined work, as long as you follow the requirements of the -licences of this work in regard to all of the resulting combined work -aside from the work licensed under the Q Public Licence. - -This work also comes with the added permission that you may combine it with a -work licensed under the Apache Licence (any version) and distribute the -resulting combined work, as long as you follow the requirements of the -licences of this work in regard to all of the resulting combined work -aside from the work licensed under the Apache Licence. - -This work also comes with the added permission that you may combine it with a -work licensed under the GNU Lesser General Public License (any version) and -distribute the resulting combined work, as long as you follow the -requirements of the licences of this work in regard to all of the -resulting combined work aside from the work licensed under the GNU Lesser -General Public License. - -This work also comes with the added permission that you may combine it with a -work licensed under the Zope Public License (any version) and distribute the -resulting combined work, as long as you follow the requirements of the -licences of this work in regard to all of the resulting combined work -aside from the work licensed under the Zope Public License. - -This work also comes with the added permission that you may combine it with a -work licensed under the Python Software Foundation License (any version) and -distribute the resulting combined work, as long as you follow the -requirements of the licences of this work in regard to all of the -resulting combined work aside from the work licensed under the Python -Software Foundation License.
- -This work also comes with the added permission that you may combine it with a -work licensed under the Academic Free License (any version) and distribute -the resulting combined work, as long as you follow the requirements of the -licences of this work in regard to all of the resulting combined work -aside from the work licensed under the Academic Free License. - -This work also comes with the added permission that you may combine it with a -work licensed under the Apple Public Source License (any version) and -distribute the resulting combined work, as long as you follow the -requirements of the licences of this work in regard to all of the -resulting combined work aside from the work licensed under the Apple Public -Source License. - -This work also comes with the added permission that you may combine it with a -work licensed under the BitTorrent Open Source License (any version) and -distribute the resulting combined work, as long as you follow the -requirements of the licences of this work in regard to all of the -resulting combined work aside from the work licensed under the BitTorrent -Open Source License. - -This work also comes with the added permission that you may combine it with a -work licensed under the Lucent Public License (any version) and distribute -the resulting combined work, as long as you follow the requirements of the -licences of this work in regard to all of the resulting combined work -aside from the work licensed under the Lucent Public License. - -This work also comes with the added permission that you may combine it with a -work licensed under the Jabber Open Source License (any version) and -distribute the resulting combined work, as long as you follow the -requirements of the licences of this work in regard to all of the -resulting combined work aside from the work licensed under the Jabber Open -Source License. - -This work also comes with the added permission that you may combine it with a -work licensed under the Common Development and Distribution License (any -version) and distribute the resulting combined work, as long as you follow -the requirements of the licences of this work in regard to all of the -resulting combined work aside from the work licensed under the Common -Development and Distribution License. - -This work also comes with the added permission that you may combine it with a -work licensed under the Microsoft Public License (any version) and distribute -the resulting combined work, as long as you follow the requirements of the -licences of this work in regard to all of the resulting combined work -aside from the work licensed under the Microsoft Public License. - -This work also comes with the added permission that you may combine it with a -work licensed under the Microsoft Reciprocal License (any version) and -distribute the resulting combined work, as long as you follow the -requirements of the licences of this work in regard to all of the -resulting combined work aside from the work licensed under the Microsoft -Reciprocal License. - -This work also comes with the added permission that you may combine it with a -work licensed under the Sun Industry Standards Source License (any version) -and distribute the resulting combined work, as long as you follow the -requirements of the licences of this work in regard to all of the -resulting combined work aside from the work licensed under the Sun Industry -Standards Source License. 
- -This work also comes with the added permission that you may combine it with a -work licensed under the Open Software License (any version) and distribute -the resulting combined work, as long as you follow the requirements of the -licences of this work in regard to all of the resulting combined work -aside from the work licensed under the Open Software License. - -======================================================= -Transitive Grace Period Public Licence ("TGPPL") v. 1.0 -======================================================= - -This Transitive Grace Period Public Licence (the "License") applies to any -original work of authorship (the "Original Work") whose owner (the -"Licensor") has placed the following licensing notice adjacent to the -copyright notice for the Original Work: - - *Licensed under the Transitive Grace Period Public Licence version 1.0* - -1. **Grant of Copyright License.** Licensor grants You a worldwide, - royalty-free, non-exclusive, sublicensable license, for the duration of - the copyright, to do the following: - - a. to reproduce the Original Work in copies, either alone or as part of a - collective work; - - b. to translate, adapt, alter, transform, modify, or arrange the Original - Work, thereby creating derivative works ("Derivative Works") based upon - the Original Work; - - c. to distribute or communicate copies of the Original Work and Derivative - Works to the public, with the proviso that copies of Original Work or - Derivative Works that You distribute or communicate shall be licensed - under this Transitive Grace Period Public Licence no later than 12 - months after You distributed or communicated said copies; - - d. to perform the Original Work publicly; and - - e. to display the Original Work publicly. - -2. **Grant of Patent License.** Licensor grants You a worldwide, - royalty-free, non-exclusive, sublicensable license, under patent claims - owned or controlled by the Licensor that are embodied in the Original - Work as furnished by the Licensor, for the duration of the patents, to - make, use, sell, offer for sale, have made, and import the Original Work - and Derivative Works. - -3. **Grant of Source Code License.** The term "Source Code" means the - preferred form of the Original Work for making modifications to it and - all available documentation describing how to modify the Original - Work. Licensor agrees to provide a machine-readable copy of the Source - Code of the Original Work along with each copy of the Original Work that - Licensor distributes. Licensor reserves the right to satisfy this - obligation by placing a machine-readable copy of the Source Code in an - information repository reasonably calculated to permit inexpensive and - convenient access by You for as long as Licensor continues to distribute - the Original Work. - -4. **Exclusions From License Grant.** Neither the names of Licensor, nor the - names of any contributors to the Original Work, nor any of their - trademarks or service marks, may be used to endorse or promote products - derived from this Original Work without express prior permission of the - Licensor. Except as expressly stated herein, nothing in this License - grants any license to Licensor's trademarks, copyrights, patents, trade - secrets or any other intellectual property. No patent license is granted - to make, use, sell, offer for sale, have made, or import embodiments of - any patent claims other than the licensed claims defined in Section 2. 
No - license is granted to the trademarks of Licensor even if such marks are - included in the Original Work. Nothing in this License shall be - interpreted to prohibit Licensor from licensing under terms different - from this License any Original Work that Licensor otherwise would have a - right to license. - -5. **External Deployment.** The term "External Deployment" means the use, - distribution, or communication of the Original Work or Derivative Works - in any way such that the Original Work or Derivative Works may be used by - anyone other than You, whether those works are distributed or - communicated to those persons or made available as an application - intended for use over a network. As an express condition for the grants - of license hereunder, You must treat any External Deployment by You of - the Original Work or a Derivative Work as a distribution under section - 1(c). - -6. **Attribution Rights.** You must retain, in the Source Code of any - Derivative Works that You create, all copyright, patent, or trademark - notices from the Source Code of the Original Work, as well as any notices - of licensing and any descriptive text identified therein as an - "Attribution Notice." You must cause the Source Code for any Derivative - Works that You create to carry a prominent Attribution Notice reasonably - calculated to inform recipients that You have modified the Original Work. - -7. **Warranty of Provenance and Disclaimer of Warranty.** Licensor warrants - that the copyright in and to the Original Work and the patent rights - granted herein by Licensor are owned by the Licensor or are sublicensed - to You under the terms of this License with the permission of the - contributor(s) of those copyrights and patent rights. Except as expressly - stated in the immediately preceding sentence, the Original Work is - provided under this License on an "AS IS" BASIS and WITHOUT WARRANTY, - either express or implied, including, without limitation, the warranties - of non-infringement, merchantability or fitness for a particular - purpose. THE ENTIRE RISK AS TO THE QUALITY OF THE ORIGINAL WORK IS WITH - YOU. This DISCLAIMER OF WARRANTY constitutes an essential part of this - License. No license to the Original Work is granted by this License - except under this disclaimer. - -8. **Limitation of Liability.** Under no circumstances and under no legal - theory, whether in tort (including negligence), contract, or otherwise, - shall the Licensor be liable to anyone for any indirect, special, - incidental, or consequential damages of any character arising as a result - of this License or the use of the Original Work including, without - limitation, damages for loss of goodwill, work stoppage, computer failure - or malfunction, or any and all other commercial damages or losses. This - limitation of liability shall not apply to the extent applicable law - prohibits such limitation. - -9. **Acceptance and Termination.** If, at any time, You expressly assented - to this License, that assent indicates your clear and irrevocable - acceptance of this License and all of its terms and conditions. If You - distribute or communicate copies of the Original Work or a Derivative - Work, You must make a reasonable effort under the circumstances to obtain - the express assent of recipients to the terms of this License. 
This - License conditions your rights to undertake the activities listed in - Section 1, including your right to create Derivative Works based upon the - Original Work, and doing so without honoring these terms and conditions - is prohibited by copyright law and international treaty. Nothing in this - License is intended to affect copyright exceptions and limitations - (including 'fair use' or 'fair dealing'). This License shall terminate - immediately and You may no longer exercise any of the rights granted to - You by this License upon your failure to honor the conditions in Section - 1(c). - -10. **Termination for Patent Action.** This License shall terminate - automatically and You may no longer exercise any of the rights granted to - You by this License as of the date You commence an action, including a - cross-claim or counterclaim, against Licensor or any licensee alleging - that the Original Work infringes a patent. This termination provision - shall not apply for an action alleging patent infringement by - combinations of the Original Work with other software or hardware. - -11. **Jurisdiction, Venue and Governing Law.** Any action or suit relating to - this License may be brought only in the courts of a jurisdiction wherein - the Licensor resides or in which Licensor conducts its primary business, - and under the laws of that jurisdiction excluding its conflict-of-law - provisions. The application of the United Nations Convention on Contracts - for the International Sale of Goods is expressly excluded. Any use of the - Original Work outside the scope of this License or after its termination - shall be subject to the requirements and penalties of copyright or patent - law in the appropriate jurisdiction. This section shall survive the - termination of this License. - -12. **Attorneys' Fees.** In any action to enforce the terms of this License - or seeking damages relating thereto, the prevailing party shall be - entitled to recover its costs and expenses, including, without - limitation, reasonable attorneys' fees and costs incurred in connection - with such action, including any appeal of such action. This section shall - survive the termination of this License. - -13. **Miscellaneous.** If any provision of this License is held to be - unenforceable, such provision shall be reformed only to the extent - necessary to make it enforceable. - -14. **Definition of "You" in This License.** "You" throughout this License, - whether in upper or lower case, means an individual or a legal entity - exercising rights under, and complying with all of the terms of, this - License. For legal entities, "You" includes any entity that controls, is - controlled by, or is under common control with you. For purposes of this - definition, "control" means (i) the power, direct or indirect, to cause - the direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - -15. **Right to Use.** You may use the Original Work in all ways not otherwise - restricted or conditioned by this License or by law, and Licensor - promises not to interfere with or be responsible for such uses by You. - -16. **Modification of This License.** This License is Copyright © 2007 Zooko - Wilcox-O'Hearn. Permission is granted to copy, distribute, or communicate - this License without modification. 
Nothing in this License permits You to - modify this License as applied to the Original Work or to Derivative - Works. However, You may modify the text of this License and copy, - distribute or communicate your modified version (the "Modified License") - and apply it to other original works of authorship subject to the - following conditions: (i) You may not indicate in any way that your - Modified License is the "Transitive Grace Period Public Licence" or - "TGPPL" and you may not use those names in the name of your Modified - License; and (ii) You must replace the notice specified in the first - paragraph above with the notice "Licensed under <insert your license name here>" or with a notice of your own that is not confusingly similar to - the notice in this License. diff --git a/zfec/NEWS.txt b/zfec/NEWS.txt deleted file mode 100644 index 15c442d..0000000 --- a/zfec/NEWS.txt +++ /dev/null @@ -1,10 +0,0 @@ -User visible changes in zfec. -*- outline -*- - -* Release 1.4.5 (2009-06-17) - -** Bug fixes - -Fix seg fault if the Python classes Encoder or Decoder are constructed with k or m less than 1, greater than 256, or with k greater than m. -Fix several compiler warnings, add unit tests, improve Python packaging, set up more buildbots to run the unit tests on more platforms. - -For details about older releases, see the version control history. diff --git a/zfec/README.rst b/zfec/README.rst deleted file mode 100644 index 0db69fc..0000000 --- a/zfec/README.rst +++ /dev/null @@ -1,325 +0,0 @@ - - -zfec -- efficient, portable erasure coding tool -=============================================== - -Generate redundant blocks of information such that if some of the blocks are -lost then the original data can be recovered from the remaining blocks. This -package includes command-line tools, C API, Python API, and Haskell API. - - -Intro and Licence ----------------- - -This package implements an "erasure code", or "forward error correction -code". - -You may use this package under the GNU General Public License, version 2 or, -at your option, any later version. You may use this package under the -Transitive Grace Period Public Licence, version 1.0 or, at your option, any -later version. (You may choose to use this package under the terms of either -licence, at your option.) See the file COPYING.GPL for the terms of the GNU -General Public License, version 2. See the file COPYING.TGPPL.rst for the -terms of the Transitive Grace Period Public Licence, version 1.0. - -The most widely known example of an erasure code is the RAID-5 algorithm, -which allows the stored data to be completely recovered in the event of the -loss of any one hard drive. The algorithm in the zfec package -has a similar effect, but instead of recovering from the loss of only a -single element, it can be parameterized to choose in advance the number of -elements whose loss it can tolerate. - -This package is largely based on the old "fec" library by Luigi Rizzo et al., -which is a mature and optimized implementation of erasure coding. The zfec -package makes several changes from the original "fec" package, including -addition of the Python API, refactoring of the C API to support zero-copy -operation, a few clean-ups and optimizations of the core code itself, and the -addition of a command-line tool named "zfec". - - -Installation ------------- - -This package is managed with the "setuptools" package management tool. To -build and install the package directly into your system, just run ``python -./setup.py install``.
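Once installed, a quick sanity check can be run from a Python interpreter. This is a hypothetical snippet, not part of the original instructions; it relies only on the ``zfec.Encoder`` class and its read-only ``k`` and ``m`` attributes exposed by the extension module::

    import zfec

    enc = zfec.Encoder(3, 5)  # k=3: blocks needed to decode; m=5: blocks produced
    assert enc.k == 3 and enc.m == 5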
If you prefer to keep the package limited to a -specific directory so that you can manage it yourself (perhaps by using the -"GNU stow" tool), then give it these arguments: ``python ./setup.py install ---single-version-externally-managed ---record=${specificdirectory}/zfec-install.log ---prefix=${specificdirectory}`` - -To run the self-tests, execute ``python ./setup.py test`` (or if you have -Twisted Python installed, you can run ``trial zfec`` for nicer output and -test options). This will run the tests of the C API, the Python API, and the -command-line tools. - -To run the tests of the Haskell API: ``runhaskell haskell/test/FECTest.hs`` - -Note that you must install the library before running the Haskell API tests, -because the interpreter cannot process FEC.hs: it references an FFI function. - - -Community ---------- - -The source is currently available via darcs on the web with the command: - -darcs get https://tahoe-lafs.org/source/zfec/trunk - -More information on darcs is available at http://darcs.net - -Please post about zfec to the Tahoe-LAFS mailing list and contribute patches: - -<https://tahoe-lafs.org/cgi-bin/mailman/listinfo/tahoe-dev> - - -Overview -------- - -This package performs two operations, encoding and decoding. Encoding takes -some input data and expands its size by producing extra "check blocks", also -called "secondary blocks". Decoding takes some data -- any combination of -blocks of the original data (called "primary blocks") and "secondary blocks" --- and produces the original data. - -The encoding is parameterized by two integers, k and m. m is the total -number of blocks produced, and k is how many of those blocks are necessary to -reconstruct the original data. m is required to be at least 1 and at most -256, and k is required to be at least 1 and at most m. - -(Note that when k == m there is no point in doing erasure coding -- it -degenerates to the equivalent of the Unix "split" utility which simply splits -the input into successive segments. Similarly, when k == 1 it degenerates to -the equivalent of the Unix "cp" utility -- each block is a complete copy of -the input data.) - -Note that each "primary block" is a segment of the original data, so its size -is 1/k'th of the size of original data, and each "secondary block" is of the -same size, so the total space used by all the blocks is m/k times the size of -the original data (plus some padding to fill out the last primary block to be -the same size as all the others). In addition to the data contained in the -blocks themselves there are also a few pieces of metadata which are necessary -for later reconstruction. Those pieces are: 1. the value of k, 2. the -value of m, 3. the sharenum of each block, 4. the number of bytes of -padding that were used. The "zfec" command-line tool compresses these pieces -of data and prepends them to the beginning of each share, so each -sharefile produced by the "zfec" command-line tool is between one and four -bytes larger than the share data alone. - -The decoding step requires as input k of the blocks which were produced by -the encoding step. The decoding step produces as output the data that was -earlier input to the encoding step. (A minimal Python sketch of this round -trip appears below.) - - -Command-Line Tool ------------------ - -The bin/ directory contains two Unix-style command-line tools "zfec" and -"zunfec". Execute ``zfec --help`` or ``zunfec --help`` for usage -instructions. - - -Performance ----------- - -To run the benchmarks, execute the included bench/bench_zfec.py script with -optional --k= and --m= arguments.
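As a concrete illustration of the Overview above, here is a minimal sketch of the encode/decode round trip using the Python API described in the "API" section below. The parameter values (k=3, m=5) and the six-byte input are arbitrary choices for this example, not from the original README::

    import zfec

    k, m = 3, 5
    data = "abcdef"
    # Split the input into k equal-length primary blocks.  (In real use,
    # easyfec.py/filefec.py take care of splitting and padding for you.)
    primary = [data[i * 2:(i + 1) * 2] for i in range(k)]

    enc = zfec.Encoder(k, m)
    # With no second argument, encode() returns all m blocks in order:
    # the k primary blocks followed by the m-k secondary ("check") blocks.
    blocks = enc.encode(primary)
    assert blocks[:k] == primary

    # Any k of the m blocks suffice for reconstruction; here one primary
    # block (blocknum 0) and two secondary blocks (blocknums 3 and 4).
    dec = zfec.Decoder(k, m)
    recovered = dec.decode([blocks[0], blocks[3], blocks[4]], [0, 3, 4])
    assert "".join(recovered) == data

(The snippet uses plain strings as buffers, matching the Python 2 interpreter versions listed under "Dependencies".)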
- -On my Athlon 64 2.4 GHz workstation (running Linux), the "zfec" command-line -tool encoded a 160 MB file with m=100, k=94 (about 6% redundancy) in 3.9 -seconds, whereas the "par2" tool encoded the file with about 6% redundancy in -27 seconds. zfec encoded the same file with m=12, k=6 (100% redundancy) in -4.1 seconds, whereas par2 encoded it with about 100% redundancy in 7 minutes -and 56 seconds. - -The underlying C library in benchmark mode encoded from a file at about 4.9 -million bytes per second and decoded at about 5.8 million bytes per second. - -On Peter's fancy Intel Mac laptop (2.16 GHz Core Duo), it encoded from a file -at about 6.2 million bytes per second. - -On my even fancier Intel Mac laptop (2.33 GHz Core Duo), it encoded from a -file at about 6.8 million bytes per second. - -On my old PowerPC G4 867 MHz Mac laptop, it encoded from a file at about 1.3 -million bytes per second. - -Here is a paper analyzing the performance of various erasure codes and their -implementations, including zfec: - -http://www.usenix.org/events/fast09/tech/full_papers/plank/plank.pdf - -Zfec shows good performance on different machines and with different values -of k and m. It also has a nice small memory footprint. - - -API ---- - -Each block is associated with a "blocknum". The blocknum of each primary block -is its index (starting from zero), so the 0'th block is the first primary -block, which is the first few bytes of the file, the 1'st block is the next -primary block, which is the next few bytes of the file, and so on. The last -primary block has blocknum k-1. The blocknum of each secondary block is an -arbitrary integer between k and 255 inclusive. (When using the Python API, -if you don't specify which secondary blocks you want when invoking encode(), -then it will by default provide the blocks with ids from k to m-1 inclusive.) - -- C API - - fec_encode() takes as input an array of k pointers, where each pointer - points to a memory buffer containing the input data (i.e., the i'th buffer - contains the i'th primary block). There is also a second parameter which - is an array of the blocknums of the secondary blocks which are to be - produced. (Each element in that array is required to be the blocknum of a - secondary block, i.e. it is required to be >= k and < m.) - - The output from fec_encode() is the requested set of secondary blocks which - are written into output buffers provided by the caller. - - Note that this fec_encode() is a "low-level" API in that it requires the - input data to be provided in a set of memory buffers of exactly the right - sizes. If you are starting instead with a single buffer containing all of - the data then please see easyfec.py's "class Encoder" as an example of how - to split a single large buffer into the appropriate set of input buffers - for fec_encode(). If you are starting with a file on disk, then please see - filefec.py's encode_file_stringy_easyfec() for an example of how to read - the data from a file and pass it to "class Encoder". The Python interface - provides these higher-level operations, as does the Haskell interface. If - you implement functions to do these higher-level tasks in other languages, - please send a patch to tahoe-dev@tahoe-lafs.org so that your API can be - included in future releases of zfec. - - fec_decode() takes as input an array of k pointers, where each pointer - points to a buffer containing a block.
There is also a separate input - parameter which is an array of blocknums, indicating the blocknum of each - of the blocks which is being passed in. - - The output from fec_decode() is the set of primary blocks which were - missing from the input and had to be reconstructed. These reconstructed - blocks are written into output buffers provided by the caller. - - -- Python API - - encode() and decode() take as input a sequence of k buffers, where a - "sequence" is any object that implements the Python sequence protocol (such - as a list or tuple) and a "buffer" is any object that implements the Python - buffer protocol (such as a string or array). The contents that are - required to be present in these buffers are the same as for the C API. - - encode() also takes a list of desired blocknums. Unlike the C API, the - Python API accepts blocknums of primary blocks as well as secondary blocks - in its list of desired blocknums. encode() returns a list of buffer - objects which contain the blocks requested. For each requested block which - is a primary block, the resulting list contains a reference to the - appropriate primary block from the input list. For each requested block - which is a secondary block, the list contains a newly created string object - containing that block. - - decode() also takes a list of integers indicating the blocknums of the - blocks being passed in. decode() returns a list of buffer objects which - contain all of the primary blocks of the original data (in order). For - each primary block which was present in the input list, the result - list simply contains a reference to the object that was passed in the input - list. For each primary block which was not present in the input, the - result list contains a newly created string object containing that primary - block. - - Beware of a "gotcha" that can result from the combination of mutable data - and the fact that the Python API returns references to inputs when - possible. - - Returning references to its inputs is efficient since it avoids making an - unnecessary copy of the data, but if the object which was passed as input - is mutable and if that object is mutated after the call to zfec returns, - then the result from zfec -- which is just a reference to that same object - -- will also be mutated. This subtlety is the price you pay for avoiding - data copying. If you don't want to have to worry about this then you can - simply use immutable objects (e.g. Python strings) to hold the data that - you pass to zfec. - -- Haskell API - - The Haskell code is fully Haddocked; to generate the documentation, run - ``runhaskell Setup.lhs haddock``. - - -Utilities --------- - -The filefec.py module has a utility function for efficiently reading a file -and encoding it piece by piece. This module is used by the "zfec" and -"zunfec" command-line tools from the bin/ directory. - - -Dependencies ------------- - -A C compiler is required. To use the Python API or the command-line tools, a -Python interpreter is also required. We have tested it with Python v2.4, -v2.5, v2.6, and v2.7. For the Haskell interface, GHC >= 6.8.1 is required. - - -Acknowledgements ----------------- - -Thanks to the author of the original fec lib, Luigi Rizzo, and the folks that -contributed to it: Phil Karn, Robert Morelos-Zaragoza, Hari Thirumoorthy, and -Dan Rubenstein. Thanks to the Mnet hackers who wrote an earlier Python -wrapper, especially Myers Carpenter and Hauke Johannknecht.
Thanks to Brian -Warner and Amber O'Whielacronx for help with the API, documentation, -debugging, compression, and unit tests. Thanks to Adam Langley for improving -the C API and contributing the Haskell API. Thanks to the creators of GCC -(starting with Richard M. Stallman) and Valgrind (starting with Julian -Seward) for a pair of excellent tools. Thanks to my coworkers at Allmydata --- http://allmydata.com -- Fabrice Grinda, Peter Secor, Rob Kinninmont, Brian -Warner, Zandr Milewski, Justin Boreta, and Mark Meras for sponsoring this work -and releasing it under a Free Software licence. Thanks to Jack Lloyd, Samuel -Neves, and David-Sarah Hopwood. - - -Related Works -------------- - -Note: a Unix-style tool like "zfec" does only one thing -- in this case -erasure coding -- and leaves other tasks to other tools. Other Unix-style -tools that go well with zfec include `GNU tar`_ for archiving multiple files -and directories into one file, `lzip`_ for compression, and `GNU Privacy -Guard`_ for encryption or `b2sum`_ for integrity. It is important to do -things in order: first archive, then compress, then either encrypt or -integrity-check, then erasure code. Note that if GNU Privacy Guard is used -for privacy, then it will also ensure integrity, so the use of b2sum is -unnecessary in that case. Note also that you need to do integrity -checking (such as with b2sum) on the blocks that result from the erasure -coding in *addition* to doing it on the file contents! (There are two -different subtle failure modes -- see "more than one file can match an -immutable file cap" on the `Hack Tahoe-LAFS!`_ Hall of Fame.) - -The `Tahoe-LAFS`_ project uses zfec as part of a complete distributed -filesystem with integrated encryption, integrity, remote distribution of the -blocks, directory structure, backup of changed files or directories, access -control, immutable files and directories, proof-of-retrievability, and repair -of damaged files and directories. - -`fecpp`_ is an alternative to zfec. It implements an algorithm that is -bitwise-compatible with zfec and is BSD-licensed. - -.. _GNU tar: http://directory.fsf.org/project/tar/ -.. _lzip: http://www.nongnu.org/lzip/lzip.html -.. _GNU Privacy Guard: http://gnupg.org/ -.. _b2sum: https://blake2.net/ -.. _Tahoe-LAFS: https://tahoe-lafs.org -.. _Hack Tahoe-LAFS!: https://tahoe-lafs.org/hacktahoelafs/ -.. _fecpp: http://www.randombit.net/code/fecpp/ - - -Enjoy! - -Zooko Wilcox-O'Hearn - -2013-05-15 - -Boulder, Colorado diff --git a/zfec/Setup.lhs b/zfec/Setup.lhs deleted file mode 100755 index 5bde0de..0000000 --- a/zfec/Setup.lhs +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env runhaskell -> import Distribution.Simple -> main = defaultMain diff --git a/zfec/TODO b/zfec/TODO deleted file mode 100644 index b76b8b1..0000000 --- a/zfec/TODO +++ /dev/null @@ -1,15 +0,0 @@ - * INSTALL doc - - * catch EnvironmentError when writing sharefiles and clean up - * try Duff's device in _addmul1()? - * memory usage analysis - * What Every Programmer Should Know About Memory by Ulrich Drepper - * test handling of filesystem exceptional conditions (tricky) - * Jerasure - ** try multiplication without a lookup table? (to save cache pressure) - * conditional compilation to handle a printf that doesn't understand %Zu - * try malloc() instead of alloca() (more portable, possibly better for keeping stacks aligned?)
- - * announce on lwn, p2p-hackers - - * streaming mode diff --git a/zfec/__init__.py b/zfec/__init__.py new file mode 100644 index 0000000..475acf9 --- /dev/null +++ b/zfec/__init__.py @@ -0,0 +1,31 @@ +""" +zfec -- fast forward error correction library with Python interface + +maintainer web site: U{http://tahoe-lafs.org/source/zfec} + +zfec web site: U{http://tahoe-lafs.org/source/zfec} +""" + +__version__ = "unknown" +try: + from _version import __version__ +except ImportError: + # We're running in a tree that hasn't run darcsver, and didn't come with a + # _version.py, so we don't know what our version is. This should not happen + # very often. + pass + +from _fec import Encoder, Decoder, Error +import easyfec, filefec, cmdline_zfec, cmdline_zunfec + +quiet_pyflakes=[__version__, Error, Encoder, Decoder, cmdline_zunfec, filefec, cmdline_zfec, easyfec] + +# zfec -- fast forward error correction library with Python interface + # + # Copyright (C) 2007-2010 Allmydata, Inc. + # Author: Zooko Wilcox-O'Hearn + # mailto:zooko@zooko.com + # + # This file is part of zfec. + # + # See README.rst for licensing information. diff --git a/zfec/_fecmodule.c b/zfec/_fecmodule.c new file mode 100644 index 0000000..a742051 --- /dev/null +++ b/zfec/_fecmodule.c @@ -0,0 +1,648 @@ +/** + * zfec -- fast forward error correction library with Python interface + */ + +#include <Python.h> +#include <structmember.h> +#include <stddef.h> + + +#if (PY_VERSION_HEX < 0x02050000) +typedef int Py_ssize_t; +#endif + +#include "fec.h" + +#include "stdarg.h" + +static PyObject *py_fec_error; + +static char fec__doc__[] = "\ +FEC - Forward Error Correction \n\ +"; + +static char Encoder__doc__[] = "\ +Hold static encoder state (an in-memory table for matrix multiplication), and k and m parameters, and provide {encode()} method.\n\n\ +@param k: the number of packets required for reconstruction \n\ +@param m: the number of packets generated \n\ +"; + +typedef struct { + PyObject_HEAD + + /* expose these */ + unsigned short kk; + unsigned short mm; + + /* internal */ + fec_t* fec_matrix; +} Encoder; + +static PyObject * +Encoder_new(PyTypeObject *type, PyObject *args, PyObject *kwdict) { + Encoder *self; + + self = (Encoder*)type->tp_alloc(type, 0); + if (self != NULL) { + self->kk = 0; + self->mm = 0; + self->fec_matrix = NULL; + } + + return (PyObject *)self; +} + +static int +Encoder_init(Encoder *self, PyObject *args, PyObject *kwdict) { + static char *kwlist[] = { + "k", + "m", + NULL + }; + int ink, inm; + if (!PyArg_ParseTupleAndKeywords(args, kwdict, "ii:Encoder.__init__", kwlist, &ink, &inm)) + return -1; + + if (ink < 1) { + PyErr_Format(py_fec_error, "Precondition violation: first argument is required to be greater than or equal to 1, but it was %d", ink); + return -1; + } + if (inm < 1) { + PyErr_Format(py_fec_error, "Precondition violation: second argument is required to be greater than or equal to 1, but it was %d", inm); + return -1; + } + if (inm > 256) { + PyErr_Format(py_fec_error, "Precondition violation: second argument is required to be less than or equal to 256, but it was %d", inm); + return -1; + } + if (ink > inm) { + PyErr_Format(py_fec_error, "Precondition violation: first argument is required to be less than or equal to the second argument, but they were %d and %d respectively", ink, inm); + return -1; + } + self->kk = (unsigned short)ink; + self->mm = (unsigned short)inm; + self->fec_matrix = fec_new(self->kk, self->mm); + + return 0; +} + +static char Encoder_encode__doc__[] = "\ +Encode data into m packets.\n\ +\n\ +@param inblocks: a
sequence of k buffers of data to encode -- these are the k primary blocks, i.e. the input data split into k pieces (for best performance, make it a tuple instead of a list); All blocks are required to be the same length.\n\ +@param desired_blocks_nums: optional sequence of blocknums indicating which blocks to produce and return; If None, all m blocks will be returned (in order). (For best performance, make it a tuple instead of a list.)\n\ +@returns: a list of buffers containing the requested blocks; Note that if any of the input blocks were 'primary blocks', i.e. their blocknum was < k, then the result sequence will contain a Python reference to the same Python object as was passed in. As long as the Python object in question is immutable (i.e. a string) then you don't have to think about this detail, but if it is mutable (i.e. an array), then you have to be aware that if you subsequently mutate the contents of that object then that will also change the contents of the sequence that was returned from this call to encode().\n\ +"; + +static PyObject * +Encoder_encode(Encoder *self, PyObject *args) { + PyObject* inblocks; + PyObject* desired_blocks_nums = NULL; /* The blocknums of the blocks that should be returned. */ + PyObject* result = NULL; + + gf** check_blocks_produced = (gf**)alloca((self->mm - self->kk) * sizeof(PyObject*)); /* This is an upper bound -- we will actually use only num_check_blocks_produced of these elements (see below). */ + PyObject** pystrs_produced = (PyObject**)alloca((self->mm - self->kk) * sizeof(PyObject*)); /* This is an upper bound -- we will actually use only num_check_blocks_produced of these elements (see below). */ + unsigned num_check_blocks_produced = 0; /* The first num_check_blocks_produced elements of the check_blocks_produced array and of the pystrs_produced array will be used. */ + const gf** incblocks = (const gf**)alloca(self->kk * sizeof(const gf*)); + unsigned num_desired_blocks; + PyObject* fast_desired_blocks_nums = NULL; + PyObject** fast_desired_blocks_nums_items; + unsigned* c_desired_blocks_nums = (unsigned*)alloca(self->mm * sizeof(unsigned)); + unsigned* c_desired_checkblocks_ids = (unsigned*)alloca((self->mm - self->kk) * sizeof(unsigned)); + unsigned i; + PyObject* fastinblocks = NULL; + PyObject** fastinblocksitems; + Py_ssize_t sz, oldsz = 0; + unsigned char check_block_index = 0; /* index into the check_blocks_produced and (parallel) pystrs_produced arrays */ + + if (!PyArg_ParseTuple(args, "O|O:Encoder.encode", &inblocks, &desired_blocks_nums)) + return NULL; + + for (i=0; i<self->mm - self->kk; i++) + pystrs_produced[i] = NULL; + if (desired_blocks_nums) { + fast_desired_blocks_nums = PySequence_Fast(desired_blocks_nums, "Second argument (optional) was not a sequence."); + if (!fast_desired_blocks_nums) + goto err; + num_desired_blocks = PySequence_Fast_GET_SIZE(fast_desired_blocks_nums); + fast_desired_blocks_nums_items = PySequence_Fast_ITEMS(fast_desired_blocks_nums); + for (i=0; i<num_desired_blocks; i++) { + c_desired_blocks_nums[i] = PyInt_AsLong(fast_desired_blocks_nums_items[i]); + if (c_desired_blocks_nums[i] >= self->kk) + num_check_blocks_produced++; + } + } else { + num_desired_blocks = self->mm; + for (i=0; i<num_desired_blocks; i++) + c_desired_blocks_nums[i] = i; + num_check_blocks_produced = self->mm - self->kk; + } + + fastinblocks = PySequence_Fast(inblocks, "First argument was not a sequence."); + if (!fastinblocks) + goto err; + + if (PySequence_Fast_GET_SIZE(fastinblocks) != self->kk) { + PyErr_Format(py_fec_error, "Precondition violation: Wrong length -- first argument (the sequence of input blocks) is required to contain exactly k blocks.
len(first): %Zu, k: %d", PySequence_Fast_GET_SIZE(fastinblocks), self->kk); + goto err; + } + + /* Construct a C array of gf*'s of the input data. */ + fastinblocksitems = PySequence_Fast_ITEMS(fastinblocks); + if (!fastinblocksitems) + goto err; + + for (i=0; i<self->kk; i++) { + if (!PyObject_CheckReadBuffer(fastinblocksitems[i])) { + PyErr_Format(py_fec_error, "Precondition violation: %u'th item is required to offer the single-segment read character buffer protocol, but it does not.", i); + goto err; + } + if (PyObject_AsReadBuffer(fastinblocksitems[i], (const void**)&(incblocks[i]), &sz)) + goto err; + if (oldsz != 0 && oldsz != sz) { + PyErr_Format(py_fec_error, "Precondition violation: Input blocks are required to be all the same length. length of one block was: %Zu, length of another block was: %Zu", oldsz, sz); + goto err; + } + oldsz = sz; + } + + /* Allocate space for all of the check blocks. */ + + for (i=0; i<num_desired_blocks; i++) { + if (c_desired_blocks_nums[i] >= self->kk) { + c_desired_checkblocks_ids[check_block_index] = c_desired_blocks_nums[i]; + pystrs_produced[check_block_index] = PyString_FromStringAndSize(NULL, sz); + if (pystrs_produced[check_block_index] == NULL) + goto err; + check_blocks_produced[check_block_index] = (gf*)PyString_AsString(pystrs_produced[check_block_index]); + if (check_blocks_produced[check_block_index] == NULL) + goto err; + check_block_index++; + } + } + assert (check_block_index == num_check_blocks_produced); + + /* Encode any check blocks that are needed. */ + fec_encode(self->fec_matrix, incblocks, check_blocks_produced, c_desired_checkblocks_ids, num_check_blocks_produced, sz); + + /* Wrap all requested blocks up into a Python list of Python strings. */ + result = PyList_New(num_desired_blocks); + if (result == NULL) + goto err; + check_block_index = 0; + for (i=0; i<num_desired_blocks; i++) { + if (c_desired_blocks_nums[i] < self->kk) { + Py_INCREF(fastinblocksitems[c_desired_blocks_nums[i]]); + if (PyList_SetItem(result, i, fastinblocksitems[c_desired_blocks_nums[i]]) == -1) { + Py_DECREF(fastinblocksitems[c_desired_blocks_nums[i]]); + goto err; + } + } else { + if (PyList_SetItem(result, i, pystrs_produced[check_block_index]) == -1) + goto err; + pystrs_produced[check_block_index] = NULL; + check_block_index++; + } + } + + goto cleanup; + err: + for (i=0; i<num_check_blocks_produced; i++) + Py_XDECREF(pystrs_produced[i]); + Py_XDECREF(result); result = NULL; + cleanup: + Py_XDECREF(fastinblocks); fastinblocks=NULL; + Py_XDECREF(fast_desired_blocks_nums); fast_desired_blocks_nums=NULL; + return result; +} + +static void +Encoder_dealloc(Encoder * self) { + if (self->fec_matrix) + fec_free(self->fec_matrix); + self->ob_type->tp_free((PyObject*)self); +} + +static PyMethodDef Encoder_methods[] = { + {"encode", (PyCFunction)Encoder_encode, METH_VARARGS, Encoder_encode__doc__}, + {NULL}, +}; + +static PyMemberDef Encoder_members[] = { + {"k", T_SHORT, offsetof(Encoder, kk), READONLY, "k"}, + {"m", T_SHORT, offsetof(Encoder, mm), READONLY, "m"}, + {NULL} /* Sentinel */ +}; + +static PyTypeObject Encoder_type = { + PyObject_HEAD_INIT(NULL) + 0, /*ob_size*/ + "_fec.Encoder", /*tp_name*/ + sizeof(Encoder), /*tp_basicsize*/ + 0, /*tp_itemsize*/ + (destructor)Encoder_dealloc, /*tp_dealloc*/ + 0, /*tp_print*/ + 0, /*tp_getattr*/ + 0, /*tp_setattr*/ + 0, /*tp_compare*/ + 0, /*tp_repr*/ + 0, /*tp_as_number*/ + 0, /*tp_as_sequence*/ + 0, /*tp_as_mapping*/ + 0, /*tp_hash */ + 0, /*tp_call*/ + 0, /*tp_str*/ + 0, /*tp_getattro*/ + 0, /*tp_setattro*/ + 0, /*tp_as_buffer*/ + Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /*tp_flags*/ + Encoder__doc__, /* tp_doc */ + 0, /* tp_traverse */ + 0, /* tp_clear */ + 0, /* tp_richcompare */ + 0, /* tp_weaklistoffset */ + 0, /* tp_iter */ + 0, /* tp_iternext */ + Encoder_methods, /* tp_methods */ + Encoder_members, /* tp_members */ + 0, /* tp_getset */ + 0, /* tp_base */ + 0, /* tp_dict */ + 0, /* tp_descr_get */ + 0, /* tp_descr_set */ + 0, /* 
tp_dictoffset */ + (initproc)Encoder_init, /* tp_init */ + 0, /* tp_alloc */ + Encoder_new, /* tp_new */ +}; + +static char Decoder__doc__[] = "\ +Hold static decoder state (an in-memory table for matrix multiplication), and k and m parameters, and provide {decode()} method.\n\n\ +@param k: the number of packets required for reconstruction \n\ +@param m: the number of packets generated \n\ +"; + +typedef struct { + PyObject_HEAD + + /* expose these */ + unsigned short kk; + unsigned short mm; + + /* internal */ + fec_t* fec_matrix; +} Decoder; + +static PyObject * +Decoder_new(PyTypeObject *type, PyObject *args, PyObject *kwdict) { + Decoder *self; + + self = (Decoder*)type->tp_alloc(type, 0); + if (self != NULL) { + self->kk = 0; + self->mm = 0; + self->fec_matrix = NULL; + } + + return (PyObject *)self; +} + +static int +Decoder_init(Encoder *self, PyObject *args, PyObject *kwdict) { + static char *kwlist[] = { + "k", + "m", + NULL + }; + + int ink, inm; + if (!PyArg_ParseTupleAndKeywords(args, kwdict, "ii:Decoder.__init__", kwlist, &ink, &inm)) + return -1; + + if (ink < 1) { + PyErr_Format(py_fec_error, "Precondition violation: first argument is required to be greater than or equal to 1, but it was %d", ink); + return -1; + } + if (inm < 1) { + PyErr_Format(py_fec_error, "Precondition violation: second argument is required to be greater than or equal to 1, but it was %d", inm); + return -1; + } + if (inm > 256) { + PyErr_Format(py_fec_error, "Precondition violation: second argument is required to be less than or equal to 256, but it was %d", inm); + return -1; + } + if (ink > inm) { + PyErr_Format(py_fec_error, "Precondition violation: first argument is required to be less than or equal to the second argument, but they were %d and %d respectively", ink, inm); + return -1; + } + self->kk = (unsigned short)ink; + self->mm = (unsigned short)inm; + self->fec_matrix = fec_new(self->kk, self->mm); + + return 0; +} + +#define SWAP(a,b,t) {t tmp; tmp=a; a=b; b=tmp;} + +static char Decoder_decode__doc__[] = "\ +Decode a list of blocks into a list of segments.\n\ +@param blocks: a sequence of buffers containing block data (for best performance, make it a tuple instead of a list)\n\ +@param blocknums: a sequence of integers of the blocknum for each block in blocks (for best performance, make it a tuple instead of a list)\n\ +\n\ +@return: a list of strings containing the segment data (i.e. ''.join(retval) yields a string containing the decoded data)\n\ +"; + +static PyObject * +Decoder_decode(Decoder *self, PyObject *args) { + PyObject*restrict blocks; + PyObject*restrict blocknums; + PyObject* result = NULL; + + const gf**restrict cblocks = (const gf**restrict)alloca(self->kk * sizeof(const gf*)); + unsigned* cblocknums = (unsigned*)alloca(self->kk * sizeof(unsigned)); + gf**restrict recoveredcstrs = (gf**)alloca(self->kk * sizeof(gf*)); /* self->kk is actually an upper bound -- we probably won't need all of this space. */ + PyObject**restrict recoveredpystrs = (PyObject**restrict)alloca(self->kk * sizeof(PyObject*)); /* self->kk is actually an upper bound -- we probably won't need all of this space. 
*/ + unsigned i; + PyObject*restrict fastblocknums = NULL; + PyObject*restrict fastblocks; + unsigned needtorecover=0; + PyObject** fastblocksitems; + PyObject** fastblocknumsitems; + Py_ssize_t sz, oldsz = 0; + long tmpl; + unsigned nextrecoveredix=0; + + if (!PyArg_ParseTuple(args, "OO:Decoder.decode", &blocks, &blocknums)) + return NULL; + + for (i=0; i<self->kk; i++) + recoveredpystrs[i] = NULL; + fastblocks = PySequence_Fast(blocks, "First argument was not a sequence."); + if (!fastblocks) + goto err; + fastblocknums = PySequence_Fast(blocknums, "Second argument was not a sequence."); + if (!fastblocknums) + goto err; + + if (PySequence_Fast_GET_SIZE(fastblocks) != self->kk) { + PyErr_Format(py_fec_error, "Precondition violation: Wrong length -- first argument is required to contain exactly k blocks. len(first): %Zu, k: %d", PySequence_Fast_GET_SIZE(fastblocks), self->kk); + goto err; + } + if (PySequence_Fast_GET_SIZE(fastblocknums) != self->kk) { + PyErr_Format(py_fec_error, "Precondition violation: Wrong length -- blocknums is required to contain exactly k blocks. len(blocknums): %Zu, k: %d", PySequence_Fast_GET_SIZE(fastblocknums), self->kk); + goto err; + } + + /* Construct a C array of gf*'s of the data and another of C ints of the blocknums. */ + fastblocknumsitems = PySequence_Fast_ITEMS(fastblocknums); + if (!fastblocknumsitems) + goto err; + fastblocksitems = PySequence_Fast_ITEMS(fastblocks); + if (!fastblocksitems) + goto err; + + for (i=0; i<self->kk; i++) { + if (!PyInt_Check(fastblocknumsitems[i])) { + PyErr_Format(py_fec_error, "Precondition violation: second argument is required to contain int."); + goto err; + } + tmpl = PyInt_AsLong(fastblocknumsitems[i]); + if (tmpl < 0 || tmpl > 255) { + PyErr_Format(py_fec_error, "Precondition violation: block nums can't be less than zero or greater than 255. %ld\n", tmpl); + goto err; + } + cblocknums[i] = (unsigned)tmpl; + if (cblocknums[i] >= self->kk) + needtorecover+=1; + + if (!PyObject_CheckReadBuffer(fastblocksitems[i])) { + PyErr_Format(py_fec_error, "Precondition violation: %u'th item is required to offer the single-segment read character buffer protocol, but it does not.\n", i); + goto err; + } + if (PyObject_AsReadBuffer(fastblocksitems[i], (const void**)&(cblocks[i]), &sz)) + goto err; + if (oldsz != 0 && oldsz != sz) { + PyErr_Format(py_fec_error, "Precondition violation: Input blocks are required to be all the same length. length of one block was: %Zu, length of another block was: %Zu\n", oldsz, sz); + goto err; + } + oldsz = sz; + } + + /* Move src packets into position. At the end of this loop we want the i'th + element of the arrays to be the block with block number i, if that block + is among our inputs. */ + for (i=0; i<self->kk;) { + if (cblocknums[i] >= self->kk || cblocknums[i] == i) + i++; + else { + /* put pkt in the right position. */ + unsigned c = cblocknums[i]; + + SWAP (cblocknums[i], cblocknums[c], int); + SWAP (cblocks[i], cblocks[c], const gf*); + SWAP (fastblocksitems[i], fastblocksitems[c], PyObject*); + } + } + + /* Allocate space for all of the recovered blocks. */ + for (i=0; i<needtorecover; i++) { + recoveredpystrs[i] = PyString_FromStringAndSize(NULL, sz); + if (recoveredpystrs[i] == NULL) + goto err; + recoveredcstrs[i] = (gf*)PyString_AsString(recoveredpystrs[i]); + if (recoveredcstrs[i] == NULL) + goto err; + } + + fec_decode(self->fec_matrix, cblocks, recoveredcstrs, cblocknums, sz); + + /* Wrap up both original primary blocks and decoded blocks into a Python list of Python strings. */ + result = PyList_New(self->kk); + if (result == NULL) + goto err; + for (i=0; i<self->kk; i++) { + if (cblocknums[i] == i) { + /* Original primary block.
+static void
+Decoder_dealloc(Decoder * self) {
+    if (self->fec_matrix)
+        fec_free(self->fec_matrix);
+    self->ob_type->tp_free((PyObject*)self);
+}
+
+static PyMethodDef Decoder_methods[] = {
+    {"decode", (PyCFunction)Decoder_decode, METH_VARARGS, Decoder_decode__doc__},
+    {NULL},
+};
+
+static PyMemberDef Decoder_members[] = {
+    {"k", T_SHORT, offsetof(Decoder, kk), READONLY, "k"},
+    {"m", T_SHORT, offsetof(Decoder, mm), READONLY, "m"},
+    {NULL} /* Sentinel */
+};
+
+static PyTypeObject Decoder_type = {
+    PyObject_HEAD_INIT(NULL)
+    0,                         /*ob_size*/
+    "_fec.Decoder",            /*tp_name*/
+    sizeof(Decoder),           /*tp_basicsize*/
+    0,                         /*tp_itemsize*/
+    (destructor)Decoder_dealloc, /*tp_dealloc*/
+    0,                         /*tp_print*/
+    0,                         /*tp_getattr*/
+    0,                         /*tp_setattr*/
+    0,                         /*tp_compare*/
+    0,                         /*tp_repr*/
+    0,                         /*tp_as_number*/
+    0,                         /*tp_as_sequence*/
+    0,                         /*tp_as_mapping*/
+    0,                         /*tp_hash */
+    0,                         /*tp_call*/
+    0,                         /*tp_str*/
+    0,                         /*tp_getattro*/
+    0,                         /*tp_setattro*/
+    0,                         /*tp_as_buffer*/
+    Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /*tp_flags*/
+    Decoder__doc__,            /* tp_doc */
+    0,                         /* tp_traverse */
+    0,                         /* tp_clear */
+    0,                         /* tp_richcompare */
+    0,                         /* tp_weaklistoffset */
+    0,                         /* tp_iter */
+    0,                         /* tp_iternext */
+    Decoder_methods,           /* tp_methods */
+    Decoder_members,           /* tp_members */
+    0,                         /* tp_getset */
+    0,                         /* tp_base */
+    0,                         /* tp_dict */
+    0,                         /* tp_descr_get */
+    0,                         /* tp_descr_set */
+    0,                         /* tp_dictoffset */
+    (initproc)Decoder_init,    /* tp_init */
+    0,                         /* tp_alloc */
+    Decoder_new,               /* tp_new */
+};
+
+
+void
+_hexwrite(unsigned char*s, size_t l) {
+    size_t i;
+    for (i = 0; i < l; i++)
+        printf("%.2x", s[i]);
+}
+
+
+PyObject*
+test_from_agl(PyObject* self, PyObject* args) {
+    unsigned char b0c[8], b1c[8];
+    unsigned char b0[8], b1[8], b2[8], b3[8], b4[8];
+
+    const unsigned char *blocks[3] = {b0, b1, b2};
+    unsigned char *outblocks[2] = {b3, b4};
+    unsigned block_nums[] = {3, 4};
+
+    fec_t *const fec = fec_new(3, 5);
+
+    const unsigned char *inpkts[] = {b3, b4, b2};
+    unsigned char *outpkts[] = {b0, b1};
+    unsigned indexes[] = {3, 4, 2};
+
+    memset(b0, 1, 8);
+    memset(b1, 2, 8);
+    memset(b2, 3, 8);
+
+    /*printf("_from_c before encoding:\n");
+    printf("b0: "); _hexwrite(b0, 8); printf(", ");
+    printf("b1: "); _hexwrite(b1, 8); printf(", ");
+    printf("b2: "); _hexwrite(b2, 8); printf(", ");
+    printf("\n");*/
+
+    fec_encode(fec, blocks, outblocks, block_nums, 2, 8);
+
+    /*printf("after encoding:\n");
+    printf("b3: "); _hexwrite(b3, 8); printf(", ");
+    printf("b4: "); _hexwrite(b4, 8); printf(", ");
+    printf("\n");*/
+
+    memcpy(b0c, b0, 8); memcpy(b1c, b1, 8);
+
+    fec_decode(fec, inpkts, outpkts, indexes, 8);
+
+    /*printf("after decoding:\n");
+    printf("b0: "); _hexwrite(b0, 8); printf(", ");
+    printf("b1: "); _hexwrite(b1, 8);
+    printf("\n");*/
+
+    if ((memcmp(b0, b0c,8) == 0) && (memcmp(b1, b1c,8) == 0))
+        Py_RETURN_TRUE;
+    else
+        Py_RETURN_FALSE;
+}
+
+static PyMethodDef fec_functions[] = {
+    {"test_from_agl", test_from_agl, METH_NOARGS, NULL},
+    {NULL}
+}; + +#ifndef PyMODINIT_FUNC /* declarations for DLL import/export */ +#define PyMODINIT_FUNC void +#endif +PyMODINIT_FUNC +init_fec(void) { + PyObject *module; + PyObject *module_dict; + + if (PyType_Ready(&Encoder_type) < 0) + return; + if (PyType_Ready(&Decoder_type) < 0) + return; + + module = Py_InitModule3("_fec", fec_functions, fec__doc__); + if (module == NULL) + return; + + Py_INCREF(&Encoder_type); + Py_INCREF(&Decoder_type); + + PyModule_AddObject(module, "Encoder", (PyObject *)&Encoder_type); + PyModule_AddObject(module, "Decoder", (PyObject *)&Decoder_type); + + module_dict = PyModule_GetDict(module); + py_fec_error = PyErr_NewException("_fec.Error", NULL, NULL); + PyDict_SetItemString(module_dict, "Error", py_fec_error); +} + +/** + * originally inspired by fecmodule.c by the Mnet Project, especially Myers + * Carpenter and Hauke Johannknecht + */ diff --git a/zfec/bench/bench_zfec.py b/zfec/bench/bench_zfec.py deleted file mode 100644 index 9a9b8e2..0000000 --- a/zfec/bench/bench_zfec.py +++ /dev/null @@ -1,109 +0,0 @@ -from zfec import easyfec, Encoder, filefec -from pyutil import mathutil - -import os, sys - -from pyutil import benchutil - -FNAME="benchrandom.data" - -def _make_new_rand_file(size): - open(FNAME, "wb").write(os.urandom(size)) - -def donothing(results, reslenthing): - pass - -K=3 -M=10 - -d = "" -ds = [] -easyfecenc = None -fecenc = None -def _make_new_rand_data(size, k, m): - global d, easyfecenc, fecenc, K, M - K = k - M = m - d = os.urandom(size) - del ds[:] - ds.extend([None]*k) - blocksize = mathutil.div_ceil(size, k) - for i in range(k): - ds[i] = d[i*blocksize:(i+1)*blocksize] - ds[-1] = ds[-1] + "\x00" * (len(ds[-2]) - len(ds[-1])) - easyfecenc = easyfec.Encoder(k, m) - fecenc = Encoder(k, m) - -import sha -hashers = [ sha.new() for i in range(M) ] -def hashem(results, reslenthing): - for i, result in enumerate(results): - hashers[i].update(result) - -def _encode_file(N): - filefec.encode_file(open(FNAME, "rb"), donothing, K, M) - -def _encode_file_stringy(N): - filefec.encode_file_stringy(open(FNAME, "rb"), donothing, K, M) - -def _encode_file_stringy_easyfec(N): - filefec.encode_file_stringy_easyfec(open(FNAME, "rb"), donothing, K, M) - -def _encode_file_not_really(N): - filefec.encode_file_not_really(open(FNAME, "rb"), donothing, K, M) - -def _encode_file_not_really_and_hash(N): - filefec.encode_file_not_really_and_hash(open(FNAME, "rb"), donothing, K, M) - -def _encode_file_and_hash(N): - filefec.encode_file(open(FNAME, "rb"), hashem, K, M) - -def _encode_data_not_really(N): - # This function is to see how long it takes to run the Python code - # that does this benchmarking and accounting and so on but not - # actually do any erasure-coding, in order to get an idea of how - # much overhead there is in using Python. This exercises the - # basic behavior of allocating buffers to hold the secondary - # shares. 
- sz = N // K - for i in range(M-K): - x = '\x00' * sz - -def _encode_data_easyfec(N): - easyfecenc.encode(d) - -def _encode_data_fec(N): - fecenc.encode(ds) - -def bench(k, m): - SIZE = 10**6 - MAXREPS = 64 - # for f in [_encode_file_stringy_easyfec, _encode_file_stringy, _encode_file, _encode_file_not_really,]: - # for f in [_encode_file,]: - # for f in [_encode_file_not_really, _encode_file_not_really_and_hash, _encode_file, _encode_file_and_hash,]: - # for f in [_encode_data_not_really, _encode_data_easyfec, _encode_data_fec,]: - print "measuring encoding of data with K=%d, M=%d, reporting results in nanoseconds per byte after encoding %d bytes %d times in a row..." % (k, m, SIZE, MAXREPS) - # for f in [_encode_data_fec, _encode_data_not_really]: - for f in [_encode_data_fec]: - def _init_func(size): - return _make_new_rand_data(size, k, m) - for BSIZE in [SIZE]: - results = benchutil.rep_bench(f, n=BSIZE, initfunc=_init_func, MAXREPS=MAXREPS, MAXTIME=None, UNITS_PER_SECOND=1000000000) - print "and now represented in MB/s..." - print - best = results['best'] - mean = results['mean'] - worst = results['worst'] - print "best: % 4.3f MB/sec" % (10**3 / best) - print "mean: % 4.3f MB/sec" % (10**3 / mean) - print "worst: % 4.3f MB/sec" % (10**3 / worst) - -k = K -m = M -for arg in sys.argv: - if arg.startswith('--k='): - k = int(arg[len('--k='):]) - if arg.startswith('--m='): - m = int(arg[len('--m='):]) - -bench(k, m) diff --git a/zfec/changelog b/zfec/changelog deleted file mode 100644 index d4b56a4..0000000 --- a/zfec/changelog +++ /dev/null @@ -1,47 +0,0 @@ -Thu Dec 20 13:55:55 MST 2007 zooko@zooko.com - * zfec: silence a warning when compiling on Mac OS X with gcc, and refactor a complicated #define stanza into the shared header file - -Thu Dec 20 13:55:32 MST 2007 zooko@zooko.com - * zfec: setup: include _version.py so that the zfec package has a version number again - -Thu Dec 20 09:33:55 MST 2007 zooko@zooko.com - tagged zfec-1.3.1 - -Thu Dec 20 09:31:13 MST 2007 zooko@zooko.com - * zfec: dual-license under GPL and TGPPL - -Thu Dec 20 09:26:16 MST 2007 zooko@zooko.com - tagged zfec-1.3.0 - -Thu Dec 20 09:25:31 MST 2007 zooko@zooko.com - * zfec: add "changelog" file, which contains descriptions of the darcs patches since the last release that I think are interesting to users - -Thu Dec 20 09:23:41 MST 2007 zooko@zooko.com - * zfec: setup: require setuptools_darcs >= 1.1.0 (fixes problem with building incomplete packages) - -Wed Nov 14 09:44:26 MST 2007 zooko@zooko.com - * zfec: set STRIDE to 8192 after extensive experimentation on my PowerPC G4 867 MHz (256 KB L2 cache) - - -Mon Nov 12 07:58:19 MST 2007 zooko@zooko.com - * zfec: reorder the inner loop to be more cache-friendly - - Loop over this stride of each input block before looping over all strides of - this input block. In theory, this should allow the strides of the input blocks - to remain in cache while we produce all of the output blocks. 
-
-
-Sun Nov 11 10:04:44 MST 2007  zooko@zooko.com
- * zfec: do encoding within a fixed window of memory in order to be cache friendly
-
-Tue Nov 13 13:13:52 MST 2007  zooko@zooko.com
- * zfec: conditionally-compile the right magic to use alloca() with gcc -mno-cygwin
-
-Tue Nov 13 13:11:33 MST 2007  zooko@zooko.com
- * zfec: setup: fix the spelling of "zfec.fec" package name
-
-Sun Nov 11 08:50:54 MST 2007  zooko@zooko.com
- * zfec: add a TODO note
-
-Fri Nov 9 11:17:04 MST 2007  zooko@zooko.com
- tagged zfec-1.2.0
diff --git a/zfec/cmdline_zfec.py b/zfec/cmdline_zfec.py
new file mode 100755
index 0000000..2eee2b0
--- /dev/null
+++ b/zfec/cmdline_zfec.py
@@ -0,0 +1,72 @@
+#!/usr/bin/env python
+
+# zfec -- a fast C implementation of Reed-Solomon erasure coding with
+# command-line, C, and Python interfaces
+
+import sys
+
+import argparse
+import filefec
+
+from zfec import __version__ as libversion
+__version__ = libversion
+
+DEFAULT_K=3
+DEFAULT_M=8
+
+def main():
+
+    if '-V' in sys.argv or '--version' in sys.argv:
+        print "zfec library version: ", libversion
+        print "zfec command-line tool version: ", __version__
+        sys.exit(0)
+
+    parser = argparse.ArgumentParser(description="Encode a file into a set of share files, a subset of which can later be used to recover the original file.")
+
+    parser.add_argument('inputfile', help='file to encode or "-" for stdin', type=argparse.FileType('rb'), metavar='INF')
+    parser.add_argument('-d', '--output-dir', help='directory in which share file names will be created (default ".")', default='.', metavar='D')
+    parser.add_argument('-p', '--prefix', help='prefix for share file names; if omitted, the name of the input file will be used', metavar='P')
+    parser.add_argument('-s', '--suffix', help='suffix for share file names (default ".fec")', default='.fec', metavar='S')
+    parser.add_argument('-m', '--totalshares', help='the total number of share files created (default %d)' % DEFAULT_M, default=DEFAULT_M, type=int, metavar='M')
+    parser.add_argument('-k', '--requiredshares', help='the number of share files required to reconstruct (default %d)' % DEFAULT_K, default=DEFAULT_K, type=int, metavar='K')
+    parser.add_argument('-f', '--force', help='overwrite any file which is already in place of an output file (share file)', action='store_true')
+    parser.add_argument('-v', '--verbose', help='print out messages about progress', action='store_true')
+    parser.add_argument('-q', '--quiet', help='quiet progress indications and warnings about silly choices of K and M', action='store_true')
+    parser.add_argument('-V', '--version', help='print out version number and exit', action='store_true')
+    args = parser.parse_args()
+
+    if args.prefix is None:
+        args.prefix = args.inputfile.name
+        if args.prefix == "<stdin>":
+            args.prefix = "stdin"
+
+    if args.verbose and args.quiet:
+        print "Please choose only one of --verbose and --quiet."
+        sys.exit(1)
+
+    if args.totalshares > 256 or args.totalshares < 1:
+        print "Invalid parameters, totalshares is required to be <= 256 and >= 1\nPlease see the accompanying documentation."
+        sys.exit(1)
+    if args.requiredshares > args.totalshares or args.requiredshares < 1:
+        print "Invalid parameters, requiredshares is required to be <= totalshares and >= 1\nPlease see the accompanying documentation."
+        sys.exit(1)
+
+    if not args.quiet:
+        if args.requiredshares == 1:
+            print "warning: silly parameters: requiredshares == 1, which means that every share will be a complete copy of the file. You could use \"cp\" for the same effect.
But proceeding to do it anyway..." + if args.requiredshares == args.totalshares: + print "warning: silly parameters: requiredshares == totalshares, which means that all shares will be required in order to reconstruct the file. You could use \"split\" for the same effect. But proceeding to do it anyway..." + + args.inputfile.seek(0, 2) + fsize = args.inputfile.tell() + args.inputfile.seek(0, 0) + return filefec.encode_to_files(args.inputfile, fsize, args.output_dir, args.prefix, args.requiredshares, args.totalshares, args.suffix, args.force, args.verbose) + +# zfec -- fast forward error correction library with Python interface +# +# Copyright (C) 2007 Allmydata, Inc. +# Author: Zooko Wilcox-O'Hearn +# +# This file is part of zfec. +# +# See README.rst for licensing information. diff --git a/zfec/cmdline_zunfec.py b/zfec/cmdline_zunfec.py new file mode 100755 index 0000000..8fc0c04 --- /dev/null +++ b/zfec/cmdline_zunfec.py @@ -0,0 +1,66 @@ +#!/usr/bin/env python + +# zfec -- a fast C implementation of Reed-Solomon erasure coding with +# command-line, C, and Python interfaces + +import os, sys + +import argparse +import filefec + +from zfec import __version__ as libversion +__version__ = libversion + +def main(): + if '-V' in sys.argv or '--version' in sys.argv: + print "zfec library version: ", libversion + print "zunfec command-line tool version: ", __version__ + return 0 + + parser = argparse.ArgumentParser(description="Decode data from share files.") + + parser.add_argument('-o', '--outputfile', required=True, help='file to write the resulting data to, or "-" for stdout', type=str, metavar='OUTF') + parser.add_argument('sharefiles', nargs='*', help='shares file to read the encoded data from', type=unicode, metavar='SHAREFILE') + parser.add_argument('-v', '--verbose', help='print out messages about progress', action='store_true') + parser.add_argument('-f', '--force', help='overwrite any file which already in place of the output file', action='store_true') + parser.add_argument('-V', '--version', help='print out version number and exit', action='store_true') + args = parser.parse_args() + + if len(args.sharefiles) < 2: + print "At least two sharefiles are required." + return 1 + + if args.force: + outf = open(args.outputfile, 'wb') + else: + try: + flags = os.O_WRONLY|os.O_CREAT|os.O_EXCL | (hasattr(os, 'O_BINARY') and os.O_BINARY) + outfd = os.open(args.outputfile, flags) + except OSError: + print "There is already a file named %r -- aborting. Use --force to overwrite." % (args.outputfile,) + return 2 + outf = os.fdopen(outfd, "wb") + + sharefs = [] + # This sort() actually matters for performance (shares with numbers < k + # are much faster to use than the others), as well as being important for + # reproducibility. + args.sharefiles.sort() + for fn in args.sharefiles: + sharefs.append(open(fn, 'rb')) + try: + ret = filefec.decode_from_files(outf, sharefs, args.verbose) + except filefec.InsufficientShareFilesError, e: + print str(e) + return 3 + + return 0 + +# zfec -- fast forward error correction library with Python interface +# +# Copyright (C) 2007 Allmydata, Inc. +# Author: Zooko Wilcox-O'Hearn +# +# This file is part of zfec. +# +# See README.rst for licensing information. diff --git a/zfec/copyright b/zfec/copyright deleted file mode 100644 index b6a4020..0000000 --- a/zfec/copyright +++ /dev/null @@ -1,246 +0,0 @@ -This package was debianized by Zooko O'Whielacronx zooko@zooko.com on -Mon, 22 June 2009 23:30:00 +0000. 
- -It was originally downloaded from http://allmydata.org/trac/zfec - -Upstream Author: Zooko O'Whielacronx - -Copyright: - -You may use this package under the GNU General Public License, version -2 or, at your option, any later version. You may use this package -under the Transitive Grace Period Public Licence, version 1.0 or, at -your option, any later version. (You may choose to use this package -under the terms of either licence, at your option.) See the file -COPYING.GPL for the terms of the GNU General Public License, version 2. -See the file COPYING.TGPPL.rst or the text of the TGPPL in this file, -below, for the terms of the Transitive Grace Period Public Licence, -version 1.0. - -------- -The Debian packaging is © 2009 Zooko O'Whielacronx - it is -licensed under the same terms as the zfec source code itself. - -On Debian systems, the complete text of the GNU General Public License -can be found in `/usr/share/common-licenses/GPL'. - -------- -Portions derived from the "fec" software by Luigi Rizzo, et -al., the copyright notice and licence terms of which are included below -for reference. - -fec/fec.[ch] -- forward error correction based on Vandermonde matrices -980614 -(C) 1997-98 Luigi Rizzo (luigi@iet.unipi.it) - -Portions derived from code by Phil Karn (karn@ka9q.ampr.org), -Robert Morelos-Zaragoza (robert@spectra.eng.hawaii.edu) and Hari -Thirumoorthy (harit@spectra.eng.hawaii.edu), Aug 1995 - -Modifications by Dan Rubenstein (see Modifications.txt for -their description. -Modifications (C) 1998 Dan Rubenstein (drubenst@cs.umass.edu) - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions -are met: - -1. Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. -2. Redistributions in binary form must reproduce the above - copyright notice, this list of conditions and the following - disclaimer in the documentation and/or other materials - provided with the distribution. - -THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, -THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A -PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS -BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, -OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, -PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, -OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR -TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT -OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY -OF SUCH DAMAGE. - -------- - Transitive Grace Period Public Licence ("TGPPL") v. 1.0 - - This Transitive Grace Period Public Licence (the "License") applies to any - original work of authorship (the "Original Work") whose owner (the - "Licensor") has placed the following licensing notice adjacent to the - copyright notice for the Original Work: - - Licensed under the Transitive Grace Period Public Licence version 1.0 - - 1. Grant of Copyright License. Licensor grants You a worldwide, - royalty-free, non-exclusive, sublicensable license, for the duration - of the copyright, to do the following: - a. to reproduce the Original Work in copies, either alone or as part - of a collective work; - b. 
to translate, adapt, alter, transform, modify, or arrange the - Original Work, thereby creating derivative works ("Derivative - Works") based upon the Original Work; - c. to distribute or communicate copies of the Original Work and - Derivative Works to the public, with the proviso that copies of - Original Work or Derivative Works that You distribute or - communicate shall be licensed under this Transitive Grace Period - Public Licence no later than 12 months after You distributed or - communicated said copies; - d. to perform the Original Work publicly; and - e. to display the Original Work publicly. - 2. Grant of Patent License. Licensor grants You a worldwide, - royalty-free, non-exclusive, sublicensable license, under patent - claims owned or controlled by the Licensor that are embodied in the - Original Work as furnished by the Licensor, for the duration of the - patents, to make, use, sell, offer for sale, have made, and import the - Original Work and Derivative Works. - 3. Grant of Source Code License. The term "Source Code" means the - preferred form of the Original Work for making modifications to it and - all available documentation describing how to modify the Original - Work. Licensor agrees to provide a machine-readable copy of the Source - Code of the Original Work along with each copy of the Original Work - that Licensor distributes. Licensor reserves the right to satisfy this - obligation by placing a machine-readable copy of the Source Code in an - information repository reasonably calculated to permit inexpensive and - convenient access by You for as long as Licensor continues to - distribute the Original Work. - 4. Exclusions From License Grant. Neither the names of Licensor, nor the - names of any contributors to the Original Work, nor any of their - trademarks or service marks, may be used to endorse or promote - products derived from this Original Work without express prior - permission of the Licensor. Except as expressly stated herein, nothing - in this License grants any license to Licensor's trademarks, - copyrights, patents, trade secrets or any other intellectual property. - No patent license is granted to make, use, sell, offer for sale, have - made, or import embodiments of any patent claims other than the - licensed claims defined in Section 2. No license is granted to the - trademarks of Licensor even if such marks are included in the Original - Work. Nothing in this License shall be interpreted to prohibit - Licensor from licensing under terms different from this License any - Original Work that Licensor otherwise would have a right to license. - 5. External Deployment. The term "External Deployment" means the use, - distribution, or communication of the Original Work or Derivative - Works in any way such that the Original Work or Derivative Works may - be used by anyone other than You, whether those works are distributed - or communicated to those persons or made available as an application - intended for use over a network. As an express condition for the - grants of license hereunder, You must treat any External Deployment by - You of the Original Work or a Derivative Work as a distribution under - section 1(c). - 6. Attribution Rights. You must retain, in the Source Code of any - Derivative Works that You create, all copyright, patent, or trademark - notices from the Source Code of the Original Work, as well as any - notices of licensing and any descriptive text identified therein as an - "Attribution Notice." 
You must cause the Source Code for any - Derivative Works that You create to carry a prominent Attribution - Notice reasonably calculated to inform recipients that You have - modified the Original Work. - 7. Warranty of Provenance and Disclaimer of Warranty. Licensor warrants - that the copyright in and to the Original Work and the patent rights - granted herein by Licensor are owned by the Licensor or are - sublicensed to You under the terms of this License with the permission - of the contributor(s) of those copyrights and patent rights. Except as - expressly stated in the immediately preceding sentence, the Original - Work is provided under this License on an "AS IS" BASIS and WITHOUT - WARRANTY, either express or implied, including, without limitation, - the warranties of non-infringement, merchantability or fitness for a - particular purpose. THE ENTIRE RISK AS TO THE QUALITY OF THE ORIGINAL - WORK IS WITH YOU. This DISCLAIMER OF WARRANTY constitutes an essential - part of this License. No license to the Original Work is granted by - this License except under this disclaimer. - 8. Limitation of Liability. Under no circumstances and under no legal - theory, whether in tort (including negligence), contract, or - otherwise, shall the Licensor be liable to anyone for any indirect, - special, incidental, or consequential damages of any character arising - as a result of this License or the use of the Original Work including, - without limitation, damages for loss of goodwill, work stoppage, - computer failure or malfunction, or any and all other commercial - damages or losses. This limitation of liability shall not apply to the - extent applicable law prohibits such limitation. - 9. Acceptance and Termination. If, at any time, You expressly assented to - this License, that assent indicates your clear and irrevocable - acceptance of this License and all of its terms and conditions. If You - distribute or communicate copies of the Original Work or a Derivative - Work, You must make a reasonable effort under the circumstances to - obtain the express assent of recipients to the terms of this License. - This License conditions your rights to undertake the activities listed - in Section 1, including your right to create Derivative Works based - upon the Original Work, and doing so without honoring these terms and - conditions is prohibited by copyright law and international treaty. - Nothing in this License is intended to affect copyright exceptions and - limitations (including 'fair use' or 'fair dealing'). This License - shall terminate immediately and You may no longer exercise any of the - rights granted to You by this License upon your failure to honor the - conditions in Section 1(c). - 10. Termination for Patent Action. This License shall terminate - automatically and You may no longer exercise any of the rights granted - to You by this License as of the date You commence an action, - including a cross-claim or counterclaim, against Licensor or any - licensee alleging that the Original Work infringes a patent. This - termination provision shall not apply for an action alleging patent - infringement by combinations of the Original Work with other software - or hardware. - 11. Jurisdiction, Venue and Governing Law. Any action or suit relating to - this License may be brought only in the courts of a jurisdiction - wherein the Licensor resides or in which Licensor conducts its primary - business, and under the laws of that jurisdiction excluding its - conflict-of-law provisions. 
The application of the United Nations - Convention on Contracts for the International Sale of Goods is - expressly excluded. Any use of the Original Work outside the scope of - this License or after its termination shall be subject to the - requirements and penalties of copyright or patent law in the - appropriate jurisdiction. This section shall survive the termination - of this License. - 12. Attorneys' Fees. In any action to enforce the terms of this License or - seeking damages relating thereto, the prevailing party shall be - entitled to recover its costs and expenses, including, without - limitation, reasonable attorneys' fees and costs incurred in - connection with such action, including any appeal of such action. This - section shall survive the termination of this License. - 13. Miscellaneous. If any provision of this License is held to be - unenforceable, such provision shall be reformed only to the extent - necessary to make it enforceable. - 14. Definition of "You" in This License. "You" throughout this License, - whether in upper or lower case, means an individual or a legal entity - exercising rights under, and complying with all of the terms of, this - License. For legal entities, "You" includes any entity that controls, - is controlled by, or is under common control with you. For purposes of - this definition, "control" means (i) the power, direct or indirect, to - cause the direction or management of such entity, whether by contract - or otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - 15. Right to Use. You may use the Original Work in all ways not otherwise - restricted or conditioned by this License or by law, and Licensor - promises not to interfere with or be responsible for such uses by You. - 16. Modification of This License. This License is Copyright (c) 2007 Zooko - Wilcox-O'Hearn. Permission is granted to copy, distribute, or - communicate this License without modification. Nothing in this License - permits You to modify this License as applied to the Original Work or - to Derivative Works. However, You may modify the text of this License - and copy, distribute or communicate your modified version (the - "Modified License") and apply it to other original works of authorship - subject to the following conditions: (i) You may not indicate in any - way that your Modified License is the "Transitive Grace Period Public - Licence" or "TGPPL" and you may not use those names in the name of - your Modified License; and (ii) You must replace the notice specified - in the first paragraph above with the notice "Licensed under " or with - a notice of your own that is not confusingly similar to the notice in - this License. - -------- -Regarding the text "License: BSD3" which appears in -haskell/Codec/FEC.hs: - -On 2008-01-20, Zooko O'Whielacronx wrote: "Will you please give me -rights to use your patches however I like? If you do so, I will -immediately publish your patches under the GPL and under the TGPPL, but -I will also reserve the right to use your patches however I like, or -more probably to delegate that right to allmydata.com." - -Adam Langley replied: "Certainly. I would generally say that GPL is a -little strict for this sort of thing (LGPL would be better), but I put -zfec on Hackage (the Haskell CPAN) with a GPL tag. You may do with the -Haskell code as you wish, including relicensing etc." 
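
The easyfec module introduced below wraps the raw block API: its Encoder.encode() splits a string into k equal chunks, zero-padding the last one. A runnable sketch of that chunk-and-pad arithmetic (values illustrative):

    data = "0123456789"                  # 10 bytes, k = 3
    k = 3
    chunksize = (len(data) + k - 1) // k # div_ceil(10, 3) == 4
    blocks = [data[i*chunksize:(i+1)*chunksize].ljust(chunksize, "\x00")
              for i in range(k)]
    assert blocks == ["0123", "4567", "89\x00\x00"]
    padlen = k*chunksize - len(data)     # == 2; Decoder.decode() strips it
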
diff --git a/zfec/easyfec.py b/zfec/easyfec.py
new file mode 100644
index 0000000..dd209e7
--- /dev/null
+++ b/zfec/easyfec.py
@@ -0,0 +1,64 @@
+# zfec -- a fast C implementation of Reed-Solomon erasure coding with
+# command-line, C, and Python interfaces
+
+import zfec
+
+# div_ceil() was copied from the pyutil library.
+def div_ceil(n, d):
+    """
+    The smallest integer k such that k*d >= n.
+    """
+    return (n/d) + (n%d != 0)
+
+from base64 import b32encode
+def ab(x): # debuggery
+    if len(x) >= 3:
+        return "%s:%s" % (len(x), b32encode(x[-3:]),)
+    elif len(x) == 2:
+        return "%s:%s" % (len(x), b32encode(x[-2:]),)
+    elif len(x) == 1:
+        return "%s:%s" % (len(x), b32encode(x[-1:]),)
+    elif len(x) == 0:
+        return "%s:%s" % (len(x), "--empty--",)
+
+class Encoder(object):
+    def __init__(self, k, m):
+        self.fec = zfec.Encoder(k, m)
+
+    def encode(self, data):
+        """
+        @param data: string
+
+        @return: a sequence of m blocks -- any k of which suffice to
+            reconstruct the input data
+        """
+        chunksize = div_ceil(len(data), self.fec.k)
+        l = [ data[i*chunksize:(i+1)*chunksize] + "\x00" * min(chunksize, (((i+1)*chunksize)-len(data))) for i in range(self.fec.k) ]
+        assert len(l) == self.fec.k, (len(l), self.fec.k,)
+        assert not [ x for x in l if len(x) != len(l[0]) ], (len(l), [ ab(x) for x in l ], chunksize, self.fec.k, len(data),)
+        return self.fec.encode(l)
+
+class Decoder(object):
+    def __init__(self, k, m):
+        self.fec = zfec.Decoder(k, m)
+
+    def decode(self, blocks, sharenums, padlen):
+        """
+        @param padlen: the number of bytes of padding to strip off; Note that
+            the padlen is always equal to (blocksize times k) minus the length
+            of data.  (Therefore, padlen can be 0.)
+        """
+        data = ''.join(self.fec.decode(blocks, sharenums))
+        if padlen:
+            return data[:-padlen]
+        else:
+            return data
+
+# zfec -- fast forward error correction library with Python interface
+#
+# Copyright (C) 2007 Allmydata, Inc.
+# Author: Zooko Wilcox-O'Hearn
+#
+# This file is part of zfec.
+#
+# See README.rst for licensing information.
diff --git a/zfec/fec.c b/zfec/fec.c
new file mode 100644
index 0000000..751c00a
--- /dev/null
+++ b/zfec/fec.c
@@ -0,0 +1,572 @@
+/**
+ * zfec -- fast forward error correction library with Python interface
+ */
+
+#include "fec.h"
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <assert.h>
+
+/*
+ * Primitive polynomials - see Lin & Costello, Appendix A,
+ * and Lee & Messerschmitt, p. 453.
+ */
+static const char*const Pp="101110001";
+
+
+/*
+ * To speed up computations, we have tables for logarithm, exponent and
+ * inverse of a number.  We use a table for multiplication as well (it takes
+ * 64K, no big deal even on a PDA, especially because it can be
+ * pre-initialized and put into a ROM!), otherwise we use a table of
+ * logarithms.  In any case the macro gf_mul(x,y) takes care of
+ * multiplications.
+ */
+
+static gf gf_exp[510];  /* index->poly form conversion table */
+static int gf_log[256]; /* Poly->index form conversion table */
+static gf inverse[256]; /* inverse of field elem. */
+                        /* inv[\alpha**i]=\alpha**(GF_SIZE-i-1) */
+
+/*
+ * modnn(x) computes x % GF_SIZE, where GF_SIZE is 2**GF_BITS - 1,
+ * without a slow divide.
+ */
+static gf
+modnn(int x) {
+    while (x >= 255) {
+        x -= 255;
+        x = (x >> 8) + (x & 255);
+    }
+    return x;
+}
+
+#define SWAP(a,b,t) {t tmp; tmp=a; a=b; b=tmp;}
+
+/*
+ * gf_mul(x,y) multiplies two numbers.  It is much faster to use a
+ * multiplication table.
+ * + * USE_GF_MULC, GF_MULC0(c) and GF_ADDMULC(x) can be used when multiplying + * many numbers by the same constant. In this case the first call sets the + * constant, and others perform the multiplications. A value related to the + * multiplication is held in a local variable declared with USE_GF_MULC . See + * usage in _addmul1(). + */ +static gf gf_mul_table[256][256]; + +#define gf_mul(x,y) gf_mul_table[x][y] + +#define USE_GF_MULC register gf * __gf_mulc_ + +#define GF_MULC0(c) __gf_mulc_ = gf_mul_table[c] +#define GF_ADDMULC(dst, x) dst ^= __gf_mulc_[x] + +/* + * Generate GF(2**m) from the irreducible polynomial p(X) in p[0]..p[m] + * Lookup tables: + * index->polynomial form gf_exp[] contains j= \alpha^i; + * polynomial form -> index form gf_log[ j = \alpha^i ] = i + * \alpha=x is the primitive element of GF(2^m) + * + * For efficiency, gf_exp[] has size 2*GF_SIZE, so that a simple + * multiplication of two numbers can be resolved without calling modnn + */ +static void +_init_mul_table(void) { + int i, j; + for (i = 0; i < 256; i++) + for (j = 0; j < 256; j++) + gf_mul_table[i][j] = gf_exp[modnn (gf_log[i] + gf_log[j])]; + + for (j = 0; j < 256; j++) + gf_mul_table[0][j] = gf_mul_table[j][0] = 0; +} + +#define NEW_GF_MATRIX(rows, cols) \ + (gf*)malloc(rows * cols) + +/* + * initialize the data structures used for computations in GF. + */ +static void +generate_gf (void) { + int i; + gf mask; + + mask = 1; /* x ** 0 = 1 */ + gf_exp[8] = 0; /* will be updated at the end of the 1st loop */ + /* + * first, generate the (polynomial representation of) powers of \alpha, + * which are stored in gf_exp[i] = \alpha ** i . + * At the same time build gf_log[gf_exp[i]] = i . + * The first 8 powers are simply bits shifted to the left. + */ + for (i = 0; i < 8; i++, mask <<= 1) { + gf_exp[i] = mask; + gf_log[gf_exp[i]] = i; + /* + * If Pp[i] == 1 then \alpha ** i occurs in poly-repr + * gf_exp[8] = \alpha ** 8 + */ + if (Pp[i] == '1') + gf_exp[8] ^= mask; + } + /* + * now gf_exp[8] = \alpha ** 8 is complete, so can also + * compute its inverse. + */ + gf_log[gf_exp[8]] = 8; + /* + * Poly-repr of \alpha ** (i+1) is given by poly-repr of + * \alpha ** i shifted left one-bit and accounting for any + * \alpha ** 8 term that may occur when poly-repr of + * \alpha ** i is shifted. + */ + mask = 1 << 7; + for (i = 9; i < 255; i++) { + if (gf_exp[i - 1] >= mask) + gf_exp[i] = gf_exp[8] ^ ((gf_exp[i - 1] ^ mask) << 1); + else + gf_exp[i] = gf_exp[i - 1] << 1; + gf_log[gf_exp[i]] = i; + } + /* + * log(0) is not defined, so use a special value + */ + gf_log[0] = 255; + /* set the extended gf_exp values for fast multiply */ + for (i = 0; i < 255; i++) + gf_exp[i + 255] = gf_exp[i]; + + /* + * again special cases. 0 has no inverse. This used to + * be initialized to 255, but it should make no difference + * since noone is supposed to read from here. + */ + inverse[0] = 0; + inverse[1] = 1; + for (i = 2; i <= 255; i++) + inverse[i] = gf_exp[255 - gf_log[i]]; +} + +/* + * Various linear algebra operations that i use often. + */ + +/* + * addmul() computes dst[] = dst[] + c * src[] + * This is used often, so better optimize it! Currently the loop is + * unrolled 16 times, a good value for 486 and pentium-class machines. + * The case c=0 is also optimized, whereas c=1 is not. These + * calls are unfrequent in my typical apps so I did not bother. 
+ */ +#define addmul(dst, src, c, sz) \ + if (c != 0) _addmul1(dst, src, c, sz) + +#define UNROLL 16 /* 1, 4, 8, 16 */ +static void +_addmul1(register gf*restrict dst, const register gf*restrict src, gf c, size_t sz) { + USE_GF_MULC; + const gf* lim = &dst[sz - UNROLL + 1]; + + GF_MULC0 (c); + +#if (UNROLL > 1) /* unrolling by 8/16 is quite effective on the pentium */ + for (; dst < lim; dst += UNROLL, src += UNROLL) { + GF_ADDMULC (dst[0], src[0]); + GF_ADDMULC (dst[1], src[1]); + GF_ADDMULC (dst[2], src[2]); + GF_ADDMULC (dst[3], src[3]); +#if (UNROLL > 4) + GF_ADDMULC (dst[4], src[4]); + GF_ADDMULC (dst[5], src[5]); + GF_ADDMULC (dst[6], src[6]); + GF_ADDMULC (dst[7], src[7]); +#endif +#if (UNROLL > 8) + GF_ADDMULC (dst[8], src[8]); + GF_ADDMULC (dst[9], src[9]); + GF_ADDMULC (dst[10], src[10]); + GF_ADDMULC (dst[11], src[11]); + GF_ADDMULC (dst[12], src[12]); + GF_ADDMULC (dst[13], src[13]); + GF_ADDMULC (dst[14], src[14]); + GF_ADDMULC (dst[15], src[15]); +#endif + } +#endif + lim += UNROLL - 1; + for (; dst < lim; dst++, src++) /* final components */ + GF_ADDMULC (*dst, *src); +} + +/* + * computes C = AB where A is n*k, B is k*m, C is n*m + */ +static void +_matmul(gf * a, gf * b, gf * c, unsigned n, unsigned k, unsigned m) { + unsigned row, col, i; + + for (row = 0; row < n; row++) { + for (col = 0; col < m; col++) { + gf *pa = &a[row * k]; + gf *pb = &b[col]; + gf acc = 0; + for (i = 0; i < k; i++, pa++, pb += m) + acc ^= gf_mul (*pa, *pb); + c[row * m + col] = acc; + } + } +} + +/* + * _invert_mat() takes a matrix and produces its inverse + * k is the size of the matrix. + * (Gauss-Jordan, adapted from Numerical Recipes in C) + * Return non-zero if singular. + */ +static void +_invert_mat(gf* src, unsigned k) { + gf c, *p; + unsigned irow = 0; + unsigned icol = 0; + unsigned row, col, i, ix; + + unsigned* indxc = (unsigned*) malloc (k * sizeof(unsigned)); + unsigned* indxr = (unsigned*) malloc (k * sizeof(unsigned)); + unsigned* ipiv = (unsigned*) malloc (k * sizeof(unsigned)); + gf *id_row = NEW_GF_MATRIX (1, k); + + memset (id_row, '\0', k * sizeof (gf)); + /* + * ipiv marks elements already used as pivots. + */ + for (i = 0; i < k; i++) + ipiv[i] = 0; + + for (col = 0; col < k; col++) { + gf *pivot_row; + /* + * Zeroing column 'col', look for a non-zero element. + * First try on the diagonal, if it fails, look elsewhere. + */ + if (ipiv[col] != 1 && src[col * k + col] != 0) { + irow = col; + icol = col; + goto found_piv; + } + for (row = 0; row < k; row++) { + if (ipiv[row] != 1) { + for (ix = 0; ix < k; ix++) { + if (ipiv[ix] == 0) { + if (src[row * k + ix] != 0) { + irow = row; + icol = ix; + goto found_piv; + } + } else + assert (ipiv[ix] <= 1); + } + } + } + found_piv: + ++(ipiv[icol]); + /* + * swap rows irow and icol, so afterwards the diagonal + * element will be correct. Rarely done, not worth + * optimizing. + */ + if (irow != icol) + for (ix = 0; ix < k; ix++) + SWAP (src[irow * k + ix], src[icol * k + ix], gf); + indxr[col] = irow; + indxc[col] = icol; + pivot_row = &src[icol * k]; + c = pivot_row[icol]; + assert (c != 0); + if (c != 1) { /* otherwhise this is a NOP */ + /* + * this is done often , but optimizing is not so + * fruitful, at least in the obvious ways (unrolling) + */ + c = inverse[c]; + pivot_row[icol] = 1; + for (ix = 0; ix < k; ix++) + pivot_row[ix] = gf_mul (c, pivot_row[ix]); + } + /* + * from all rows, remove multiples of the selected row + * to zero the relevant entry (in fact, the entry is not zero + * because we know it must be zero). 
+ * (Here, if we know that the pivot_row is the identity, + * we can optimize the addmul). + */ + id_row[icol] = 1; + if (memcmp (pivot_row, id_row, k * sizeof (gf)) != 0) { + for (p = src, ix = 0; ix < k; ix++, p += k) { + if (ix != icol) { + c = p[icol]; + p[icol] = 0; + addmul (p, pivot_row, c, k); + } + } + } + id_row[icol] = 0; + } /* done all columns */ + for (col = k; col > 0; col--) + if (indxr[col-1] != indxc[col-1]) + for (row = 0; row < k; row++) + SWAP (src[row * k + indxr[col-1]], src[row * k + indxc[col-1]], gf); +} + +/* + * fast code for inverting a vandermonde matrix. + * + * NOTE: It assumes that the matrix is not singular and _IS_ a vandermonde + * matrix. Only uses the second column of the matrix, containing the p_i's. + * + * Algorithm borrowed from "Numerical recipes in C" -- sec.2.8, but largely + * revised for my purposes. + * p = coefficients of the matrix (p_i) + * q = values of the polynomial (known) + */ +void +_invert_vdm (gf* src, unsigned k) { + unsigned i, j, row, col; + gf *b, *c, *p; + gf t, xx; + + if (k == 1) /* degenerate case, matrix must be p^0 = 1 */ + return; + /* + * c holds the coefficient of P(x) = Prod (x - p_i), i=0..k-1 + * b holds the coefficient for the matrix inversion + */ + c = NEW_GF_MATRIX (1, k); + b = NEW_GF_MATRIX (1, k); + + p = NEW_GF_MATRIX (1, k); + + for (j = 1, i = 0; i < k; i++, j += k) { + c[i] = 0; + p[i] = src[j]; /* p[i] */ + } + /* + * construct coeffs. recursively. We know c[k] = 1 (implicit) + * and start P_0 = x - p_0, then at each stage multiply by + * x - p_i generating P_i = x P_{i-1} - p_i P_{i-1} + * After k steps we are done. + */ + c[k - 1] = p[0]; /* really -p(0), but x = -x in GF(2^m) */ + for (i = 1; i < k; i++) { + gf p_i = p[i]; /* see above comment */ + for (j = k - 1 - (i - 1); j < k - 1; j++) + c[j] ^= gf_mul (p_i, c[j + 1]); + c[k - 1] ^= p_i; + } + + for (row = 0; row < k; row++) { + /* + * synthetic division etc. + */ + xx = p[row]; + t = 1; + b[k - 1] = 1; /* this is in fact c[k] */ + for (i = k - 1; i > 0; i--) { + b[i-1] = c[i] ^ gf_mul (xx, b[i]); + t = gf_mul (xx, t) ^ b[i-1]; + } + for (col = 0; col < k; col++) + src[col * k + row] = gf_mul (inverse[t], b[col]); + } + free (c); + free (b); + free (p); + return; +} + +static int fec_initialized = 0; +static void +init_fec (void) { + generate_gf(); + _init_mul_table(); + fec_initialized = 1; +} + +/* + * This section contains the proper FEC encoding/decoding routines. + * The encoding matrix is computed starting with a Vandermonde matrix, + * and then transforming it into a systematic matrix. + */ + +#define FEC_MAGIC 0xFECC0DEC + +void +fec_free (fec_t *p) { + assert (p != NULL && p->magic == (((FEC_MAGIC ^ p->k) ^ p->n) ^ (unsigned long) (p->enc_matrix))); + free (p->enc_matrix); + free (p); +} + +fec_t * +fec_new(unsigned short k, unsigned short n) { + unsigned row, col; + gf *p, *tmp_m; + + fec_t *retval; + + if (fec_initialized == 0) + init_fec (); + + retval = (fec_t *) malloc (sizeof (fec_t)); + retval->k = k; + retval->n = n; + retval->enc_matrix = NEW_GF_MATRIX (n, k); + retval->magic = ((FEC_MAGIC ^ k) ^ n) ^ (unsigned long) (retval->enc_matrix); + tmp_m = NEW_GF_MATRIX (n, k); + /* + * fill the matrix with powers of field elements, starting from 0. + * The first row is special, cannot be computed with exp. table. 
+ */
+    tmp_m[0] = 1;
+    for (col = 1; col < k; col++)
+        tmp_m[col] = 0;
+    for (p = tmp_m + k, row = 0; row < n - 1; row++, p += k)
+        for (col = 0; col < k; col++)
+            p[col] = gf_exp[modnn (row * col)];
+
+    /*
+     * quick code to build systematic matrix: invert the top
+     * k*k vandermonde matrix, multiply right the bottom n-k rows
+     * by the inverse, and construct the identity matrix at the top.
+     */
+    _invert_vdm (tmp_m, k);        /* much faster than _invert_mat */
+    _matmul(tmp_m + k * k, tmp_m, retval->enc_matrix + k * k, n - k, k, k);
+    /*
+     * the upper matrix is I so do not bother with a slow multiply
+     */
+    memset (retval->enc_matrix, '\0', k * k * sizeof (gf));
+    for (p = retval->enc_matrix, col = 0; col < k; col++, p += k + 1)
+        *p = 1;
+    free (tmp_m);
+
+    return retval;
+}
+
+/* To make sure that we stay within cache in the inner loops of fec_encode().  (It would
+   probably help to also do this for fec_decode().) */
+#ifndef STRIDE
+#define STRIDE 8192
+#endif
+
+void
+fec_encode(const fec_t* code, const gf*restrict const*restrict const src, gf*restrict const*restrict const fecs, const unsigned*restrict const block_nums, size_t num_block_nums, size_t sz) {
+    unsigned char i, j;
+    size_t k;
+    unsigned fecnum;
+    const gf* p;
+
+    for (k = 0; k < sz; k += STRIDE) {
+        size_t stride = ((sz-k) < STRIDE)?(sz-k):STRIDE;
+        for (i=0; i<num_block_nums; i++) {
+            fecnum=block_nums[i];
+            assert (fecnum >= code->k);
+            memset(fecs[i]+k, 0, stride);
+            p = &(code->enc_matrix[fecnum * code->k]);
+            for (j = 0; j < code->k; j++)
+                addmul(fecs[i]+k, src[j]+k, p[j], stride);
+        }
+    }
+}
+
+/**
+ * Build decode matrix into some memory space.
+ *
+ * @param matrix a space allocated for a k by k matrix
+ */
+void
+build_decode_matrix_into_space(const fec_t*restrict const code, const unsigned*const restrict index, const unsigned k, gf*restrict const matrix) {
+    unsigned char i;
+    gf* p;
+    for (i=0, p=matrix; i < k; i++, p += k) {
+        if (index[i] < k) {
+            memset(p, 0, k);
+            p[i] = 1;
+        } else {
+            memcpy(p, &(code->enc_matrix[index[i] * code->k]), k);
+        }
+    }
+    _invert_mat (matrix, k);
+}
+
+void
+fec_decode(const fec_t* code, const gf*restrict const*restrict const inpkts, gf*restrict const*restrict const outpkts, const unsigned*restrict const index, size_t sz) {
+    gf* m_dec = (gf*)alloca(code->k * code->k);
+    unsigned char outix=0;
+    unsigned char row=0;
+    unsigned char col=0;
+    build_decode_matrix_into_space(code, index, code->k, m_dec);
+
+    for (row=0; row<code->k; row++) {
+        assert ((index[row] >= code->k) || (index[row] == row)); /* If the block whose number is i is present, then it is required to be in the i'th element. */
+        if (index[row] >= code->k) {
+            memset(outpkts[outix], 0, sz);
+            for (col=0; col < code->k; col++)
+                addmul(outpkts[outix], inpkts[col], m_dec[row * code->k + col], sz);
+            outix++;
+        }
+    }
+}
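
The GF(2^8) arithmetic that addmul() and the decode matrix rely on can be reproduced in a few lines -- a runnable Python sketch of the log/exp-table multiply built by generate_gf() above (0x11d is the reduction polynomial encoded by Pp = "101110001"):

    gf_exp, gf_log = [0]*510, [0]*256
    x = 1
    for i in range(255):
        gf_exp[i] = x
        gf_log[x] = i
        x <<= 1
        if x & 0x100:
            x ^= 0x11d            # reduce modulo the primitive polynomial
    for i in range(255, 510):
        gf_exp[i] = gf_exp[i-255] # doubled table avoids the modnn() call

    def gf_mul(a, b):
        if a == 0 or b == 0:
            return 0
        return gf_exp[gf_log[a] + gf_log[b]]

    assert gf_mul(2, 2) == 4 and gf_mul(0x80, 0x02) == 0x1d
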
+
+/**
+ * zfec -- fast forward error correction library with Python interface
+ *
+ * Copyright (C) 2007-2010 Zooko Wilcox-O'Hearn
+ * Author: Zooko Wilcox-O'Hearn
+ *
+ * This file is part of zfec.
+ *
+ * See README.rst for licensing information.
+ */
+
+/*
+ * This work is derived from the "fec" software by Luigi Rizzo, et al., the
+ * copyright notice and licence terms of which are included below for reference.
+ * fec.c -- forward error correction based on Vandermonde matrices 980624 (C)
+ * 1997-98 Luigi Rizzo (luigi@iet.unipi.it)
+ *
+ * Portions derived from code by Phil Karn (karn@ka9q.ampr.org),
+ * Robert Morelos-Zaragoza (robert@spectra.eng.hawaii.edu) and Hari
+ * Thirumoorthy (harit@spectra.eng.hawaii.edu), Aug 1995
+ *
+ * Modifications by Dan Rubenstein (see Modifications.txt for
+ * their description.
+ * Modifications (C) 1998 Dan Rubenstein (drubenst@cs.umass.edu)
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright
+ *    notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above
+ *    copyright notice, this list of conditions and the following
+ *    disclaimer in the documentation and/or other materials
+ *    provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND
+ * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
+ * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
+ * PARTICULAR PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
+ * OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
+ * OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
+ * TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
+ * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY
+ * OF SUCH DAMAGE.
+ */
diff --git a/zfec/fec.cabal b/zfec/fec.cabal
deleted file mode 100644
index a0668eb..0000000
--- a/zfec/fec.cabal
+++ /dev/null
@@ -1,30 +0,0 @@
-name:            fec
-version:         0.1.1
-license:         GPL
-license-file:    README.rst
-author:          Adam Langley
-maintainer:      Adam Langley
-description:     This code, based on zfec by Zooko, based on code by Luigi
-                 Rizzo implements an erasure code, or forward error
-                 correction code. The most widely known example of an erasure
-                 code is the RAID-5 algorithm which makes it so that in the
-                 event of the loss of any one hard drive, the stored data can
-                 be completely recovered. The algorithm in the zfec package
-                 has a similar effect, but instead of recovering from the loss
-                 of only a single element, it can be parameterized to choose in
-                 advance the number of elements whose loss it can tolerate.
-build-type:      Simple
-homepage:        http://allmydata.org/source/zfec
-synopsis:        Forward error correction of ByteStrings
-category:        Codec
-build-depends:   base, bytestring>=0.9
-stability:       provisional
-tested-with:     GHC == 6.8.2
-exposed-modules: Codec.FEC
-extensions:      ForeignFunctionInterface
-hs-source-dirs:  haskell
-ghc-options:     -Wall
-c-sources:       zfec/fec.c
-cc-options:      -std=c99
-include-dirs:    zfec
-extra-source-files: zfec/fec.h, COPYING.GPL, COPYING.TGPPL.rst
diff --git a/zfec/fec.h b/zfec/fec.h
new file mode 100644
index 0000000..249fe66
--- /dev/null
+++ b/zfec/fec.h
@@ -0,0 +1,111 @@
+/**
+ * zfec -- fast forward error correction library with Python interface
+ *
+ * See README.rst for documentation.
+ */
+
+#include <stddef.h>
+
+typedef unsigned char gf;
+
+typedef struct {
+  unsigned long magic;
+  unsigned short k, n;                     /* parameters of the code */
+  gf* enc_matrix;
+} fec_t;
+
+#if defined(_MSC_VER)
+// actually, some of the flavors (i.e. Enterprise) do support restrict
+//#define restrict __restrict
+#define restrict
+#endif
+
+/**
+ * param k the number of blocks required to reconstruct
+ * param m the total number of blocks created
+ */
+fec_t* fec_new(unsigned short k, unsigned short m);
+void fec_free(fec_t* p);
+
+/**
+ * @param src the "primary blocks" i.e. the chunks of the input data
+ * @param fecs buffers into which the secondary blocks will be written
+ * @param block_nums the numbers of the desired check blocks (the id >= k) which fec_encode() will produce and store into the buffers of the fecs parameter
+ * @param num_block_nums the length of the block_nums array
+ * @param sz size of a packet in bytes
+ */
+void fec_encode(const fec_t* code, const gf*restrict const*restrict const src, gf*restrict const*restrict const fecs, const unsigned*restrict const block_nums, size_t num_block_nums, size_t sz);
+
+/**
+ * @param inpkts an array of packets (size k); If a primary block, i, is present then it must be at index i. Secondary blocks can appear anywhere.
+ * @param outpkts an array of buffers into which the reconstructed output packets will be written (only packets which are not present in the inpkts input will be reconstructed and written to outpkts)
+ * @param index an array of the blocknums of the packets in inpkts
+ * @param sz size of a packet in bytes
+ */
+void fec_decode(const fec_t* code, const gf*restrict const*restrict const inpkts, gf*restrict const*restrict const outpkts, const unsigned*restrict const index, size_t sz);
+
+#if defined(_MSC_VER)
+#define alloca _alloca
+#else
+#ifdef __GNUC__
+#ifndef alloca
+#define alloca(x) __builtin_alloca(x)
+#endif
+#else
+#include <alloca.h>
+#endif
+#endif
+
+/**
+ * zfec -- fast forward error correction library with Python interface
+ *
+ * Copyright (C) 2007-2008 Allmydata, Inc.
+ * Author: Zooko Wilcox-O'Hearn
+ *
+ * This file is part of zfec.
+ *
+ * See README.rst for licensing information.
+ */
+
+/*
+ * Much of this work is derived from the "fec" software by Luigi Rizzo, et
+ * al., the copyright notice and licence terms of which are included below
+ * for reference.
+ *
+ * fec.h -- forward error correction based on Vandermonde matrices
+ * 980614
+ * (C) 1997-98 Luigi Rizzo (luigi@iet.unipi.it)
+ *
+ * Portions derived from code by Phil Karn (karn@ka9q.ampr.org),
+ * Robert Morelos-Zaragoza (robert@spectra.eng.hawaii.edu) and Hari
+ * Thirumoorthy (harit@spectra.eng.hawaii.edu), Aug 1995
+ *
+ * Modifications by Dan Rubenstein (see Modifications.txt for
+ * their description.
+ * Modifications (C) 1998 Dan Rubenstein (drubenst@cs.umass.edu)
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright
+ *    notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above
+ *    copyright notice, this list of conditions and the following
+ *    disclaimer in the documentation and/or other materials
+ *    provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND
+ * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
+ * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
+ * PARTICULAR PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
+ * OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
+ * OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
+ * TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
+ * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY
+ * OF SUCH DAMAGE.
+ */
+
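
filefec.py, added below, prefixes every share file with a small header; its _build_header() packs (m, k, pad, shnum) into as few as two bytes. A runnable sketch of that packing for m=8, k=3, pad=1, shnum=6 (log_ceil mirrors pyutil.mathutil.log_ceil):

    import struct

    def log_ceil(n, b):                 # smallest k such that b**k >= n
        p, k = 1, 0
        while p < n:
            p, k = p*b, k+1
        return k

    m, k, pad, sh = 8, 3, 1, 6
    val = m - 1                                # 8 bits for m
    val = (val << log_ceil(m, 2)) | (k - 1)    # 3 more bits for k when m == 8
    val = (val << log_ceil(k, 2)) | pad        # 2 more bits for pad when k == 3
    val = (val << log_ceil(m, 2)) | sh         # 3 more bits for shnum
    assert struct.pack(">H", val) == "\x07\x4e"  # 16 bits -> a 2-byte header
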
diff --git a/zfec/filefec.py b/zfec/filefec.py
new file mode 100644
index 0000000..48f3d9d
--- /dev/null
+++ b/zfec/filefec.py
@@ -0,0 +1,504 @@
+import easyfec, zfec
+from pyutil import fileutil
+from pyutil.mathutil import pad_size, log_ceil
+
+import array, os, struct
+
+CHUNKSIZE = 4096
+
+from base64 import b32encode
+def ab(x): # debuggery
+    if len(x) >= 3:
+        return "%s:%s" % (len(x), b32encode(x[-3:]),)
+    elif len(x) == 2:
+        return "%s:%s" % (len(x), b32encode(x[-2:]),)
+    elif len(x) == 1:
+        return "%s:%s" % (len(x), b32encode(x[-1:]),)
+    elif len(x) == 0:
+        return "%s:%s" % (len(x), "--empty--",)
+
+class InsufficientShareFilesError(zfec.Error):
+    def __init__(self, k, kb, *args, **kwargs):
+        zfec.Error.__init__(self, *args, **kwargs)
+        self.k = k
+        self.kb = kb
+
+    def __repr__(self):
+        return "Insufficient share files -- %d share files are required to recover this file, but only %d were given" % (self.k, self.kb,)
+
+    def __str__(self):
+        return self.__repr__()
+
+class CorruptedShareFilesError(zfec.Error):
+    pass
+
+def _build_header(m, k, pad, sh):
+    """
+    @param m: the total number of shares; 1 <= m <= 256
+    @param k: the number of shares required to reconstruct; 1 <= k <= m
+    @param pad: the number of bytes of padding added to the file before encoding; 0 <= pad < k
+    @param sh: the shnum of this share; 0 <= sh < m
+
+    @return: a compressed string encoding m, k, pad, and sh
+    """
+    assert m >= 1
+    assert m <= 2**8
+    assert k >= 1
+    assert k <= m
+    assert pad >= 0
+    assert pad < k
+
+    assert sh >= 0
+    assert sh < m
+
+    bitsused = 0
+    val = 0
+
+    val |= (m - 1)
+    bitsused += 8 # the first 8 bits always encode m
+
+    kbits = log_ceil(m, 2) # num bits needed to store all possible values of k
+    val <<= kbits
+    bitsused += kbits
+
+    val |= (k - 1)
+
+    padbits = log_ceil(k, 2) # num bits needed to store all possible values of pad
+    val <<= padbits
+    bitsused += padbits
+
+    val |= pad
+
+    shnumbits = log_ceil(m, 2) # num bits needed to store all possible values of shnum
+    val <<= shnumbits
+    bitsused += shnumbits
+
+    val |= sh
+
+    assert bitsused >= 8, bitsused
+    assert bitsused <= 32, bitsused
+
+    if bitsused <= 16:
+        val <<= (16-bitsused)
+        cs = struct.pack('>H', val)
+        assert cs[:-2] == '\x00' * (len(cs)-2)
+        return cs[-2:]
+    if bitsused <= 24:
+        val <<= (24-bitsused)
+        cs = struct.pack('>I', val)
+        assert cs[:-3] == '\x00' * (len(cs)-3)
+        return cs[-3:]
+    else:
+        val <<= (32-bitsused)
+        cs = struct.pack('>I', val)
+        assert cs[:-4] == '\x00' * (len(cs)-4)
+        return cs[-4:]
+
+def MASK(bits):
+    return (1<<bits)-1
+
+def _parse_header(inf):
+    """
+    @param inf: an object which I can call read(1) on to get another byte
+
+    @return: tuple of (m, k, pad, sh,); side-effect: the first one to four
+        bytes of inf will be read
+    """
+    # The first 8 bits always encode m.
+    ch = inf.read(1)
+    if not ch:
+        raise CorruptedShareFilesError("Share files were corrupted -- share file %r didn't have a complete metadata header at the front. Perhaps the file was truncated." % (inf.name,))
+    m = struct.unpack(">B", ch)[0] + 1
+
+    # The next few bits encode k.
+    kbits = log_ceil(m, 2) # num bits needed to store all possible values of k
+    b2_bits_left = 8-kbits
+    kbitmask = MASK(kbits) << b2_bits_left
+    ch = inf.read(1)
+    if not ch:
+        raise CorruptedShareFilesError("Share files were corrupted -- share file %r didn't have a complete metadata header at the front. Perhaps the file was truncated." % (inf.name,))
+    byte = struct.unpack(">B", ch)[0]
+    k = ((byte & kbitmask) >> b2_bits_left) + 1
+
+    shbits = log_ceil(m, 2) # num bits needed to store all possible values of shnum
+    padbits = log_ceil(k, 2) # num bits needed to store all possible values of pad
+
+    val = byte & (~kbitmask)
+
+    needed_padbits = padbits - b2_bits_left
+    if needed_padbits > 0:
+        ch = inf.read(1)
+        if not ch:
+            raise CorruptedShareFilesError("Share files were corrupted -- share file %r didn't have a complete metadata header at the front.
Perhaps the file was truncated." % (inf.name,)) + byte = struct.unpack(">B", ch)[0] + val <<= 8 + val |= byte + needed_padbits -= 8 + assert needed_padbits <= 0 + extrabits = -needed_padbits + pad = val >> extrabits + val &= MASK(extrabits) + + needed_shbits = shbits - extrabits + if needed_shbits > 0: + ch = inf.read(1) + if not ch: + raise CorruptedShareFilesError("Share files were corrupted -- share file %r didn't have a complete metadata header at the front. Perhaps the file was truncated." % (inf.name,)) + byte = struct.unpack(">B", ch)[0] + val <<= 8 + val |= byte + needed_shbits -= 8 + assert needed_shbits <= 0 + + gotshbits = -needed_shbits + + sh = val >> gotshbits + + return (m, k, pad, sh,) + +FORMAT_FORMAT = "%%s.%%0%dd_%%0%dd%%s" +RE_FORMAT = "%s.[0-9]+_[0-9]+%s" +def encode_to_files(inf, fsize, dirname, prefix, k, m, suffix=".fec", overwrite=False, verbose=False): + """ + Encode inf, writing the shares to specially named, newly created files. + + @param fsize: calling read() on inf must yield fsize bytes of data and + then raise an EOFError + @param dirname: the name of the directory into which the sharefiles will + be written + """ + mlen = len(str(m)) + format = FORMAT_FORMAT % (mlen, mlen,) + + padbytes = pad_size(fsize, k) + + fns = [] + fs = [] + try: + for shnum in range(m): + hdr = _build_header(m, k, padbytes, shnum) + + fn = os.path.join(dirname, format % (prefix, shnum, m, suffix,)) + if verbose: + print "Creating share file %r..." % (fn,) + if overwrite: + f = open(fn, "wb") + else: + flags = os.O_WRONLY|os.O_CREAT|os.O_EXCL | (hasattr(os, 'O_BINARY') and os.O_BINARY) + fd = os.open(fn, flags) + f = os.fdopen(fd, "wb") + f.write(hdr) + fs.append(f) + fns.append(fn) + sumlen = [0] + def cb(blocks, length): + assert len(blocks) == len(fs) + oldsumlen = sumlen[0] + sumlen[0] += length + if verbose: + if int((float(oldsumlen) / fsize) * 10) != int((float(sumlen[0]) / fsize) * 10): + print str(int((float(sumlen[0]) / fsize) * 10) * 10) + "% ...", + + if sumlen[0] > fsize: + raise IOError("Wrong file size -- possibly the size of the file changed during encoding. Original size: %d, observed size at least: %s" % (fsize, sumlen[0],)) + for i in range(len(blocks)): + data = blocks[i] + fs[i].write(data) + length -= len(data) + + encode_file_stringy_easyfec(inf, cb, k, m, chunksize=4096) + except EnvironmentError, le: + print "Cannot complete because of exception: " + print le + print "Cleaning up..." + # clean up + while fs: + f = fs.pop() + f.close() ; del f + fn = fns.pop() + if verbose: + print "Cleaning up: trying to remove %r..." % (fn,) + fileutil.remove_if_possible(fn) + return 1 + if verbose: + print + print "Done!" + return 0 + +# Note: if you really prefer base-2 and you change this code, then please +# denote 2^20 as "MiB" instead of "MB" in order to avoid ambiguity. See: +# http://en.wikipedia.org/wiki/Megabyte +# Thanks. +MILLION_BYTES=10**6 + +def decode_from_files(outf, infiles, verbose=False): + """ + Decode from the first k files in infiles, writing the results to outf. 
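An aside on the share-file header format defined above: the packed value spends 8 bits on m-1, then a log2-ceiling(m)-bit field on k-1, a log2-ceiling(k)-bit field on pad, and a log2-ceiling(m)-bit field on sh, rounded up to whole bytes. A minimal round-trip sketch (illustrative only, not part of the patch; it assumes the _build_header/_parse_header definitions above and Python 2's str-as-bytes semantics):

    from StringIO import StringIO
    from zfec import filefec

    hdr = filefec._build_header(m=5, k=3, pad=2, sh=4)
    # 8 bits for m-1, 3 bits for k-1, 2 bits for pad, 3 bits for sh
    # -> 16 bits, so this particular header packs into exactly 2 bytes.
    f = StringIO(hdr)
    f.name = "<in-memory>"  # _parse_header reports inf.name in error messages
    assert filefec._parse_header(f) == (5, 3, 2, 4)
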
+ """ + assert len(infiles) >= 2 + infs = [] + shnums = [] + m = None + k = None + padlen = None + + byteswritten = 0 + for f in infiles: + (nm, nk, npadlen, shnum,) = _parse_header(f) + if not (m is None or m == nm): + raise CorruptedShareFilesError("Share files were corrupted -- share file %r said that m was %s but another share file previously said that m was %s" % (f.name, nm, m,)) + m = nm + if not (k is None or k == nk): + raise CorruptedShareFilesError("Share files were corrupted -- share file %r said that k was %s but another share file previously said that k was %s" % (f.name, nk, k,)) + if k > len(infiles): + raise InsufficientShareFilesError(k, len(infiles)) + k = nk + if not (padlen is None or padlen == npadlen): + raise CorruptedShareFilesError("Share files were corrupted -- share file %r said that pad length was %s but another share file previously said that pad length was %s" % (f.name, npadlen, padlen,)) + padlen = npadlen + + infs.append(f) + shnums.append(shnum) + + if len(infs) == k: + break + + dec = easyfec.Decoder(k, m) + + while True: + chunks = [ inf.read(CHUNKSIZE) for inf in infs ] + if [ch for ch in chunks if len(ch) != len(chunks[-1])]: + raise CorruptedShareFilesError("Share files were corrupted -- all share files are required to be the same length, but they weren't.") + + if len(chunks[-1]) == CHUNKSIZE: + # Then this was a full read, so we're still in the sharefiles. + resultdata = dec.decode(chunks, shnums, padlen=0) + outf.write(resultdata) + byteswritten += len(resultdata) + if verbose: + if ((byteswritten - len(resultdata)) / (10*MILLION_BYTES)) != (byteswritten / (10*MILLION_BYTES)): + print str(byteswritten / MILLION_BYTES) + " MB ...", + else: + # Then this was a short read, so we've reached the end of the sharefiles. + resultdata = dec.decode(chunks, shnums, padlen) + outf.write(resultdata) + return # Done. + if verbose: + print + print "Done!" + +def encode_file(inf, cb, k, m, chunksize=4096): + """ + Read in the contents of inf, encode, and call cb with the results. + + First, k "input blocks" will be read from inf, each input block being of + size chunksize. Then these k blocks will be encoded into m "result + blocks". Then cb will be invoked, passing a list of the m result blocks + as its first argument, and the length of the encoded data as its second + argument. (The length of the encoded data is always equal to k*chunksize, + until the last iteration, when the end of the file has been reached and + less than k*chunksize bytes could be read from the file.) This procedure + is iterated until the end of the file is reached, in which case the space + of the input blocks that is unused is filled with zeroes before encoding. + + Note that the sequence passed in calls to cb() contains mutable array + objects in its first k elements whose contents will be overwritten when + the next segment is read from the input file. Therefore the + implementation of cb() has to either be finished with those first k arrays + before returning, or if it wants to keep the contents of those arrays for + subsequent use after it has returned then it must make a copy of them to + keep. 
+ + @param inf the file object from which to read the data + @param cb the callback to be invoked with the results + @param k the number of shares required to reconstruct the file + @param m the total number of shares created + @param chunksize how much data to read from inf for each of the k input + blocks + """ + enc = zfec.Encoder(k, m) + l = tuple([ array.array('c') for i in range(k) ]) + indatasize = k*chunksize # will be reset to shorter upon EOF + eof = False + ZEROES=array.array('c', ['\x00'])*chunksize + while not eof: + # This loop body executes once per segment. + i = 0 + while (i CUInt -- ^ n - -> IO (Ptr CFEC) -foreign import ccall unsafe "&fec_free" _free :: FunPtr (Ptr CFEC -> IO ()) -foreign import ccall unsafe "fec_encode" _encode :: Ptr CFEC - -> Ptr (Ptr Word8) -- ^ primary blocks - -> Ptr (Ptr Word8) -- ^ (output) secondary blocks - -> Ptr CUInt -- ^ array of secondary block ids - -> CSize -- ^ length of previous - -> CSize -- ^ block length - -> IO () -foreign import ccall unsafe "fec_decode" _decode :: Ptr CFEC - -> Ptr (Ptr Word8) -- ^ input blocks - -> Ptr (Ptr Word8) -- ^ output blocks - -> Ptr CUInt -- ^ array of input indexes - -> CSize -- ^ block length - -> IO () - --- | Return true if the given @k@ and @n@ values are valid -isValidConfig :: Int -> Int -> Bool -isValidConfig k n - | k >= n = False - | k < 1 = False - | n < 1 = False - | n > 255 = False - | otherwise = True - --- | Return a FEC with the given parameters. -fec :: Int -- ^ the number of primary blocks - -> Int -- ^ the total number blocks, must be < 256 - -> FECParams -fec k n = - if not (isValidConfig k n) - then error $ "Invalid FEC parameters: " ++ show k ++ " " ++ show n - else unsafePerformIO (do - cfec <- _new (fromIntegral k) (fromIntegral n) - params <- newForeignPtr _free cfec - return $ FECParams params k n) - --- | Create a C array of unsigned from an input array -uintCArray :: [Int] -> ((Ptr CUInt) -> IO a) -> IO a -uintCArray xs f = withArray (map fromIntegral xs) f - --- | Convert a list of ByteStrings to an array of pointers to their data -byteStringsToArray :: [B.ByteString] -> ((Ptr (Ptr Word8)) -> IO a) -> IO a -byteStringsToArray inputs f = do - let l = length inputs - allocaBytes (l * sizeOf (undefined :: Ptr Word8)) (\array -> do - let inner _ [] = f array - inner array' (bs : bss) = BU.unsafeUseAsCString bs (\ptr -> do - poke array' $ castPtr ptr - inner (advancePtr array' 1) bss) - inner array inputs) - --- | Return True iff all the given ByteStrings are the same length -allByteStringsSameLength :: [B.ByteString] -> Bool -allByteStringsSameLength [] = True -allByteStringsSameLength (bs : bss) = all ((==) (B.length bs)) $ map B.length bss - --- | Run the given function with a pointer to an array of @n@ pointers to --- buffers of size @size@. Return these buffers as a list of ByteStrings -createByteStringArray :: Int -- ^ the number of buffers requested - -> Int -- ^ the size of each buffer - -> ((Ptr (Ptr Word8)) -> IO ()) - -> IO [B.ByteString] -createByteStringArray n size f = do - allocaBytes (n * sizeOf (undefined :: Ptr Word8)) (\array -> do - allocaBytes (n * size) (\ptr -> do - mapM_ (\i -> poke (advancePtr array i) (advancePtr ptr (size * i))) [0..(n - 1)] - f array - mapM (\i -> B.packCStringLen (castPtr $ advancePtr ptr (i * size), size)) [0..(n - 1)])) - --- | Generate the secondary blocks from a list of the primary blocks. The --- primary blocks must be in order and all of the same size. There must be --- @k@ primary blocks. 
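Before the Haskell encode below, one note on the Python side: the cb() contract documented for encode_file above is easy to get wrong, because the first k entries of the block list are mutable arrays that get reused between segments. A minimal conforming callback (an illustrative sketch; the input file name is hypothetical and it assumes zfec.filefec as added by this patch):

    import array
    from zfec import filefec

    collected = []
    def cb(blocks, length):
        # the first k entries are reused array.array buffers, so copy them
        # before returning; the remaining entries are freshly created strings
        copies = [b.tostring() if isinstance(b, array.array) else b
                  for b in blocks]
        collected.append((copies, length))

    inf = open("input.dat", "rb")   # hypothetical input file
    filefec.encode_file(inf, cb, 3, 5, chunksize=4096)
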
-encode :: FECParams
-       -> [B.ByteString]  -- ^ a list of @k@ input blocks
-       -> [B.ByteString]  -- ^ (n - k) output blocks
-encode (FECParams params k n) inblocks
-  | length inblocks /= k = error "Wrong number of blocks to FEC encode"
-  | not (allByteStringsSameLength inblocks) = error "Not all inputs to FEC encode are the same length"
-  | otherwise = unsafePerformIO (do
-      let sz = B.length $ head inblocks
-      withForeignPtr params (\cfec -> do
-        byteStringsToArray inblocks (\src -> do
-          createByteStringArray (n - k) sz (\fecs -> do
-            uintCArray [k..(n - 1)] (\block_nums -> do
-              _encode cfec src fecs block_nums (fromIntegral (n - k)) $ fromIntegral sz)))))
-
--- | A sort function for tagged assoc lists
-sortTagged :: [(Int, a)] -> [(Int, a)]
-sortTagged = sortBy (\a b -> compare (fst a) (fst b))
-
--- | Reorder the given list so that elements with tag numbers < the first
--- argument have an index equal to their tag number (if possible)
-reorderPrimaryBlocks :: Int -> [(Int, a)] -> [(Int, a)]
-reorderPrimaryBlocks n blocks = inner (sortTagged pBlocks) sBlocks [] where
-  (pBlocks, sBlocks) = partition (\(tag, _) -> tag < n) blocks
-  inner [] sBlocks acc = acc ++ sBlocks
-  inner pBlocks [] acc = acc ++ pBlocks
-  inner pBlocks@((tag, a) : ps) sBlocks@(s : ss) acc =
-    if length acc == tag
-       then inner ps sBlocks (acc ++ [(tag, a)])
-       else inner pBlocks ss (acc ++ [s])
-
--- | Recover the primary blocks from a list of @k@ blocks. Each block must be
--- tagged with its number (see the module comments about block numbering)
-decode :: FECParams
-       -> [(Int, B.ByteString)]  -- ^ a list of @k@ blocks and their index
-       -> [B.ByteString]  -- ^ a list of the @k@ primary blocks
-decode (FECParams params k n) inblocks
-  | length (nub $ map fst inblocks) /= length (inblocks) = error "Duplicate input blocks in FEC decode"
-  | any (\f -> f < 0 || f >= n) $ map fst inblocks = error "Invalid block numbers in FEC decode"
-  | length inblocks /= k = error "Wrong number of blocks to FEC decode"
-  | not (allByteStringsSameLength $ map snd inblocks) = error "Not all inputs to FEC decode are same length"
-  | otherwise = unsafePerformIO (do
-      let sz = B.length $ snd $ head inblocks
-          inblocks' = reorderPrimaryBlocks k inblocks
-          presentBlocks = map fst inblocks'
-      withForeignPtr params (\cfec -> do
-        byteStringsToArray (map snd inblocks') (\src -> do
-          b <- createByteStringArray (n - k) sz (\out -> do
-                 uintCArray presentBlocks (\block_nums -> do
-                   _decode cfec src out block_nums $ fromIntegral sz))
-          let blocks = [0..(n - 1)] \\ presentBlocks
-              tagged = zip blocks b
-              allBlocks = sortTagged $ tagged ++ inblocks'
-          return $ take k $ map snd allBlocks)))
-
--- | Break a ByteString into @n@ parts, equal in length to the original, such
--- that all @n@ are required to reconstruct the original, but having less
--- than @n@ parts reveals no information about the original.
---
--- This code works in the IO monad because it needs a source of random bytes,
--- which it gets from /dev/urandom. If this file doesn't exist an
--- exception results
---
--- Not terribly fast - probably best to do it with short inputs (e.g. an
--- encryption key)
-secureDivide :: Int  -- ^ the number of parts requested
-             -> B.ByteString  -- ^ the data to be split
-             -> IO [B.ByteString]
-secureDivide n input
-  | n < 0 = error "secureDivide called with negative number of parts"
-  | otherwise = withFile "/dev/urandom" ReadMode (\handle -> do
-      let inner 1 bs = return [bs]
-          inner n bs = do
-            mask <- B.hGet handle (B.length bs)
-            let masked = B.pack $ B.zipWith xor bs mask
-            rest <- inner (n - 1) masked
-            return (mask : rest)
-      inner n input)
-
--- | Reverse the operation of secureDivide. The order of the inputs doesn't
--- matter, but they must all be the same length
-secureCombine :: [B.ByteString] -> B.ByteString
-secureCombine [] = error "Passed empty list of inputs to secureCombine"
-secureCombine [a] = a
-secureCombine [a, b] = B.pack $ B.zipWith xor a b
-secureCombine (a : rest) = B.pack $ B.zipWith xor a $ secureCombine rest
-
--- | A utility function which takes an arbitrary input and FEC encodes it into
--- a number of blocks. The order of the resulting blocks doesn't matter so
--- long as you have enough to present to @deFEC@.
-enFEC :: Int  -- ^ the number of blocks required to reconstruct
-      -> Int  -- ^ the total number of blocks
-      -> B.ByteString  -- ^ the data to divide
-      -> [B.ByteString]  -- ^ the resulting blocks
-enFEC k n input = taggedPrimaryBlocks ++ taggedSecondaryBlocks where
-  taggedPrimaryBlocks = map (uncurry B.cons) $ zip [0..] primaryBlocks
-  taggedSecondaryBlocks = map (uncurry B.cons) $ zip [(fromIntegral k)..] secondaryBlocks
-  remainder = B.length input `mod` k
-  paddingLength = if remainder >= 1 then (k - remainder) else k
-  paddingBytes = (B.replicate (paddingLength - 1) 0) `B.append` (B.singleton $ fromIntegral paddingLength)
-  divide a bs
-    | B.null bs = []
-    | otherwise = (B.take a bs) : (divide a $ B.drop a bs)
-  input' = input `B.append` paddingBytes
-  blockSize = B.length input' `div` k
-  primaryBlocks = divide blockSize input'
-  secondaryBlocks = encode params primaryBlocks
-  params = fec k n
-
--- | Reverses the operation of @enFEC@.
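The padding scheme enFEC uses above is self-delimiting: the padded length is a multiple of k and the final byte always records how many padding bytes were appended (at least one), which is what lets deFEC below strip the padding without knowing the original length. The same arithmetic in Python (an illustrative sketch, not part of the patch):

    def pad(data, k):
        # append at least one byte; the last byte records the pad length
        remainder = len(data) % k
        padlen = (k - remainder) if remainder else k
        return data + "\x00" * (padlen - 1) + chr(padlen)

    def unpad(data):
        return data[:len(data) - ord(data[-1])]

    assert unpad(pad("hello", 3)) == "hello"
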
-deFEC :: Int -- ^ the number of blocks required (matches call to @enFEC@) - -> Int -- ^ the total number of blocks (matches call to @enFEC@) - -> [B.ByteString] -- ^ a list of k, or more, blocks from @enFEC@ - -> B.ByteString -deFEC k n inputs - | length inputs < k = error "Too few inputs to deFEC" - | otherwise = B.take (B.length fecOutput - paddingLength) fecOutput where - paddingLength = fromIntegral $ B.last fecOutput - inputs' = take k inputs - taggedInputs = map (\bs -> (fromIntegral $ B.head bs, B.tail bs)) inputs' - fecOutput = B.concat $ decode params taggedInputs - params = fec k n diff --git a/zfec/haskell/test/FECTest.hs b/zfec/haskell/test/FECTest.hs deleted file mode 100644 index 0f5a353..0000000 --- a/zfec/haskell/test/FECTest.hs +++ /dev/null @@ -1,57 +0,0 @@ -module Main where - -import qualified Data.ByteString as B -import qualified Codec.FEC as FEC -import System.IO (withFile, IOMode(..)) -import System.Random -import Data.List (sortBy) - -import Test.QuickCheck - --- | Return true if the given @k@ and @n@ values are valid -isValidConfig :: Int -> Int -> Bool -isValidConfig k n - | k >= n = False - | k < 1 = False - | n < 1 = False - | otherwise = True - -randomTake :: Int -> Int -> [a] -> [a] -randomTake seed n values = map snd $ take n sortedValues where - sortedValues = sortBy (\a b -> compare (fst a) (fst b)) taggedValues - taggedValues = zip rnds values - rnds :: [Float] - rnds = randoms gen - gen = mkStdGen seed - -testFEC k n len seed = FEC.decode fec someTaggedBlocks == origBlocks where - origBlocks = map (\i -> B.replicate len $ fromIntegral i) [0..(k - 1)] - fec = FEC.fec k n - secondaryBlocks = FEC.encode fec origBlocks - taggedBlocks = zip [0..] (origBlocks ++ secondaryBlocks) - someTaggedBlocks = randomTake seed k taggedBlocks - -prop_FEC :: Int -> Int -> Int -> Int -> Property -prop_FEC k n len seed = - isValidConfig k n && n < 256 && len < 1024 ==> testFEC k n len seed - -checkDivide :: Int -> IO () -checkDivide n = do - let input = B.replicate 1024 65 - parts <- FEC.secureDivide n input - if FEC.secureCombine parts == input - then return () - else fail "checkDivide failed" - -checkEnFEC :: Int -> IO () -checkEnFEC len = do - testdata <- withFile "/dev/urandom" ReadMode (\handle -> B.hGet handle len) - let [a, b, c, d, e] = FEC.enFEC 3 5 testdata - if FEC.deFEC 3 5 [b, e, d] == testdata - then return () - else fail "deFEC failure" - -main = do - mapM_ (check (defaultConfig { configMaxTest = 1000, configMaxFail = 10000 })) [prop_FEC] - mapM_ checkDivide [1, 2, 3, 4, 10] - mapM_ checkEnFEC [1, 2, 3, 4, 5, 1024 * 1024] diff --git a/zfec/setup.cfg b/zfec/setup.cfg deleted file mode 100644 index 620b349..0000000 --- a/zfec/setup.cfg +++ /dev/null @@ -1,7 +0,0 @@ -[easy_install] -# zfec actually does work at least as well as any package -# works when zipped, but zipping eggs causes various problems -# (http://bugs.python.org/setuptools/issue33 ), and generally makes it -# harder for people to get at the source code, and doesn't actually -# provide any benefits that I am aware of. -zip_ok=False diff --git a/zfec/setup.py b/zfec/setup.py deleted file mode 100755 index 8a1a5b3..0000000 --- a/zfec/setup.py +++ /dev/null @@ -1,199 +0,0 @@ - -# zfec -- fast forward error correction library with Python interface -# -# copyright © 2007-2013 Zooko Wilcox-O'Hearn -# -# This file is part of zfec. -# -# See README.rst for licensing information. 
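For reference, the property that the removed FECTest.hs checks -- any k of the m blocks suffice to reconstruct the k primary blocks -- can be stated against the Python binding too. A sketch (illustrative; it assumes zfec.Encoder.encode returns all m blocks when no block numbers are requested, and that zfec.Decoder.decode accepts blocks in any order alongside their block numbers):

    import random, zfec

    def roundtrip(k=3, m=5, size=1024, seed=42):
        data = [chr(i) * size for i in range(k)]   # the k primary blocks
        blocks = zfec.Encoder(k, m).encode(data)   # assumed: m blocks out
        tagged = list(enumerate(blocks))
        random.Random(seed).shuffle(tagged)
        nums = [num for num, _ in tagged[:k]]      # any k distinct blocks
        chunks = [blk for _, blk in tagged[:k]]
        assert zfec.Decoder(k, m).decode(chunks, nums) == data
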
- -import glob, os, re, sys - -egg = os.path.realpath(glob.glob('setuptools-*.egg')[0]) -sys.path.insert(0, egg) -import setuptools; setuptools.bootstrap_install_from = egg - -from setuptools import Extension, find_packages, setup - -if "--debug" in sys.argv: - DEBUGMODE=True - sys.argv.remove("--debug") -else: - DEBUGMODE=False - -extra_compile_args=[] -extra_link_args=[] - -extra_compile_args.append("-std=c99") - -define_macros=[] -undef_macros=[] - -for arg in sys.argv: - if arg.startswith("--stride="): - stride = int(arg[len("--stride="):]) - define_macros.append(('STRIDE', stride)) - sys.argv.remove(arg) - break - -if DEBUGMODE: - extra_compile_args.append("-O0") - extra_compile_args.append("-g") - extra_compile_args.append("-Wall") - extra_link_args.append("-g") - undef_macros.append('NDEBUG') - -trove_classifiers=[ - "Development Status :: 5 - Production/Stable", - "Environment :: Console", - "License :: OSI Approved :: GNU General Public License (GPL)", # See README.rst for alternative licensing. - "License :: DFSG approved", - "License :: Other/Proprietary License", - "Intended Audience :: Developers", - "Intended Audience :: End Users/Desktop", - "Intended Audience :: System Administrators", - "Operating System :: Microsoft", - "Operating System :: Microsoft :: Windows", - "Operating System :: Unix", - "Operating System :: POSIX :: Linux", - "Operating System :: POSIX", - "Operating System :: MacOS :: MacOS X", - "Operating System :: Microsoft :: Windows :: Windows NT/2000", - "Operating System :: OS Independent", - "Natural Language :: English", - "Programming Language :: C", - "Programming Language :: Python", - "Programming Language :: Python :: 2", - "Programming Language :: Python :: 2.4", - "Programming Language :: Python :: 2.5", - "Programming Language :: Python :: 2.6", - "Programming Language :: Python :: 2.7", - "Topic :: Utilities", - "Topic :: System :: Systems Administration", - "Topic :: System :: Filesystems", - "Topic :: System :: Distributed Computing", - "Topic :: Software Development :: Libraries", - "Topic :: Communications :: Usenet News", - "Topic :: System :: Archiving :: Backup", - "Topic :: System :: Archiving :: Mirroring", - "Topic :: System :: Archiving", - ] - -PKG = "zfec" -VERSIONFILE = os.path.join(PKG, "_version.py") -verstr = "unknown" -try: - verstrline = open(VERSIONFILE, "rt").read() -except EnvironmentError: - pass # Okay, there is no version file. -else: - VSRE = r"^verstr = ['\"]([^'\"]*)['\"]" - mo = re.search(VSRE, verstrline, re.M) - if mo: - verstr = mo.group(1) - else: - print "unable to find version in %s" % (VERSIONFILE,) - raise RuntimeError("if %s.py exists, it is required to be well-formed" % (VERSIONFILE,)) - -setup_requires = [] -tests_require = [] - -tests_require.append("pyutil >= 1.3.19") - -# darcsver is needed only if you want "./setup.py darcsver" to write a new -# version stamp in pyutil/_version.py, with a version number derived from -# darcs history. http://pypi.python.org/pypi/darcsver -if 'darcsver' in sys.argv[1:]: - setup_requires.append('darcsver >= 1.0.0') - -# setuptools_darcs is required to produce complete distributions (such -# as with "sdist" or "bdist_egg"), unless there is a -# zfec.egg-info/SOURCE.txt file present which contains a complete -# list of files that should be included. 
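The VERSIONFILE regex above can be exercised in isolation (a sketch; the _version.py payload shown is illustrative):

    import re
    VSRE = r"^verstr = ['\"]([^'\"]*)['\"]"
    sample = '# generated by darcsver\nverstr = "1.4.7"\n'   # illustrative
    mo = re.search(VSRE, sample, re.M)
    assert mo is not None and mo.group(1) == "1.4.7"
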
-# http://pypi.python.org/pypi/setuptools_darcs - -# However, requiring it runs afoul of a bug in Distribute, which was -# shipped in Ubuntu Lucid, so for now you have to manually install it -# before building sdists or eggs: -# http://bitbucket.org/tarek/distribute/issue/55/revision-control-plugin-automatically-installed-as-a-build-dependency-is-not-present-when-another-build-dependency-is-being -if False: - setup_requires.append('setuptools_darcs >= 1.1.0') - - -# setuptools_trial is needed if you want "./setup.py trial" or -# "./setup.py test" to execute the tests. -# http://pypi.python.org/pypi/setuptools_trial -if 'trial' in sys.argv[1:]: - setup_requires.extend(['setuptools_trial >= 0.5']) - -# trialcoverage is required if you want the "trial" unit test runner to have a -# "--reporter=bwverbose-coverage" option which produces code-coverage results. -if "--reporter=bwverbose-coverage" in sys.argv: - tests_require.append('trialcoverage >= 0.3.3') - tests_require.append('twisted >= 2.4.0') - tests_require.append('setuptools_trial >= 0.5') - -# stdeb is required to build Debian dsc files. -if "sdist_dsc" in sys.argv: - setup_requires.append('stdeb') - -data_fnames=[ 'COPYING.GPL', 'changelog', 'COPYING.TGPPL.rst', 'TODO', 'README.rst' ] - -# In case we are building for a .deb with stdeb's sdist_dsc command, we put the -# docs in "share/doc/$PKG". -doc_loc = "share/doc/" + PKG -data_files = [(doc_loc, data_fnames)] - -readmetext = open('README.rst').read() -if readmetext[:3] == '\xef\xbb\xbf': - # utf-8 "BOM" - readmetext = readmetext[3:] - -try: - readmetext = readmetext.decode('utf-8') -except UnicodeDecodeError: - pass - -install_requires=["pyutil >= 1.3.19"] - -# argparse comes built into Python >= 2.7, and is provided by the "argparse" -# distribution for earlier versions of Python. -try: - import argparse - argparse # hush pyflakes -except ImportError: - install_requires.append("argparse >= 0.8") - -# distutils in Python 2.4 has a bug in that it tries to encode the long -# description into ascii. We detect the resulting exception and try again -# after squashing the long description (lossily) into ascii. - -def _setup(longdescription): - setup(name=PKG, - version=verstr, - description='a fast erasure codec which can be used with the command-line, C, Python, or Haskell', - long_description=longdescription, - author='Zooko O\'Whielacronx', - author_email='zooko@zooko.com', - url='https://tahoe-lafs.org/trac/'+PKG, - license='GNU GPL', # See README.rst for alternative licensing. - install_requires=install_requires, - tests_require=tests_require, - packages=find_packages(), - include_package_data=True, - data_files=data_files, - setup_requires=setup_requires, - classifiers=trove_classifiers, - entry_points = { 'console_scripts': [ 'zfec = %s.cmdline_zfec:main' % PKG, 'zunfec = %s.cmdline_zunfec:main' % PKG ] }, - ext_modules=[Extension(PKG+'._fec', [PKG+'/fec.c', PKG+'/_fecmodule.c',], extra_link_args=extra_link_args, extra_compile_args=extra_compile_args, undef_macros=undef_macros, define_macros=define_macros),], - test_suite=PKG+".test", - zip_safe=False, # I prefer unzipped for easier access. 
- extras_require={ - 'ed25519=ba95497adf4db8e17f688c0979003c48c76897d60e2d2193f938b9ab62115f59':[], - }, - ) - -try: - _setup(readmetext) -except UnicodeEncodeError: - _setup(repr(readmetext)) diff --git a/zfec/setuptools-0.6c16dev3.egg/EGG-INFO/PKG-INFO b/zfec/setuptools-0.6c16dev3.egg/EGG-INFO/PKG-INFO deleted file mode 100644 index c9df489..0000000 --- a/zfec/setuptools-0.6c16dev3.egg/EGG-INFO/PKG-INFO +++ /dev/null @@ -1,100 +0,0 @@ -Metadata-Version: 1.0 -Name: setuptools -Version: 0.6c16dev3 -Summary: Download, build, install, upgrade, and uninstall Python packages -- easily! (zetuptoolz fork) -Home-page: http://pypi.python.org/pypi/setuptools -Author: Phillip J. Eby -Author-email: distutils-sig@python.org -License: PSF or ZPL -Description: ====================== - This is not Setuptools - ====================== - - This is the ``zetuptoolz`` fork of setuptools, which is used to install - `Tahoe-LAFS`_. It has a `darcs source repository`_ and `issue tracker`_. - - For a list of differences between this fork and setuptools, see zetuptoolz.txt. - - Note that, to avoid interfering with any setuptools installation, zetuptoolz - does not install a script called ``easy_install``. There is an ``easy_install_z`` - script, but that is intended only for developers to test differences between - setuptools and zetuptoolz. - - .. _Tahoe-LAFS: http://tahoe-lafs.org/ - .. _darcs source repository: http://tahoe-lafs.org/source/zetuptoolz/trunk - .. _issue tracker: http://tahoe-lafs.org/trac/zetuptoolz - - - -------------------------------- - Using Setuptools and EasyInstall - -------------------------------- - - Here are some of the available manuals, tutorials, and other resources for - learning about Setuptools, Python Eggs, and EasyInstall: - - * `The EasyInstall user's guide and reference manual`_ - * `The setuptools Developer's Guide`_ - * `The pkg_resources API reference`_ - * `Package Compatibility Notes`_ (user-maintained) - * `The Internal Structure of Python Eggs`_ - - Questions, comments, and bug reports should be directed to the `distutils-sig - mailing list`_. If you have written (or know of) any tutorials, documentation, - plug-ins, or other resources for setuptools users, please let us know about - them there, so this reference list can be updated. If you have working, - *tested* patches to correct problems or add features, you may submit them to - the `setuptools bug tracker`_. - - .. _setuptools bug tracker: http://bugs.python.org/setuptools/ - .. _Package Compatibility Notes: http://peak.telecommunity.com/DevCenter/PackageNotes - .. _The Internal Structure of Python Eggs: http://peak.telecommunity.com/DevCenter/EggFormats - .. _The setuptools Developer's Guide: http://peak.telecommunity.com/DevCenter/setuptools - .. _The pkg_resources API reference: http://peak.telecommunity.com/DevCenter/PkgResources - .. _The EasyInstall user's guide and reference manual: http://peak.telecommunity.com/DevCenter/EasyInstall - .. _distutils-sig mailing list: http://mail.python.org/pipermail/distutils-sig/ - - - ------- - Credits - ------- - - * The original design for the ``.egg`` format and the ``pkg_resources`` API was - co-created by Phillip Eby and Bob Ippolito. Bob also implemented the first - version of ``pkg_resources``, and supplied the OS X operating system version - compatibility algorithm. - - * Ian Bicking implemented many early "creature comfort" features of - easy_install, including support for downloading via Sourceforge and - Subversion repositories. 
Ian's comments on the Web-SIG about WSGI - application deployment also inspired the concept of "entry points" in eggs, - and he has given talks at PyCon and elsewhere to inform and educate the - community about eggs and setuptools. - - * Jim Fulton contributed time and effort to build automated tests of various - aspects of ``easy_install``, and supplied the doctests for the command-line - ``.exe`` wrappers on Windows. - - * Phillip J. Eby is the principal author and maintainer of setuptools, and - first proposed the idea of an importable binary distribution format for - Python application plug-ins. - - * Significant parts of the implementation of setuptools were funded by the Open - Source Applications Foundation, to provide a plug-in infrastructure for the - Chandler PIM application. In addition, many OSAF staffers (such as Mike - "Code Bear" Taylor) contributed their time and stress as guinea pigs for the - use of eggs and setuptools, even before eggs were "cool". (Thanks, guys!) - - .. _files: - -Keywords: CPAN PyPI distutils eggs package management -Platform: UNKNOWN -Classifier: Development Status :: 3 - Alpha -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: Python Software Foundation License -Classifier: License :: OSI Approved :: Zope Public License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: System :: Archiving :: Packaging -Classifier: Topic :: System :: Systems Administration -Classifier: Topic :: Utilities diff --git a/zfec/setuptools-0.6c16dev3.egg/EGG-INFO/SOURCES.txt b/zfec/setuptools-0.6c16dev3.egg/EGG-INFO/SOURCES.txt deleted file mode 100644 index 07bd4ff..0000000 --- a/zfec/setuptools-0.6c16dev3.egg/EGG-INFO/SOURCES.txt +++ /dev/null @@ -1,44 +0,0 @@ -README.txt -easy_install.py -pkg_resources.py -setup.cfg -setup.py -setuptools/__init__.py -setuptools/archive_util.py -setuptools/depends.py -setuptools/dist.py -setuptools/extension.py -setuptools/package_index.py -setuptools/sandbox.py -setuptools/site-patch.py -setuptools.egg-info/PKG-INFO -setuptools.egg-info/SOURCES.txt -setuptools.egg-info/dependency_links.txt -setuptools.egg-info/entry_points.txt -setuptools.egg-info/top_level.txt -setuptools.egg-info/zip-safe -setuptools/command/__init__.py -setuptools/command/alias.py -setuptools/command/bdist_egg.py -setuptools/command/bdist_rpm.py -setuptools/command/bdist_wininst.py -setuptools/command/build_ext.py -setuptools/command/build_py.py -setuptools/command/develop.py -setuptools/command/easy_install.py -setuptools/command/egg_info.py -setuptools/command/install.py -setuptools/command/install_egg_info.py -setuptools/command/install_lib.py -setuptools/command/install_scripts.py -setuptools/command/register.py -setuptools/command/rotate.py -setuptools/command/saveopts.py -setuptools/command/scriptsetup.py -setuptools/command/sdist.py -setuptools/command/setopt.py -setuptools/command/test.py -setuptools/command/upload.py -setuptools/tests/__init__.py -setuptools/tests/test_packageindex.py -setuptools/tests/test_resources.py \ No newline at end of file diff --git a/zfec/setuptools-0.6c16dev3.egg/EGG-INFO/dependency_links.txt b/zfec/setuptools-0.6c16dev3.egg/EGG-INFO/dependency_links.txt deleted file mode 100644 index 8b13789..0000000 --- a/zfec/setuptools-0.6c16dev3.egg/EGG-INFO/dependency_links.txt +++ /dev/null @@ -1 +0,0 @@ - diff --git 
a/zfec/setuptools-0.6c16dev3.egg/EGG-INFO/entry_points.txt b/zfec/setuptools-0.6c16dev3.egg/EGG-INFO/entry_points.txt deleted file mode 100644 index 0a31ba0..0000000 --- a/zfec/setuptools-0.6c16dev3.egg/EGG-INFO/entry_points.txt +++ /dev/null @@ -1,59 +0,0 @@ -[distutils.commands] -bdist_rpm = setuptools.command.bdist_rpm:bdist_rpm -rotate = setuptools.command.rotate:rotate -develop = setuptools.command.develop:develop -setopt = setuptools.command.setopt:setopt -build_py = setuptools.command.build_py:build_py -scriptsetup = setuptools.command.scriptsetup:scriptsetup -saveopts = setuptools.command.saveopts:saveopts -egg_info = setuptools.command.egg_info:egg_info -register = setuptools.command.register:register -install_egg_info = setuptools.command.install_egg_info:install_egg_info -alias = setuptools.command.alias:alias -easy_install = setuptools.command.easy_install:easy_install -install_scripts = setuptools.command.install_scripts:install_scripts -bdist_wininst = setuptools.command.bdist_wininst:bdist_wininst -bdist_egg = setuptools.command.bdist_egg:bdist_egg -install = setuptools.command.install:install -test = setuptools.command.test:test -install_lib = setuptools.command.install_lib:install_lib -build_ext = setuptools.command.build_ext:build_ext -sdist = setuptools.command.sdist:sdist - -[egg_info.writers] -dependency_links.txt = setuptools.command.egg_info:overwrite_arg -requires.txt = setuptools.command.egg_info:write_requirements -PKG-INFO = setuptools.command.egg_info:write_pkg_info -eager_resources.txt = setuptools.command.egg_info:overwrite_arg -top_level.txt = setuptools.command.egg_info:write_toplevel_names -namespace_packages.txt = setuptools.command.egg_info:overwrite_arg -entry_points.txt = setuptools.command.egg_info:write_entries -depends.txt = setuptools.command.egg_info:warn_depends_obsolete - -[console_scripts] -easy_install_z-2.6 = setuptools.command.easy_install:main -easy_install_z = setuptools.command.easy_install:main - -[setuptools.file_finders] -svn_cvs = setuptools.command.sdist:_default_revctrl - -[distutils.setup_keywords] -dependency_links = setuptools.dist:assert_string_list -entry_points = setuptools.dist:check_entry_points -extras_require = setuptools.dist:check_extras -test_runner = setuptools.dist:check_importable -package_data = setuptools.dist:check_package_data -install_requires = setuptools.dist:check_requirements -include_package_data = setuptools.dist:assert_bool -exclude_package_data = setuptools.dist:check_package_data -namespace_packages = setuptools.dist:check_nsp -test_suite = setuptools.dist:check_test_suite -eager_resources = setuptools.dist:assert_string_list -zip_safe = setuptools.dist:assert_bool -test_loader = setuptools.dist:check_importable -packages = setuptools.dist:check_packages -tests_require = setuptools.dist:check_requirements - -[setuptools.installation] -eggsecutable = setuptools.command.easy_install:bootstrap - diff --git a/zfec/setuptools-0.6c16dev3.egg/EGG-INFO/top_level.txt b/zfec/setuptools-0.6c16dev3.egg/EGG-INFO/top_level.txt deleted file mode 100644 index 4577c6a..0000000 --- a/zfec/setuptools-0.6c16dev3.egg/EGG-INFO/top_level.txt +++ /dev/null @@ -1,3 +0,0 @@ -easy_install -pkg_resources -setuptools diff --git a/zfec/setuptools-0.6c16dev3.egg/EGG-INFO/zip-safe b/zfec/setuptools-0.6c16dev3.egg/EGG-INFO/zip-safe deleted file mode 100644 index 8b13789..0000000 --- a/zfec/setuptools-0.6c16dev3.egg/EGG-INFO/zip-safe +++ /dev/null @@ -1 +0,0 @@ - diff --git a/zfec/setuptools-0.6c16dev3.egg/easy_install.py 
b/zfec/setuptools-0.6c16dev3.egg/easy_install.py deleted file mode 100644 index d87e984..0000000 --- a/zfec/setuptools-0.6c16dev3.egg/easy_install.py +++ /dev/null @@ -1,5 +0,0 @@ -"""Run the EasyInstall command""" - -if __name__ == '__main__': - from setuptools.command.easy_install import main - main() diff --git a/zfec/setuptools-0.6c16dev3.egg/pkg_resources.py b/zfec/setuptools-0.6c16dev3.egg/pkg_resources.py deleted file mode 100644 index 00d9722..0000000 --- a/zfec/setuptools-0.6c16dev3.egg/pkg_resources.py +++ /dev/null @@ -1,2653 +0,0 @@ -"""Package resource API --------------------- - -A resource is a logical file contained within a package, or a logical -subdirectory thereof. The package resource API expects resource names -to have their path parts separated with ``/``, *not* whatever the local -path separator is. Do not use os.path operations to manipulate resource -names being passed into the API. - -The package resource API is designed to work with normal filesystem packages, -.egg files, and unpacked .egg files. It can also work in a limited way with -.zip files and with custom PEP 302 loaders that support the ``get_data()`` -method. -""" - -import sys, os, zipimport, time, re, imp - -try: - frozenset -except NameError: - from sets import ImmutableSet as frozenset - -# capture these to bypass sandboxing -from os import utime, rename, unlink, mkdir -from os import open as os_open -from os.path import isdir, split - -def _bypass_ensure_directory(name, mode=0777): - # Sandbox-bypassing version of ensure_directory() - dirname, filename = split(name) - if dirname and filename and not isdir(dirname): - _bypass_ensure_directory(dirname) - mkdir(dirname, mode) - - - - - - - - -_state_vars = {} - -def _declare_state(vartype, **kw): - g = globals() - for name, val in kw.iteritems(): - g[name] = val - _state_vars[name] = vartype - -def __getstate__(): - state = {} - g = globals() - for k, v in _state_vars.iteritems(): - state[k] = g['_sget_'+v](g[k]) - return state - -def __setstate__(state): - g = globals() - for k, v in state.iteritems(): - g['_sset_'+_state_vars[k]](k, g[k], v) - return state - -def _sget_dict(val): - return val.copy() - -def _sset_dict(key, ob, state): - ob.clear() - ob.update(state) - -def _sget_object(val): - return val.__getstate__() - -def _sset_object(key, ob, state): - ob.__setstate__(state) - -_sget_none = _sset_none = lambda *args: None - - - - - - -def get_supported_platform(): - """Return this platform's maximum compatible version. - - distutils.util.get_platform() normally reports the minimum version - of Mac OS X that would be required to *use* extensions produced by - distutils. But what we want when checking compatibility is to know the - version of Mac OS X that we are *running*. To allow usage of packages that - explicitly require a newer version of Mac OS X, we must also know the - current version of the OS. - - If this condition occurs for any other platform with a version in its - platform strings, this function should be extended accordingly. 
- """ - plat = get_build_platform(); m = macosVersionString.match(plat) - if m is not None and sys.platform == "darwin": - try: - plat = 'macosx-%s-%s' % ('.'.join(_macosx_vers()[:2]), m.group(3)) - except ValueError: - pass # not Mac OS X - return plat - - - - - - - - - - - - - - - - - - - - - -__all__ = [ - # Basic resource access and distribution/entry point discovery - 'require', 'run_script', 'get_provider', 'get_distribution', - 'load_entry_point', 'get_entry_map', 'get_entry_info', 'iter_entry_points', - 'resource_string', 'resource_stream', 'resource_filename', - 'resource_listdir', 'resource_exists', 'resource_isdir', - - # Environmental control - 'declare_namespace', 'working_set', 'add_activation_listener', - 'find_distributions', 'set_extraction_path', 'cleanup_resources', - 'get_default_cache', - - # Primary implementation classes - 'Environment', 'WorkingSet', 'ResourceManager', - 'Distribution', 'Requirement', 'EntryPoint', - - # Exceptions - 'ResolutionError','VersionConflict','DistributionNotFound','UnknownExtra', - 'ExtractionError', - - # Parsing functions and string utilities - 'parse_requirements', 'parse_version', 'safe_name', 'safe_version', - 'get_platform', 'compatible_platforms', 'yield_lines', 'split_sections', - 'safe_extra', 'to_filename', - - # filesystem utilities - 'ensure_directory', 'normalize_path', - - # Distribution "precedence" constants - 'EGG_DIST', 'BINARY_DIST', 'SOURCE_DIST', 'CHECKOUT_DIST', 'DEVELOP_DIST', - - # "Provider" interfaces, implementations, and registration/lookup APIs - 'IMetadataProvider', 'IResourceProvider', 'FileMetadata', - 'PathMetadata', 'EggMetadata', 'EmptyProvider', 'empty_provider', - 'NullProvider', 'EggProvider', 'DefaultProvider', 'ZipProvider', - 'register_finder', 'register_namespace_handler', 'register_loader_type', - 'fixup_namespace_packages', 'get_importer', - - # Deprecated/backward compatibility only - 'run_main', 'AvailableDistributions', -] -class ResolutionError(Exception): - """Abstract base for dependency resolution errors""" - def __repr__(self): - return self.__class__.__name__+repr(self.args) - -class VersionConflict(ResolutionError): - """An already-installed version conflicts with the requested version""" - -class DistributionNotFound(ResolutionError): - """A requested distribution was not found""" - -class UnknownExtra(ResolutionError): - """Distribution doesn't have an "extra feature" of the given name""" - -_provider_factories = {} -PY_MAJOR = sys.version[:3] -EGG_DIST = 3 -BINARY_DIST = 2 -SOURCE_DIST = 1 -CHECKOUT_DIST = 0 -DEVELOP_DIST = -1 - -def register_loader_type(loader_type, provider_factory): - """Register `provider_factory` to make providers for `loader_type` - - `loader_type` is the type or class of a PEP 302 ``module.__loader__``, - and `provider_factory` is a function that, passed a *module* object, - returns an ``IResourceProvider`` for that module. 
- """ - _provider_factories[loader_type] = provider_factory - -def get_provider(moduleOrReq): - """Return an IResourceProvider for the named module or requirement""" - if isinstance(moduleOrReq,Requirement): - return working_set.find(moduleOrReq) or require(str(moduleOrReq))[0] - try: - module = sys.modules[moduleOrReq] - except KeyError: - __import__(moduleOrReq) - module = sys.modules[moduleOrReq] - loader = getattr(module, '__loader__', None) - return _find_adapter(_provider_factories, loader)(module) - -def _macosx_vers(_cache=[]): - if not _cache: - from platform import mac_ver - _cache.append(mac_ver()[0].split('.')) - return _cache[0] - -def _macosx_arch(machine): - return {'PowerPC':'ppc', 'Power_Macintosh':'ppc'}.get(machine,machine) - -def get_build_platform(): - """Return this platform's string for platform-specific distributions - - XXX Currently this is the same as ``distutils.util.get_platform()``, but it - needs some hacks for Linux and Mac OS X. - """ - from distutils.util import get_platform - plat = get_platform() - if sys.platform == "darwin" and not plat.startswith('macosx-'): - try: - version = _macosx_vers() - machine = os.uname()[4].replace(" ", "_") - return "macosx-%d.%d-%s" % (int(version[0]), int(version[1]), - _macosx_arch(machine)) - except ValueError: - # if someone is running a non-Mac darwin system, this will fall - # through to the default implementation - pass - return plat - -macosVersionString = re.compile(r"macosx-(\d+)\.(\d+)-(.*)") -darwinVersionString = re.compile(r"darwin-(\d+)\.(\d+)\.(\d+)-(.*)") -get_platform = get_build_platform # XXX backward compat - - - - - - - -def compatible_platforms(provided,required): - """Can code for the `provided` platform run on the `required` platform? - - Returns true if either platform is ``None``, or the platforms are equal. - - XXX Needs compatibility checks for Linux and other unixy OSes. - """ - if provided is None or required is None or provided==required: - return True # easy case - - # Mac OS X special cases - reqMac = macosVersionString.match(required) - if reqMac: - provMac = macosVersionString.match(provided) - - # is this a Mac package? - if not provMac: - # this is backwards compatibility for packages built before - # setuptools 0.6. All packages built after this point will - # use the new macosx designation. - provDarwin = darwinVersionString.match(provided) - if provDarwin: - dversion = int(provDarwin.group(1)) - macosversion = "%s.%s" % (reqMac.group(1), reqMac.group(2)) - if dversion == 7 and macosversion >= "10.3" or \ - dversion == 8 and macosversion >= "10.4": - - #import warnings - #warnings.warn("Mac eggs should be rebuilt to " - # "use the macosx designation instead of darwin.", - # category=DeprecationWarning) - return True - return False # egg isn't macosx or legacy darwin - - # are they the same major version and machine type? - if provMac.group(1) != reqMac.group(1) or \ - provMac.group(3) != reqMac.group(3): - return False - - - - # is the required OS major update >= the provided one? 
- if int(provMac.group(2)) > int(reqMac.group(2)): - return False - - return True - - # XXX Linux and other platforms' special cases should go here - return False - - -def run_script(dist_spec, script_name): - """Locate distribution `dist_spec` and run its `script_name` script""" - ns = sys._getframe(1).f_globals - name = ns['__name__'] - ns.clear() - ns['__name__'] = name - require(dist_spec)[0].run_script(script_name, ns) - -run_main = run_script # backward compatibility - -def get_distribution(dist): - """Return a current distribution object for a Requirement or string""" - if isinstance(dist,basestring): dist = Requirement.parse(dist) - if isinstance(dist,Requirement): dist = get_provider(dist) - if not isinstance(dist,Distribution): - raise TypeError("Expected string, Requirement, or Distribution", dist) - return dist - -def load_entry_point(dist, group, name): - """Return `name` entry point of `group` for `dist` or raise ImportError""" - return get_distribution(dist).load_entry_point(group, name) - -def get_entry_map(dist, group=None): - """Return the entry point map for `group`, or the full entry map""" - return get_distribution(dist).get_entry_map(group) - -def get_entry_info(dist, group, name): - """Return the EntryPoint object for `group`+`name`, or ``None``""" - return get_distribution(dist).get_entry_info(group, name) - - -class IMetadataProvider: - - def has_metadata(name): - """Does the package's distribution contain the named metadata?""" - - def get_metadata(name): - """The named metadata resource as a string""" - - def get_metadata_lines(name): - """Yield named metadata resource as list of non-blank non-comment lines - - Leading and trailing whitespace is stripped from each line, and lines - with ``#`` as the first non-blank character are omitted.""" - - def metadata_isdir(name): - """Is the named metadata a directory? (like ``os.path.isdir()``)""" - - def metadata_listdir(name): - """List of metadata names in the directory (like ``os.listdir()``)""" - - def run_script(script_name, namespace): - """Execute the named script in the supplied namespace dictionary""" - - - - - - - - - - - - - - - - - - - -class IResourceProvider(IMetadataProvider): - """An object that provides access to package resources""" - - def get_resource_filename(manager, resource_name): - """Return a true filesystem path for `resource_name` - - `manager` must be an ``IResourceManager``""" - - def get_resource_stream(manager, resource_name): - """Return a readable file-like object for `resource_name` - - `manager` must be an ``IResourceManager``""" - - def get_resource_string(manager, resource_name): - """Return a string containing the contents of `resource_name` - - `manager` must be an ``IResourceManager``""" - - def has_resource(resource_name): - """Does the package contain the named resource?""" - - def resource_isdir(resource_name): - """Is the named resource a directory? 
(like ``os.path.isdir()``)""" - - def resource_listdir(resource_name): - """List of resource names in the directory (like ``os.listdir()``)""" - - - - - - - - - - - - - - - -class WorkingSet(object): - """A collection of active distributions on sys.path (or a similar list)""" - - def __init__(self, entries=None): - """Create working set from list of path entries (default=sys.path)""" - self.entries = [] - self.entry_keys = {} - self.by_key = {} - self.callbacks = [] - - if entries is None: - entries = sys.path - - for entry in entries: - self.add_entry(entry) - - - def add_entry(self, entry): - """Add a path item to ``.entries``, finding any distributions on it - - ``find_distributions(entry, True)`` is used to find distributions - corresponding to the path entry, and they are added. `entry` is - always appended to ``.entries``, even if it is already present. - (This is because ``sys.path`` can contain the same value more than - once, and the ``.entries`` of the ``sys.path`` WorkingSet should always - equal ``sys.path``.) - """ - self.entry_keys.setdefault(entry, []) - self.entries.append(entry) - for dist in find_distributions(entry, True): - self.add(dist, entry, False) - - - def __contains__(self,dist): - """True if `dist` is the active distribution for its project""" - return self.by_key.get(dist.key) == dist - - - - - - def find(self, req): - """Find a distribution matching requirement `req` - - If there is an active distribution for the requested project, this - returns it as long as it meets the version requirement specified by - `req`. But, if there is an active distribution for the project and it - does *not* meet the `req` requirement, ``VersionConflict`` is raised. - If there is no active distribution for the requested project, ``None`` - is returned. - """ - dist = self.by_key.get(req.key) - if dist is not None and dist not in req: - raise VersionConflict(dist,req) # XXX add more info - else: - return dist - - def iter_entry_points(self, group, name=None): - """Yield entry point objects from `group` matching `name` - - If `name` is None, yields all entry points in `group` from all - distributions in the working set, otherwise only ones matching - both `group` and `name` are yielded (in distribution order). - """ - for dist in self: - entries = dist.get_entry_map(group) - if name is None: - for ep in entries.values(): - yield ep - elif name in entries: - yield entries[name] - - def run_script(self, requires, script_name): - """Locate distribution for `requires` and run `script_name` script""" - ns = sys._getframe(1).f_globals - name = ns['__name__'] - ns.clear() - ns['__name__'] = name - self.require(requires)[0].run_script(script_name, ns) - - - - def __iter__(self): - """Yield distributions for non-duplicate projects in the working set - - The yield order is the order in which the items' path entries were - added to the working set. - """ - seen = {} - for item in self.entries: - for key in self.entry_keys[item]: - if key not in seen: - seen[key]=1 - yield self.by_key[key] - - def add(self, dist, entry=None, insert=True): - """Add `dist` to working set, associated with `entry` - - If `entry` is unspecified, it defaults to the ``.location`` of `dist`. - On exit from this routine, `entry` is added to the end of the working - set's ``.entries`` (if it wasn't already present). - - `dist` is only added to the working set if it's for a project that - doesn't already have a distribution in the set. 
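As an aside, iter_entry_points above is the usual discovery path for plugins; for instance, the zfec and zunfec console scripts seen in the setup.py earlier in this patch could be enumerated like this (illustrative sketch):

    ws = WorkingSet()                    # scans sys.path by default
    for ep in ws.iter_entry_points("console_scripts"):
        print ep.name                    # e.g. "zfec", "zunfec"
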
If it's added, any - callbacks registered with the ``subscribe()`` method will be called. - """ - if insert: - dist.insert_on(self.entries, entry) - - if entry is None: - entry = dist.location - keys = self.entry_keys.setdefault(entry,[]) - keys2 = self.entry_keys.setdefault(dist.location,[]) - if dist.key in self.by_key: - return # ignore hidden distros - - # If we have a __requires__ then we can already tell if this - # dist is unsatisfactory, in which case we won't add it. - if __requires__ is not None: - for thisreqstr in __requires__: - for thisreq in parse_requirements(thisreqstr): - if thisreq.key == dist.key: - if dist not in thisreq: - return - - - self.by_key[dist.key] = dist - if dist.key not in keys: - keys.append(dist.key) - if dist.key not in keys2: - keys2.append(dist.key) - self._added_new(dist) - - def resolve(self, requirements, env=None, installer=None): - """List all distributions needed to (recursively) meet `requirements` - - `requirements` must be a sequence of ``Requirement`` objects. `env`, - if supplied, should be an ``Environment`` instance. If - not supplied, it defaults to all distributions available within any - entry or distribution in the working set. `installer`, if supplied, - will be invoked with each requirement that cannot be met by an - already-installed distribution; it should return a ``Distribution`` or - ``None``. - """ - - requirements = list(requirements)[::-1] # set up the stack - processed = {} # set of processed requirements - best = {} # key -> dist - to_activate = [] - - while requirements: - req = requirements.pop(0) # process dependencies breadth-first - if req in processed: - # Ignore cyclic or redundant dependencies - continue - dist = best.get(req.key) - if dist is None: - # Find the best distribution and add it to the map - dist = self.by_key.get(req.key) - if dist is None: - if env is None: - env = Environment(self.entries) - dist = best[req.key] = env.best_match(req, self, installer) - if dist is None: - raise DistributionNotFound(req) # XXX put more info here - to_activate.append(dist) - if dist not in req: - # Oops, the "best" so far conflicts with a dependency - raise VersionConflict(dist,req) # XXX put more info here - requirements.extend(dist.requires(req.extras)[::-1]) - processed[req] = True - - return to_activate # return list of distros to activate - - def find_plugins(self, - plugin_env, full_env=None, installer=None, fallback=True - ): - """Find all activatable distributions in `plugin_env` - - Example usage:: - - distributions, errors = working_set.find_plugins( - Environment(plugin_dirlist) - ) - map(working_set.add, distributions) # add plugins+libs to sys.path - print "Couldn't load", errors # display errors - - The `plugin_env` should be an ``Environment`` instance that contains - only distributions that are in the project's "plugin directory" or - directories. The `full_env`, if supplied, should be an ``Environment`` - contains all currently-available distributions. If `full_env` is not - supplied, one is created automatically from the ``WorkingSet`` this - method is called on, which will typically mean that every directory on - ``sys.path`` will be scanned for distributions. - - `installer` is a standard installer callback as used by the - ``resolve()`` method. The `fallback` flag indicates whether we should - attempt to resolve older versions of a plugin if the newest version - cannot be resolved. 
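Driving resolve() above directly looks like this (a sketch; the requirement string is illustrative):

    ws = WorkingSet()
    needed = ws.resolve(parse_requirements("pyutil >= 1.3.19"))
    for dist in needed:
        ws.add(dist)    # activate each one, much as require() below does
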
- - This method returns a 2-tuple: (`distributions`, `error_info`), where - `distributions` is a list of the distributions found in `plugin_env` - that were loadable, along with any other distributions that are needed - to resolve their dependencies. `error_info` is a dictionary mapping - unloadable plugin distributions to an exception instance describing the - error that occurred. Usually this will be a ``DistributionNotFound`` or - ``VersionConflict`` instance. - """ - - plugin_projects = list(plugin_env) - plugin_projects.sort() # scan project names in alphabetic order - - error_info = {} - distributions = {} - - if full_env is None: - env = Environment(self.entries) - env += plugin_env - else: - env = full_env + plugin_env - - shadow_set = self.__class__([]) - map(shadow_set.add, self) # put all our entries in shadow_set - - for project_name in plugin_projects: - - for dist in plugin_env[project_name]: - - req = [dist.as_requirement()] - - try: - resolvees = shadow_set.resolve(req, env, installer) - - except ResolutionError,v: - error_info[dist] = v # save error info - if fallback: - continue # try the next older version of project - else: - break # give up on this project, keep going - - else: - map(shadow_set.add, resolvees) - distributions.update(dict.fromkeys(resolvees)) - - # success, no need to try any more versions of this project - break - - distributions = list(distributions) - distributions.sort() - - return distributions, error_info - - - - - - def require(self, *requirements): - """Ensure that distributions matching `requirements` are activated - - `requirements` must be a string or a (possibly-nested) sequence - thereof, specifying the distributions and versions required. The - return value is a sequence of the distributions that needed to be - activated to fulfill the requirements; all relevant distributions are - included, even if they were already activated in this working set. - """ - needed = self.resolve(parse_requirements(requirements)) - - for dist in needed: - self.add(dist) - - return needed - - def subscribe(self, callback): - """Invoke `callback` for all distributions (including existing ones)""" - if callback in self.callbacks: - return - self.callbacks.append(callback) - for dist in self: - callback(dist) - - def _added_new(self, dist): - for callback in self.callbacks: - callback(dist) - - def __getstate__(self): - return ( - self.entries[:], self.entry_keys.copy(), self.by_key.copy(), - self.callbacks[:] - ) - - def __setstate__(self, (entries, keys, by_key, callbacks)): - self.entries = entries[:] - self.entry_keys = keys.copy() - self.by_key = by_key.copy() - self.callbacks = callbacks[:] - - -class Environment(object): - """Searchable snapshot of distributions on a search path""" - - def __init__(self, search_path=None, platform=get_supported_platform(), python=PY_MAJOR): - """Snapshot distributions available on a search path - - Any distributions found on `search_path` are added to the environment. - `search_path` should be a sequence of ``sys.path`` items. If not - supplied, ``sys.path`` is used. - - `platform` is an optional string specifying the name of the platform - that platform-specific distributions must be compatible with. If - unspecified, it defaults to the current platform. `python` is an - optional string naming the desired version of Python (e.g. ``'2.4'``); - it defaults to the current version. 
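A typical Environment interaction, for illustration (the requirement string is made up; best_match is defined further down):

    env = Environment()                        # snapshot of sys.path
    req = Requirement.parse("zfec >= 1.0")     # illustrative requirement
    dist = env.best_match(req, WorkingSet())
    if dist is not None:
        print dist.location
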
- - You may explicitly set `platform` (and/or `python`) to ``None`` if you - wish to map *all* distributions, not just those compatible with the - running platform or Python version. - """ - self._distmap = {} - self._cache = {} - self.platform = platform - self.python = python - self.scan(search_path) - - def can_add(self, dist): - """Is distribution `dist` acceptable for this environment? - - The distribution must match the platform and python version - requirements specified when this environment was created, or False - is returned. - """ - return (self.python is None or dist.py_version is None - or dist.py_version==self.python) \ - and compatible_platforms(dist.platform,self.platform) - - def remove(self, dist): - """Remove `dist` from the environment""" - self._distmap[dist.key].remove(dist) - - def scan(self, search_path=None): - """Scan `search_path` for distributions usable in this environment - - Any distributions found are added to the environment. - `search_path` should be a sequence of ``sys.path`` items. If not - supplied, ``sys.path`` is used. Only distributions conforming to - the platform/python version defined at initialization are added. - """ - if search_path is None: - search_path = sys.path - - for item in search_path: - for dist in find_distributions(item): - self.add(dist) - - def __getitem__(self,project_name): - """Return a newest-to-oldest list of distributions for `project_name` - """ - try: - return self._cache[project_name] - except KeyError: - project_name = project_name.lower() - if project_name not in self._distmap: - return [] - - if project_name not in self._cache: - dists = self._cache[project_name] = self._distmap[project_name] - _sort_dists(dists) - - return self._cache[project_name] - - def add(self,dist): - """Add `dist` if we ``can_add()`` it and it isn't already added""" - if self.can_add(dist) and dist.has_version(): - dists = self._distmap.setdefault(dist.key,[]) - if dist not in dists: - dists.append(dist) - if dist.key in self._cache: - _sort_dists(self._cache[dist.key]) - - - def best_match(self, req, working_set, installer=None): - """Find distribution best matching `req` and usable on `working_set` - - This calls the ``find(req)`` method of the `working_set` to see if a - suitable distribution is already active. (This may raise - ``VersionConflict`` if an unsuitable version of the project is already - active in the specified `working_set`.) - - If a suitable distribution isn't active, this method returns the - newest platform-dependent distribution in the environment that meets - the ``Requirement`` in `req`. If no suitable platform-dependent - distribution is found, then the newest platform-independent - distribution that meets the requirement is returned. (A platform- - dependent distribution will typically have code compiled or - specialized for that platform.) - - Otherwise, if `installer` is supplied, then the result of calling the - environment's ``obtain(req, installer)`` method will be returned. - """ - dist = working_set.find(req) - if dist is not None: - return dist - - # first try to find a platform-dependent dist - for dist in self[req.key]: - if dist in req and dist.platform is not None: - return dist - - # then try any other dist - for dist in self[req.key]: - if dist in req: - return dist - - return self.obtain(req, installer) # try and download/install - - def obtain(self, requirement, installer=None): - """Obtain a distribution matching `requirement` (e.g. via download) - - Obtain a distro that matches requirement (e.g. 
via download). In the - base ``Environment`` class, this routine just returns - ``installer(requirement)``, unless `installer` is None, in which case - None is returned instead. This method is a hook that allows subclasses - to attempt other ways of obtaining a distribution before falling back - to the `installer` argument.""" - if installer is not None: - return installer(requirement) - - def __iter__(self): - """Yield the unique project names of the available distributions""" - for key in self._distmap.keys(): - if self[key]: yield key - - - - - def __iadd__(self, other): - """In-place addition of a distribution or environment""" - if isinstance(other,Distribution): - self.add(other) - elif isinstance(other,Environment): - for project in other: - for dist in other[project]: - self.add(dist) - else: - raise TypeError("Can't add %r to environment" % (other,)) - return self - - def __add__(self, other): - """Add an environment or distribution to an environment""" - new = self.__class__([], platform=None, python=None) - for env in self, other: - new += env - return new - - -AvailableDistributions = Environment # XXX backward compatibility - - -class ExtractionError(RuntimeError): - """An error occurred extracting a resource - - The following attributes are available from instances of this exception: - - manager - The resource manager that raised this exception - - cache_path - The base directory for resource extraction - - original_error - The exception instance that caused extraction to fail - """ - - - - -class ResourceManager: - """Manage resource extraction and packages""" - extraction_path = None - - def __init__(self): - self.cached_files = {} - - def resource_exists(self, package_or_requirement, resource_name): - """Does the named resource exist?""" - return get_provider(package_or_requirement).has_resource(resource_name) - - def resource_isdir(self, package_or_requirement, resource_name): - """Is the named resource an existing directory?""" - return get_provider(package_or_requirement).resource_isdir( - resource_name - ) - - def resource_filename(self, package_or_requirement, resource_name): - """Return a true filesystem path for specified resource""" - return get_provider(package_or_requirement).get_resource_filename( - self, resource_name - ) - - def resource_stream(self, package_or_requirement, resource_name): - """Return a readable file-like object for specified resource""" - return get_provider(package_or_requirement).get_resource_stream( - self, resource_name - ) - - def resource_string(self, package_or_requirement, resource_name): - """Return specified resource as a string""" - return get_provider(package_or_requirement).get_resource_string( - self, resource_name - ) - - def resource_listdir(self, package_or_requirement, resource_name): - """List the contents of the named resource directory""" - return get_provider(package_or_requirement).resource_listdir( - resource_name - ) - - def extraction_error(self): - """Give an error message for problems extracting file(s)""" - - old_exc = sys.exc_info()[1] - cache_path = self.extraction_path or get_default_cache() - - err = ExtractionError("""Can't extract file(s) to egg cache - -The following error occurred while trying to extract file(s) to the Python egg -cache: - - %s - -The Python egg cache directory is currently set to: - - %s - -Perhaps your account does not have write access to this directory? You can -change the cache directory by setting the PYTHON_EGG_CACHE environment -variable to point to an accessible directory. 
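As the message above suggests, one workaround is to point the cache at a writable directory before any extraction takes place (the path is illustrative)::

    import os
    os.environ['PYTHON_EGG_CACHE'] = '/var/tmp/my-egg-cache'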
-""" % (old_exc, cache_path) - ) - err.manager = self - err.cache_path = cache_path - err.original_error = old_exc - raise err - - - - - - - - - - - - - - - - def get_cache_path(self, archive_name, names=()): - """Return absolute location in cache for `archive_name` and `names` - - The parent directory of the resulting path will be created if it does - not already exist. `archive_name` should be the base filename of the - enclosing egg (which may not be the name of the enclosing zipfile!), - including its ".egg" extension. `names`, if provided, should be a - sequence of path name parts "under" the egg's extraction location. - - This method should only be called by resource providers that need to - obtain an extraction location, and only for names they intend to - extract, as it tracks the generated names for possible cleanup later. - """ - extract_path = self.extraction_path or get_default_cache() - target_path = os.path.join(extract_path, archive_name+'-tmp', *names) - try: - _bypass_ensure_directory(target_path) - except: - self.extraction_error() - - self.cached_files[target_path] = 1 - return target_path - - - - - - - - - - - - - - - - - - - - def postprocess(self, tempname, filename): - """Perform any platform-specific postprocessing of `tempname` - - This is where Mac header rewrites should be done; other platforms don't - have anything special they should do. - - Resource providers should call this method ONLY after successfully - extracting a compressed resource. They must NOT call it on resources - that are already in the filesystem. - - `tempname` is the current (temporary) name of the file, and `filename` - is the name it will be renamed to by the caller after this routine - returns. - """ - - if os.name == 'posix': - # Make the resource executable - mode = ((os.stat(tempname).st_mode) | 0555) & 07777 - os.chmod(tempname, mode) - - - - - - - - - - - - - - - - - - - - - - - def set_extraction_path(self, path): - """Set the base path where resources will be extracted to, if needed. - - If you do not call this routine before any extractions take place, the - path defaults to the return value of ``get_default_cache()``. (Which - is based on the ``PYTHON_EGG_CACHE`` environment variable, with various - platform-specific fallbacks. See that routine's documentation for more - details.) - - Resources are extracted to subdirectories of this path based upon - information given by the ``IResourceProvider``. You may set this to a - temporary directory, but then you must call ``cleanup_resources()`` to - delete the extracted files when done. There is no guarantee that - ``cleanup_resources()`` will be able to remove all extracted files. - - (Note: you may not change the extraction path for a given resource - manager once resources have been extracted, unless you first call - ``cleanup_resources()``.) - """ - if self.cached_files: - raise ValueError( - "Can't change extraction path, files already extracted" - ) - - self.extraction_path = path - - def cleanup_resources(self, force=False): - """ - Delete all extracted resource files and directories, returning a list - of the file and directory names that could not be successfully removed. - This function does not have any concurrency protection, so it should - generally only be called when the extraction path is a temporary - directory exclusive to a single process. 
This method is not - automatically called; you must call it explicitly or register it as an - ``atexit`` function if you wish to ensure cleanup of a temporary - directory used for extractions. - """ - # XXX - - - -def get_default_cache(): - """Determine the default cache location - - This returns the ``PYTHON_EGG_CACHE`` environment variable, if set. - Otherwise, on Windows, it returns a "Python-Eggs" subdirectory of the - "Application Data" directory. On all other systems, it's "~/.python-eggs". - """ - try: - return os.environ['PYTHON_EGG_CACHE'] - except KeyError: - pass - - if os.name!='nt': - return os.path.expanduser('~/.python-eggs') - - app_data = 'Application Data' # XXX this may be locale-specific! - app_homes = [ - (('APPDATA',), None), # best option, should be locale-safe - (('USERPROFILE',), app_data), - (('HOMEDRIVE','HOMEPATH'), app_data), - (('HOMEPATH',), app_data), - (('HOME',), None), - (('WINDIR',), app_data), # 95/98/ME - ] - - for keys, subdir in app_homes: - dirname = '' - for key in keys: - if key in os.environ: - dirname = os.path.join(dirname, os.environ[key]) - else: - break - else: - if subdir: - dirname = os.path.join(dirname,subdir) - return os.path.join(dirname, 'Python-Eggs') - else: - raise RuntimeError( - "Please set the PYTHON_EGG_CACHE environment variable" - ) - -def safe_name(name): - """Convert an arbitrary string to a standard distribution name - - Any runs of non-alphanumeric/. characters are replaced with a single '-'. - """ - return re.sub('[^A-Za-z0-9.]+', '-', name) - - -def safe_version(version): - """Convert an arbitrary string to a standard version string - - Spaces become dots, and all other non-alphanumeric characters become - dashes, with runs of multiple dashes condensed to a single dash. - """ - version = version.replace(' ','.') - return re.sub('[^A-Za-z0-9.]+', '-', version) - - -def safe_extra(extra): - """Convert an arbitrary string to a standard 'extra' name - - Any runs of non-alphanumeric characters are replaced with a single '_', - and the result is always lowercased. - """ - return re.sub('[^A-Za-z0-9.]+', '_', extra).lower() - - -def to_filename(name): - """Convert a project or version name to its filename-escaped form - - Any '-' characters are currently replaced with '_'.
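For example (illustrative inputs run through the helpers above)::

    >>> safe_name('foo/bar')
    'foo-bar'
    >>> safe_version('1.2 alpha 3')
    '1.2.alpha.3'
    >>> to_filename('my-project')
    'my_project'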
- """ - return name.replace('-','_') - - - - - - - - -class NullProvider: - """Try to implement resources and metadata for arbitrary PEP 302 loaders""" - - egg_name = None - egg_info = None - loader = None - - def __init__(self, module): - self.loader = getattr(module, '__loader__', None) - self.module_path = os.path.dirname(getattr(module, '__file__', '')) - - def get_resource_filename(self, manager, resource_name): - return self._fn(self.module_path, resource_name) - - def get_resource_stream(self, manager, resource_name): - return StringIO(self.get_resource_string(manager, resource_name)) - - def get_resource_string(self, manager, resource_name): - return self._get(self._fn(self.module_path, resource_name)) - - def has_resource(self, resource_name): - return self._has(self._fn(self.module_path, resource_name)) - - def has_metadata(self, name): - return self.egg_info and self._has(self._fn(self.egg_info,name)) - - def get_metadata(self, name): - if not self.egg_info: - return "" - return self._get(self._fn(self.egg_info,name)) - - def get_metadata_lines(self, name): - return yield_lines(self.get_metadata(name)) - - def resource_isdir(self,resource_name): - return self._isdir(self._fn(self.module_path, resource_name)) - - def metadata_isdir(self,name): - return self.egg_info and self._isdir(self._fn(self.egg_info,name)) - - - def resource_listdir(self,resource_name): - return self._listdir(self._fn(self.module_path,resource_name)) - - def metadata_listdir(self,name): - if self.egg_info: - return self._listdir(self._fn(self.egg_info,name)) - return [] - - def run_script(self,script_name,namespace): - script = 'scripts/'+script_name - if not self.has_metadata(script): - raise ResolutionError("No script named %r" % script_name) - script_text = self.get_metadata(script).replace('\r\n','\n') - script_text = script_text.replace('\r','\n') - script_filename = self._fn(self.egg_info,script) - namespace['__file__'] = script_filename - if os.path.exists(script_filename): - execfile(script_filename, namespace, namespace) - else: - from linecache import cache - cache[script_filename] = ( - len(script_text), 0, script_text.split('\n'), script_filename - ) - script_code = compile(script_text,script_filename,'exec') - exec script_code in namespace, namespace - - def _has(self, path): - raise NotImplementedError( - "Can't perform this operation for unregistered loader type" - ) - - def _isdir(self, path): - raise NotImplementedError( - "Can't perform this operation for unregistered loader type" - ) - - def _listdir(self, path): - raise NotImplementedError( - "Can't perform this operation for unregistered loader type" - ) - - def _fn(self, base, resource_name): - if resource_name: - return os.path.join(base, *resource_name.split('/')) - return base - - def _get(self, path): - if hasattr(self.loader, 'get_data'): - return self.loader.get_data(path) - raise NotImplementedError( - "Can't perform this operation for loaders without 'get_data()'" - ) - -register_loader_type(object, NullProvider) - - -class EggProvider(NullProvider): - """Provider based on a virtual filesystem""" - - def __init__(self,module): - NullProvider.__init__(self,module) - self._setup_prefix() - - def _setup_prefix(self): - # we assume here that our metadata may be nested inside a "basket" - # of multiple eggs; that's why we use module_path instead of .archive - path = self.module_path - old = None - while path!=old: - if path.lower().endswith('.egg'): - self.egg_name = os.path.basename(path) - self.egg_info = os.path.join(path, 
'EGG-INFO') - self.egg_root = path - break - old = path - path, base = os.path.split(path) - - - - - - -class DefaultProvider(EggProvider): - """Provides access to package resources in the filesystem""" - - def _has(self, path): - return os.path.exists(path) - - def _isdir(self,path): - return os.path.isdir(path) - - def _listdir(self,path): - return os.listdir(path) - - def get_resource_stream(self, manager, resource_name): - return open(self._fn(self.module_path, resource_name), 'rb') - - def _get(self, path): - stream = open(path, 'rb') - try: - return stream.read() - finally: - stream.close() - -register_loader_type(type(None), DefaultProvider) - - -class EmptyProvider(NullProvider): - """Provider that returns nothing for all requests""" - - _isdir = _has = lambda self,path: False - _get = lambda self,path: '' - _listdir = lambda self,path: [] - module_path = None - - def __init__(self): - pass - -empty_provider = EmptyProvider() - - - - -class ZipProvider(EggProvider): - """Resource support for zips and eggs""" - - eagers = None - - def __init__(self, module): - EggProvider.__init__(self,module) - self.zipinfo = zipimport._zip_directory_cache[self.loader.archive] - self.zip_pre = self.loader.archive+os.sep - - def _zipinfo_name(self, fspath): - # Convert a virtual filename (full path to file) into a zipfile subpath - # usable with the zipimport directory cache for our target archive - if fspath.startswith(self.zip_pre): - return fspath[len(self.zip_pre):] - raise AssertionError( - "%s is not a subpath of %s" % (fspath,self.zip_pre) - ) - - def _parts(self,zip_path): - # Convert a zipfile subpath into an egg-relative path part list - fspath = self.zip_pre+zip_path # pseudo-fs path - if fspath.startswith(self.egg_root+os.sep): - return fspath[len(self.egg_root)+1:].split(os.sep) - raise AssertionError( - "%s is not a subpath of %s" % (fspath,self.egg_root) - ) - - def get_resource_filename(self, manager, resource_name): - if not self.egg_name: - raise NotImplementedError( - "resource_filename() only supported for .egg, not .zip" - ) - # no need to lock for extraction, since we use temp names - zip_path = self._resource_to_zip(resource_name) - eagers = self._get_eager_resources() - if '/'.join(self._parts(zip_path)) in eagers: - for name in eagers: - self._extract_resource(manager, self._eager_to_zip(name)) - return self._extract_resource(manager, zip_path) - - def _extract_resource(self, manager, zip_path): - - if zip_path in self._index(): - for name in self._index()[zip_path]: - last = self._extract_resource( - manager, os.path.join(zip_path, name) - ) - return os.path.dirname(last) # return the extracted directory name - - zip_stat = self.zipinfo[zip_path] - t,d,size = zip_stat[5], zip_stat[6], zip_stat[3] - date_time = ( - (d>>9)+1980, (d>>5)&0xF, d&0x1F, # ymd - (t&0xFFFF)>>11, (t>>5)&0x3F, (t&0x1F) * 2, 0, 0, -1 # hms, etc. 
- ) - timestamp = time.mktime(date_time) - - try: - real_path = manager.get_cache_path( - self.egg_name, self._parts(zip_path) - ) - - if os.path.isfile(real_path): - stat = os.stat(real_path) - if stat.st_size==size and stat.st_mtime==timestamp: - # size and stamp match, don't bother extracting - return real_path - - outf, tmpnam = _mkstemp(".$extract", dir=os.path.dirname(real_path)) - os.write(outf, self.loader.get_data(zip_path)) - os.close(outf) - utime(tmpnam, (timestamp,timestamp)) - manager.postprocess(tmpnam, real_path) - - try: - rename(tmpnam, real_path) - - except os.error: - if os.path.isfile(real_path): - stat = os.stat(real_path) - - if stat.st_size==size and stat.st_mtime==timestamp: - # size and stamp match, somebody did it just ahead of - # us, so we're done - return real_path - elif os.name=='nt': # Windows, del old file and retry - unlink(real_path) - rename(tmpnam, real_path) - return real_path - raise - - except os.error: - manager.extraction_error() # report a user-friendly error - - return real_path - - def _get_eager_resources(self): - if self.eagers is None: - eagers = [] - for name in ('native_libs.txt', 'eager_resources.txt'): - if self.has_metadata(name): - eagers.extend(self.get_metadata_lines(name)) - self.eagers = eagers - return self.eagers - - def _index(self): - try: - return self._dirindex - except AttributeError: - ind = {} - for path in self.zipinfo: - parts = path.split(os.sep) - while parts: - parent = os.sep.join(parts[:-1]) - if parent in ind: - ind[parent].append(parts[-1]) - break - else: - ind[parent] = [parts.pop()] - self._dirindex = ind - return ind - - def _has(self, fspath): - zip_path = self._zipinfo_name(fspath) - return zip_path in self.zipinfo or zip_path in self._index() - - def _isdir(self,fspath): - return self._zipinfo_name(fspath) in self._index() - - def _listdir(self,fspath): - return list(self._index().get(self._zipinfo_name(fspath), ())) - - def _eager_to_zip(self,resource_name): - return self._zipinfo_name(self._fn(self.egg_root,resource_name)) - - def _resource_to_zip(self,resource_name): - return self._zipinfo_name(self._fn(self.module_path,resource_name)) - -register_loader_type(zipimport.zipimporter, ZipProvider) - - - - - - - - - - - - - - - - - - - - - - - - -class FileMetadata(EmptyProvider): - """Metadata handler for standalone PKG-INFO files - - Usage:: - - metadata = FileMetadata("/path/to/PKG-INFO") - - This provider rejects all data and metadata requests except for PKG-INFO, - which is treated as existing, and will be the contents of the file at - the provided location. 
- """ - - def __init__(self,path): - self.path = path - - def has_metadata(self,name): - return name=='PKG-INFO' - - def get_metadata(self,name): - if name=='PKG-INFO': - return open(self.path,'rU').read() - raise KeyError("No metadata except PKG-INFO is available") - - def get_metadata_lines(self,name): - return yield_lines(self.get_metadata(name)) - - - - - - - - - - - - - - - - -class PathMetadata(DefaultProvider): - """Metadata provider for egg directories - - Usage:: - - # Development eggs: - - egg_info = "/path/to/PackageName.egg-info" - base_dir = os.path.dirname(egg_info) - metadata = PathMetadata(base_dir, egg_info) - dist_name = os.path.splitext(os.path.basename(egg_info))[0] - dist = Distribution(basedir,project_name=dist_name,metadata=metadata) - - # Unpacked egg directories: - - egg_path = "/path/to/PackageName-ver-pyver-etc.egg" - metadata = PathMetadata(egg_path, os.path.join(egg_path,'EGG-INFO')) - dist = Distribution.from_filename(egg_path, metadata=metadata) - """ - - def __init__(self, path, egg_info): - self.module_path = path - self.egg_info = egg_info - - -class EggMetadata(ZipProvider): - """Metadata provider for .egg files""" - - def __init__(self, importer): - """Create a metadata provider from a zipimporter""" - - self.zipinfo = zipimport._zip_directory_cache[importer.archive] - self.zip_pre = importer.archive+os.sep - self.loader = importer - if importer.prefix: - self.module_path = os.path.join(importer.archive, importer.prefix) - else: - self.module_path = importer.archive - self._setup_prefix() - - -class ImpWrapper: - """PEP 302 Importer that wraps Python's "normal" import algorithm""" - - def __init__(self, path=None): - self.path = path - - def find_module(self, fullname, path=None): - subname = fullname.split(".")[-1] - if subname != fullname and self.path is None: - return None - if self.path is None: - path = None - else: - path = [self.path] - try: - file, filename, etc = imp.find_module(subname, path) - except ImportError: - return None - return ImpLoader(file, filename, etc) - - -class ImpLoader: - """PEP 302 Loader that wraps Python's "normal" import algorithm""" - - def __init__(self, file, filename, etc): - self.file = file - self.filename = filename - self.etc = etc - - def load_module(self, fullname): - try: - mod = imp.load_module(fullname, self.file, self.filename, self.etc) - finally: - if self.file: self.file.close() - # Note: we don't set __loader__ because we want the module to look - # normal; i.e. this is just a wrapper for standard import machinery - return mod - - - - -def get_importer(path_item): - """Retrieve a PEP 302 "importer" for the given path item - - If there is no importer, this returns a wrapper around the builtin import - machinery. The returned importer is only cached if it was created by a - path hook. 
- """ - try: - importer = sys.path_importer_cache[path_item] - except KeyError: - for hook in sys.path_hooks: - try: - importer = hook(path_item) - except ImportError: - pass - else: - break - else: - importer = None - - sys.path_importer_cache.setdefault(path_item,importer) - if importer is None: - try: - importer = ImpWrapper(path_item) - except ImportError: - pass - return importer - - - - - - - - - - - - - - -_declare_state('dict', _distribution_finders = {}) - -def register_finder(importer_type, distribution_finder): - """Register `distribution_finder` to find distributions in sys.path items - - `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item - handler), and `distribution_finder` is a callable that, passed a path - item and the importer instance, yields ``Distribution`` instances found on - that path item. See ``pkg_resources.find_on_path`` for an example.""" - _distribution_finders[importer_type] = distribution_finder - - -def find_distributions(path_item, only=False): - """Yield distributions accessible via `path_item`""" - importer = get_importer(path_item) - finder = _find_adapter(_distribution_finders, importer) - return finder(importer, path_item, only) - -def find_in_zip(importer, path_item, only=False): - metadata = EggMetadata(importer) - if metadata.has_metadata('PKG-INFO'): - yield Distribution.from_filename(path_item, metadata=metadata) - if only: - return # don't yield nested distros - for subitem in metadata.resource_listdir('/'): - if subitem.endswith('.egg'): - subpath = os.path.join(path_item, subitem) - for dist in find_in_zip(zipimport.zipimporter(subpath), subpath): - yield dist - -register_finder(zipimport.zipimporter, find_in_zip) - -def StringIO(*args, **kw): - """Thunk to load the real StringIO on demand""" - global StringIO - try: - from cStringIO import StringIO - except ImportError: - from StringIO import StringIO - return StringIO(*args,**kw) - -def find_nothing(importer, path_item, only=False): - return () -register_finder(object,find_nothing) - -def find_on_path(importer, path_item, only=False): - """Yield distributions accessible on a sys.path directory""" - path_item = _normalize_cached(path_item) - - if os.path.isdir(path_item) and os.access(path_item, os.R_OK): - if path_item.lower().endswith('.egg'): - # unpacked egg - yield Distribution.from_filename( - path_item, metadata=PathMetadata( - path_item, os.path.join(path_item,'EGG-INFO') - ) - ) - else: - # scan for .egg and .egg-info in directory - for entry in os.listdir(path_item): - lower = entry.lower() - if lower.endswith('.egg-info'): - fullpath = os.path.join(path_item, entry) - if os.path.isdir(fullpath): - # egg-info directory, allow getting metadata - metadata = PathMetadata(path_item, fullpath) - else: - metadata = FileMetadata(fullpath) - yield Distribution.from_location( - path_item,entry,metadata,precedence=DEVELOP_DIST - ) - elif not only and lower.endswith('.egg'): - for dist in find_distributions(os.path.join(path_item, entry)): - yield dist - elif not only and lower.endswith('.egg-link'): - for line in file(os.path.join(path_item, entry)): - if not line.strip(): continue - for item in find_distributions(os.path.join(path_item,line.rstrip())): - yield item - break -register_finder(ImpWrapper, find_on_path) - -_declare_state('dict', _namespace_handlers = {}) -_declare_state('dict', _namespace_packages = {}) - -def register_namespace_handler(importer_type, namespace_handler): - """Register `namespace_handler` to declare namespace packages - - `importer_type` 
is the type or class of a PEP 302 "Importer" (sys.path item - handler), and `namespace_handler` is a callable like this:: - - def namespace_handler(importer,path_entry,moduleName,module): - # return a path_entry to use for child packages - - Namespace handlers are only called if the importer object has already - agreed that it can handle the relevant path item, and they should only - return a subpath if the module __path__ does not already contain an - equivalent subpath. For an example namespace handler, see - ``pkg_resources.file_ns_handler``. - """ - _namespace_handlers[importer_type] = namespace_handler - -def _handle_ns(packageName, path_item): - """Ensure that named package includes a subpath of path_item (if needed)""" - importer = get_importer(path_item) - if importer is None: - return None - loader = importer.find_module(packageName) - if loader is None: - return None - module = sys.modules.get(packageName) - if module is None: - module = sys.modules[packageName] = imp.new_module(packageName) - module.__path__ = []; _set_parent_ns(packageName) - elif not hasattr(module,'__path__'): - raise TypeError("Not a package:", packageName) - handler = _find_adapter(_namespace_handlers, importer) - subpath = handler(importer,path_item,packageName,module) - if subpath is not None: - path = module.__path__; path.append(subpath) - loader.load_module(packageName); module.__path__ = path - return subpath - -def declare_namespace(packageName): - """Declare that package 'packageName' is a namespace package""" - - imp.acquire_lock() - try: - if packageName in _namespace_packages: - return - - path, parent = sys.path, None - if '.' in packageName: - parent = '.'.join(packageName.split('.')[:-1]) - declare_namespace(parent) - __import__(parent) - try: - path = sys.modules[parent].__path__ - except AttributeError: - raise TypeError("Not a package:", parent) - - # Track what packages are namespaces, so when new path items are added, - # they can be updated - _namespace_packages.setdefault(parent,[]).append(packageName) - _namespace_packages.setdefault(packageName,[]) - - for path_item in path: - # Ensure all the parent's path items are reflected in the child, - # if they apply - _handle_ns(packageName, path_item) - - finally: - imp.release_lock() - -def fixup_namespace_packages(path_item, parent=None): - """Ensure that previously-declared namespace packages include path_item""" - imp.acquire_lock() - try: - for package in _namespace_packages.get(parent,()): - subpath = _handle_ns(package, path_item) - if subpath: fixup_namespace_packages(subpath,package) - finally: - imp.release_lock() - -def file_ns_handler(importer, path_item, packageName, module): - """Compute an ns-package subpath for a filesystem or zipfile importer""" - - subpath = os.path.join(path_item, packageName.split('.')[-1]) - normalized = _normalize_cached(subpath) - for item in module.__path__: - if _normalize_cached(item)==normalized: - break - else: - # Only return the path if it's not already there - return subpath - -register_namespace_handler(ImpWrapper,file_ns_handler) -register_namespace_handler(zipimport.zipimporter,file_ns_handler) - - -def null_ns_handler(importer, path_item, packageName, module): - return None - -register_namespace_handler(object,null_ns_handler) - - -def normalize_path(filename): - """Normalize a file/dir name for comparison purposes""" - return os.path.normcase(os.path.realpath(filename)) - -def _normalize_cached(filename,_cache={}): - try: - return _cache[filename] - except KeyError: - _cache[filename] = 
result = normalize_path(filename) - return result - -def _set_parent_ns(packageName): - parts = packageName.split('.') - name = parts.pop() - if parts: - parent = '.'.join(parts) - setattr(sys.modules[parent], name, sys.modules[packageName]) - - -def yield_lines(strs): - """Yield non-empty/non-comment lines of a ``basestring`` or sequence""" - if isinstance(strs,basestring): - for s in strs.splitlines(): - s = s.strip() - if s and not s.startswith('#'): # skip blank lines/comments - yield s - else: - for ss in strs: - for s in yield_lines(ss): - yield s - -LINE_END = re.compile(r"\s*(#.*)?$").match # whitespace and comment -CONTINUE = re.compile(r"\s*\\\s*(#.*)?$").match # line continuation -DISTRO = re.compile(r"\s*((\w|[-.])+)").match # Distribution or extra -VERSION = re.compile(r"\s*(<=?|>=?|==|!=)\s*((\w|[-.])+)").match # ver. info -COMMA = re.compile(r"\s*,").match # comma between items -OBRACKET = re.compile(r"\s*\[").match -CBRACKET = re.compile(r"\s*\]").match -MODULE = re.compile(r"\w+(\.\w+)*$").match -EGG_NAME = re.compile( - r"(?P<name>[^-]+)" - r"( -(?P<ver>[^-]+) (-py(?P<pyver>[^-]+) (-(?P<plat>.+))? )? )?", - re.VERBOSE | re.IGNORECASE -).match - -component_re = re.compile(r'(\d+ | [a-z]+ | \.| -)', re.VERBOSE) -replace = {'pre':'c', 'preview':'c','-':'final-','rc':'c','dev':'@'}.get - -def _parse_version_parts(s): - for part in component_re.split(s): - part = replace(part,part) - if not part or part=='.': - continue - if part[:1] in '0123456789': - yield part.zfill(8) # pad for numeric comparison - else: - yield '*'+part - - yield '*final' # ensure that alpha/beta/candidate are before final - -def parse_version(s): - """Convert a version string to a chronologically-sortable key - - This is a rough cross between distutils' StrictVersion and LooseVersion; - if you give it versions that would work with StrictVersion, then it behaves - the same; otherwise it acts like a slightly-smarter LooseVersion. It is - *possible* to create pathological version coding schemes that will fool - this parser, but they should be very rare in practice. - - The returned value will be a tuple of strings. Numeric portions of the - version are padded to 8 digits so they will compare numerically, but - without relying on how numbers compare relative to strings. Dots are - dropped, but dashes are retained. Trailing zeros between alpha segments - or dashes are suppressed, so that e.g. "2.4.0" is considered the same as - "2.4". Alphanumeric parts are lower-cased. - - The algorithm assumes that strings like "-" and any alpha string that - alphabetically follows "final" represents a "patch level". So, "2.4-1" - is assumed to be a branch or patch of "2.4", and therefore "2.4.1" is - considered newer than "2.4-1", which in turn is newer than "2.4". - - Strings like "a", "b", "c", "alpha", "beta", "candidate" and so on (that - come before "final" alphabetically) are assumed to be pre-release versions, - so that the version "2.4" is considered newer than "2.4a1". - - Finally, to handle miscellaneous cases, the strings "pre", "preview", and - "rc" are treated as if they were "c", i.e. as though they were release - candidates, and therefore are not as new as a version string that does not - contain them, and "dev" is replaced with an '@' so that it sorts lower than - any other pre-release tag.
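Those ordering rules can be checked directly::

    >>> parse_version('2.4a1') < parse_version('2.4') < parse_version('2.4-1')
    True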
- """ - parts = [] - for part in _parse_version_parts(s.lower()): - if part.startswith('*'): - if part<'*final': # remove '-' before a prerelease tag - while parts and parts[-1]=='*final-': parts.pop() - # remove trailing zeros from each series of numeric parts - while parts and parts[-1]=='00000000': - parts.pop() - parts.append(part) - return tuple(parts) - -class EntryPoint(object): - """Object representing an advertised importable object""" - - def __init__(self, name, module_name, attrs=(), extras=(), dist=None): - if not MODULE(module_name): - raise ValueError("Invalid module name", module_name) - self.name = name - self.module_name = module_name - self.attrs = tuple(attrs) - self.extras = Requirement.parse(("x[%s]" % ','.join(extras))).extras - self.dist = dist - - def __str__(self): - s = "%s = %s" % (self.name, self.module_name) - if self.attrs: - s += ':' + '.'.join(self.attrs) - if self.extras: - s += ' [%s]' % ','.join(self.extras) - return s - - def __repr__(self): - return "EntryPoint.parse(%r)" % str(self) - - def load(self, require=True, env=None, installer=None): - if require: self.require(env, installer) - entry = __import__(self.module_name, globals(),globals(), ['__name__']) - for attr in self.attrs: - try: - entry = getattr(entry,attr) - except AttributeError: - raise ImportError("%r has no %r attribute" % (entry,attr)) - return entry - - def require(self, env=None, installer=None): - if self.extras and not self.dist: - raise UnknownExtra("Can't require() without a distribution", self) - map(working_set.add, - working_set.resolve(self.dist.requires(self.extras),env,installer)) - - - - #@classmethod - def parse(cls, src, dist=None): - """Parse a single entry point from string `src` - - Entry point syntax follows the form:: - - name = some.module:some.attr [extra1,extra2] - - The entry name and module name are required, but the ``:attrs`` and - ``[extras]`` parts are optional - """ - try: - attrs = extras = () - name,value = src.split('=',1) - if '[' in value: - value,extras = value.split('[',1) - req = Requirement.parse("x["+extras) - if req.specs: raise ValueError - extras = req.extras - if ':' in value: - value,attrs = value.split(':',1) - if not MODULE(attrs.rstrip()): - raise ValueError - attrs = attrs.rstrip().split('.') - except ValueError: - raise ValueError( - "EntryPoint must be in 'name=module:attrs [extras]' format", - src - ) - else: - return cls(name.strip(), value.strip(), attrs, extras, dist) - - parse = classmethod(parse) - - - - - - - - - #@classmethod - def parse_group(cls, group, lines, dist=None): - """Parse an entry point group""" - if not MODULE(group): - raise ValueError("Invalid group name", group) - this = {} - for line in yield_lines(lines): - ep = cls.parse(line, dist) - if ep.name in this: - raise ValueError("Duplicate entry point", group, ep.name) - this[ep.name]=ep - return this - - parse_group = classmethod(parse_group) - - #@classmethod - def parse_map(cls, data, dist=None): - """Parse a map of entry point groups""" - if isinstance(data,dict): - data = data.items() - else: - data = split_sections(data) - maps = {} - for group, lines in data: - if group is None: - if not lines: - continue - raise ValueError("Entry points must be listed in groups") - group = group.strip() - if group in maps: - raise ValueError("Duplicate group name", group) - maps[group] = cls.parse_group(group, lines, dist) - return maps - - parse_map = classmethod(parse_map) - - - - - - -class Distribution(object): - """Wrap an actual or potential sys.path entry 
w/metadata""" - def __init__(self, - location=None, metadata=None, project_name=None, version=None, - py_version=PY_MAJOR, platform=None, precedence = EGG_DIST - ): - self.project_name = safe_name(project_name or 'Unknown') - if version is not None: - self._version = safe_version(version) - self.py_version = py_version - self.platform = platform - self.location = location - self.precedence = precedence - self._provider = metadata or empty_provider - - #@classmethod - def from_location(cls,location,basename,metadata=None,**kw): - project_name, version, py_version, platform = [None]*4 - basename, ext = os.path.splitext(basename) - if ext.lower() in (".egg",".egg-info"): - match = EGG_NAME(basename) - if match: - project_name, version, py_version, platform = match.group( - 'name','ver','pyver','plat' - ) - return cls( - location, metadata, project_name=project_name, version=version, - py_version=py_version, platform=platform, **kw - ) - from_location = classmethod(from_location) - - hashcmp = property( - lambda self: ( - getattr(self,'parsed_version',()), self.precedence, self.key, - -len(self.location or ''), self.location, self.py_version, - self.platform - ) - ) - def __cmp__(self, other): return cmp(self.hashcmp, other) - def __hash__(self): return hash(self.hashcmp) - - # These properties have to be lazy so that we don't have to load any - # metadata until/unless it's actually needed. (i.e., some distributions - # may not know their name or version without loading PKG-INFO) - - #@property - def key(self): - try: - return self._key - except AttributeError: - self._key = key = self.project_name.lower() - return key - key = property(key) - - #@property - def parsed_version(self): - try: - return self._parsed_version - except AttributeError: - self._parsed_version = pv = parse_version(self.version) - return pv - - parsed_version = property(parsed_version) - - #@property - def version(self): - try: - return self._version - except AttributeError: - for line in self._get_metadata('PKG-INFO'): - if line.lower().startswith('version:'): - self._version = safe_version(line.split(':',1)[1].strip()) - return self._version - else: - raise ValueError( - "Missing 'Version:' header and/or PKG-INFO file", self - ) - version = property(version) - - - - - #@property - def _dep_map(self): - try: - return self.__dep_map - except AttributeError: - dm = self.__dep_map = {None: []} - for name in 'requires.txt', 'depends.txt': - for extra,reqs in split_sections(self._get_metadata(name)): - if extra: extra = safe_extra(extra) - dm.setdefault(extra,[]).extend(parse_requirements(reqs)) - return dm - _dep_map = property(_dep_map) - - def requires(self,extras=()): - """List of Requirements needed for this distro if `extras` are used""" - dm = self._dep_map - deps = [] - deps.extend(dm.get(None,())) - for ext in extras: - try: - deps.extend(dm[safe_extra(ext)]) - except KeyError: - raise UnknownExtra( - "%s has no such extra feature %r" % (self, ext) - ) - return deps - - def _get_metadata(self,name): - if self.has_metadata(name): - for line in self.get_metadata_lines(name): - yield line - - def activate(self,path=None): - """Ensure distribution is importable on `path` (default=sys.path)""" - if path is None: path = sys.path - self.insert_on(path) - if path is sys.path: - fixup_namespace_packages(self.location) - for pkg in self._get_metadata('namespace_packages.txt'): - if pkg in sys.modules: declare_namespace(pkg) - - def egg_name(self): - """Return what this distribution's standard .egg filename should be""" - 
filename = "%s-%s-py%s" % ( - to_filename(self.project_name), to_filename(self.version), - self.py_version or PY_MAJOR - ) - - if self.platform: - filename += '-'+self.platform - return filename - - def __repr__(self): - if self.location: - return "%s (%s)" % (self,self.location) - else: - return str(self) - - def __str__(self): - try: version = getattr(self,'version',None) - except ValueError: version = None - version = version or "[unknown version]" - return "%s %s" % (self.project_name,version) - - def __getattr__(self,attr): - """Delegate all unrecognized public attributes to .metadata provider""" - if attr.startswith('_'): - raise AttributeError,attr - return getattr(self._provider, attr) - - #@classmethod - def from_filename(cls,filename,metadata=None, **kw): - return cls.from_location( - _normalize_cached(filename), os.path.basename(filename), metadata, - **kw - ) - from_filename = classmethod(from_filename) - - def as_requirement(self): - """Return a ``Requirement`` that matches this distribution exactly""" - return Requirement.parse('%s==%s' % (self.project_name, self.version)) - - def load_entry_point(self, group, name): - """Return the `name` entry point of `group` or raise ImportError""" - ep = self.get_entry_info(group,name) - if ep is None: - raise ImportError("Entry point %r not found" % ((group,name),)) - return ep.load() - - def get_entry_map(self, group=None): - """Return the entry point map for `group`, or the full entry map""" - try: - ep_map = self._ep_map - except AttributeError: - ep_map = self._ep_map = EntryPoint.parse_map( - self._get_metadata('entry_points.txt'), self - ) - if group is not None: - return ep_map.get(group,{}) - return ep_map - - def get_entry_info(self, group, name): - """Return the EntryPoint object for `group`+`name`, or ``None``""" - return self.get_entry_map(group).get(name) - - - - - - - - - - - - - - - - - - - - def insert_on(self, path, loc = None): - """Insert self.location in path before its nearest parent directory""" - - loc = loc or self.location - if not loc: - return - - nloc = _normalize_cached(loc) - bdir = os.path.dirname(nloc) - npath= [(p and _normalize_cached(p) or p) for p in path] - - bp = None - for p, item in enumerate(npath): - if item==nloc: - break - elif item==bdir and self.precedence==EGG_DIST: - # if it's an .egg, give it precedence over its directory - if path is sys.path: - self.check_version_conflict() - path.insert(p, loc) - npath.insert(p, nloc) - break - else: - if path is sys.path: - self.check_version_conflict() - path.append(loc) - return - - # p is the spot where we found or inserted loc; now remove duplicates - while 1: - try: - np = npath.index(nloc, p+1) - except ValueError: - break - else: - del npath[np], path[np] - p = np # ha! 
- - return - - - def check_version_conflict(self): - if self.key=='setuptools': - return # ignore the inevitable setuptools self-conflicts :( - - nsp = dict.fromkeys(self._get_metadata('namespace_packages.txt')) - loc = normalize_path(self.location) - for modname in self._get_metadata('top_level.txt'): - if (modname not in sys.modules or modname in nsp - or modname in _namespace_packages - ): - continue - - fn = getattr(sys.modules[modname], '__file__', None) - if fn and (normalize_path(fn).startswith(loc) or fn.startswith(loc)): - continue - issue_warning( - "Module %s was already imported from %s, but %s is being added" - " to sys.path" % (modname, fn, self.location), - ) - - def has_version(self): - try: - self.version - except ValueError: - issue_warning("Unbuilt egg for "+repr(self)) - return False - return True - - def clone(self,**kw): - """Copy this distribution, substituting in any changed keyword args""" - for attr in ( - 'project_name', 'version', 'py_version', 'platform', 'location', - 'precedence' - ): - kw.setdefault(attr, getattr(self,attr,None)) - kw.setdefault('metadata', self._provider) - return self.__class__(**kw) - - - - - #@property - def extras(self): - return [dep for dep in self._dep_map if dep] - extras = property(extras) - - -def issue_warning(*args,**kw): - level = 1 - g = globals() - try: - # find the first stack frame that is *not* code in - # the pkg_resources module, to use for the warning - while sys._getframe(level).f_globals is g: - level += 1 - except ValueError: - pass - from warnings import warn - warn(stacklevel = level+1, *args, **kw) - - - - - - - - - - - - - - - - - - - - - - - -def parse_requirements(strs): - """Yield ``Requirement`` objects for each specification in `strs` - - `strs` must be an instance of ``basestring``, or a (possibly-nested) - iterable thereof. 
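A quick illustration of the accepted syntax, using an invented project name::

    >>> req, = parse_requirements('FooPkg[bar]>=1.2,!=1.3')
    >>> req.project_name, req.extras, req.specs
    ('FooPkg', ('bar',), [('>=', '1.2'), ('!=', '1.3')])
    >>> '1.2.1' in req
    True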
- """ - # create a steppable iterator, so we can handle \-continuations - lines = iter(yield_lines(strs)) - - def scan_list(ITEM,TERMINATOR,line,p,groups,item_name): - - items = [] - - while not TERMINATOR(line,p): - if CONTINUE(line,p): - try: - line = lines.next(); p = 0 - except StopIteration: - raise ValueError( - "\\ must not appear on the last nonblank line" - ) - - match = ITEM(line,p) - if not match: - raise ValueError("Expected "+item_name+" in",line,"at",line[p:]) - - items.append(match.group(*groups)) - p = match.end() - - match = COMMA(line,p) - if match: - p = match.end() # skip the comma - elif not TERMINATOR(line,p): - raise ValueError( - "Expected ',' or end-of-list in",line,"at",line[p:] - ) - - match = TERMINATOR(line,p) - if match: p = match.end() # skip the terminator, if any - return line, p, items - - for line in lines: - match = DISTRO(line) - if not match: - raise ValueError("Missing distribution spec", line) - project_name = match.group(1) - p = match.end() - extras = [] - - match = OBRACKET(line,p) - if match: - p = match.end() - line, p, extras = scan_list( - DISTRO, CBRACKET, line, p, (1,), "'extra' name" - ) - - line, p, specs = scan_list(VERSION,LINE_END,line,p,(1,2),"version spec") - specs = [(op,safe_version(val)) for op,val in specs] - yield Requirement(project_name, specs, extras) - - -def _sort_dists(dists): - tmp = [(dist.hashcmp,dist) for dist in dists] - tmp.sort() - dists[::-1] = [d for hc,d in tmp] - - - - - - - - - - - - - - - - - -class Requirement: - def __init__(self, project_name, specs, extras): - """DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!""" - self.unsafe_name, project_name = project_name, safe_name(project_name) - self.project_name, self.key = project_name, project_name.lower() - index = [(parse_version(v),state_machine[op],op,v) for op,v in specs] - index.sort() - self.specs = [(op,ver) for parsed,trans,op,ver in index] - self.index, self.extras = index, tuple(map(safe_extra,extras)) - self.hashCmp = ( - self.key, tuple([(op,parsed) for parsed,trans,op,ver in index]), - frozenset(self.extras) - ) - self.__hash = hash(self.hashCmp) - - def __str__(self): - specs = ','.join([''.join(s) for s in self.specs]) - extras = ','.join(self.extras) - if extras: extras = '[%s]' % extras - return '%s%s%s' % (self.project_name, extras, specs) - - def __eq__(self,other): - return isinstance(other,Requirement) and self.hashCmp==other.hashCmp - - def __contains__(self,item): - if isinstance(item,Distribution): - if item.key != self.key: return False - if self.index: item = item.parsed_version # only get if we need it - elif isinstance(item,basestring): - item = parse_version(item) - last = None - for parsed,trans,op,ver in self.index: - action = trans[cmp(item,parsed)] - if action=='F': return False - elif action=='T': return True - elif action=='+': last = True - elif action=='-' or last is None: last = False - if last is None: last = True # no rules encountered - return last - - - def __hash__(self): - return self.__hash - - def __repr__(self): return "Requirement.parse(%r)" % str(self) - - #@staticmethod - def parse(s): - reqs = list(parse_requirements(s)) - if reqs: - if len(reqs)==1: - return reqs[0] - raise ValueError("Expected only one requirement", s) - raise ValueError("No requirements found", s) - - parse = staticmethod(parse) - -state_machine = { - # =>< - '<' : '--T', - '<=': 'T-T', - '>' : 'F+F', - '>=': 'T+F', - '==': 'T..', - '!=': 'F++', -} - - -def _get_mro(cls): - """Get an mro for a type or classic class""" - if not 
isinstance(cls,type): - class cls(cls,object): pass - return cls.__mro__[1:] - return cls.__mro__ - -def _find_adapter(registry, ob): - """Return an adapter factory for `ob` from `registry`""" - for t in _get_mro(getattr(ob, '__class__', type(ob))): - if t in registry: - return registry[t] - - -def ensure_directory(path): - """Ensure that the parent directory of `path` exists""" - dirname = os.path.dirname(path) - if not os.path.isdir(dirname): - os.makedirs(dirname) - -def split_sections(s): - """Split a string or iterable thereof into (section,content) pairs - - Each ``section`` is a stripped version of the section header ("[section]") - and each ``content`` is a list of stripped lines excluding blank lines and - comment-only lines. If there are any such lines before the first section - header, they're returned in a first ``section`` of ``None``. - """ - section = None - content = [] - for line in yield_lines(s): - if line.startswith("["): - if line.endswith("]"): - if section or content: - yield section, content - section = line[1:-1].strip() - content = [] - else: - raise ValueError("Invalid section heading", line) - else: - content.append(line) - - # wrap up last segment - yield section, content - -def _mkstemp(*args,**kw): - from tempfile import mkstemp - old_open = os.open - try: - os.open = os_open # temporarily bypass sandboxing - return mkstemp(*args,**kw) - finally: - os.open = old_open # and then put it back - - -# Set up global resource manager (deliberately not state-saved) -_manager = ResourceManager() -def _initialize(g): - for name in dir(_manager): - if not name.startswith('_'): - g[name] = getattr(_manager, name) -_initialize(globals()) - -# Prepare the master working set and make the ``require()`` API available -__requires__ = None -_declare_state('object', working_set = WorkingSet()) -try: - # Does the main program list any requirements? - from __main__ import __requires__ -except ImportError: - pass # No: just use the default working set based on sys.path -else: - # Yes: ensure the requirements are met, by prefixing sys.path if necessary - try: - working_set.require(__requires__) - except (VersionConflict, DistributionNotFound): # try it without defaults already on sys.path - working_set = WorkingSet([]) # by starting with an empty path - try: - for dist in working_set.resolve( - parse_requirements(__requires__), Environment() - ): - working_set.add(dist) - except DistributionNotFound: - pass - for entry in sys.path: # add any missing entries from sys.path - if entry not in working_set.entries: - working_set.add_entry(entry) - sys.path[:] = working_set.entries # then copy back to sys.path - -require = working_set.require -iter_entry_points = working_set.iter_entry_points -add_activation_listener = working_set.subscribe -run_script = working_set.run_script -run_main = run_script # backward compatibility -# Activate all distributions already on sys.path, and ensure that -# all distributions added to the working set in the future (e.g. by -# calling ``require()``) will get activated as well. 
-add_activation_listener(lambda dist: dist.activate()) -working_set.entries=[]; map(working_set.add_entry,sys.path) # match order - diff --git a/zfec/setuptools-0.6c16dev3.egg/setuptools/__init__.py b/zfec/setuptools-0.6c16dev3.egg/setuptools/__init__.py deleted file mode 100644 index d57448f..0000000 --- a/zfec/setuptools-0.6c16dev3.egg/setuptools/__init__.py +++ /dev/null @@ -1,90 +0,0 @@ -"""Extensions to the 'distutils' for large or complex distributions""" -from setuptools.extension import Extension, Library -from setuptools.dist import Distribution, Feature, _get_unpatched -import distutils.core, setuptools.command -from setuptools.depends import Require -from distutils.core import Command as _Command -from distutils.util import convert_path -import os.path -import os -import sys - -__version__ = '0.6c16dev3' -__all__ = [ - 'setup', 'Distribution', 'Feature', 'Command', 'Extension', 'Require', - 'find_packages' -] - -bootstrap_install_from = None - -def find_packages(where='.', exclude=()): - """Return a list of all Python packages found within directory 'where' - - 'where' should be supplied as a "cross-platform" (i.e. URL-style) path; it - will be converted to the appropriate local path syntax. 'exclude' is a - sequence of package names to exclude; '*' can be used as a wildcard in the - names, such that 'foo.*' will exclude all subpackages of 'foo' (but not - 'foo' itself). - """ - out = [] - stack=[(convert_path(where), '')] - while stack: - where,prefix = stack.pop(0) - for name in os.listdir(where): - fn = os.path.join(where,name) - if ('.' not in name and os.path.isdir(fn) and - os.path.isfile(os.path.join(fn,'__init__.py')) - ): - out.append(prefix+name); stack.append((fn,prefix+name+'.')) - for pat in list(exclude)+['ez_setup']: - from fnmatch import fnmatchcase - out = [item for item in out if not fnmatchcase(item,pat)] - return out - -setup = distutils.core.setup - -_Command = _get_unpatched(_Command) - -class Command(_Command): - __doc__ = _Command.__doc__ - - command_consumes_arguments = False - - def __init__(self, dist, **kw): - # Add support for keyword arguments - _Command.__init__(self,dist) - for k,v in kw.items(): - setattr(self,k,v) - - def reinitialize_command(self, command, reinit_subcommands=0, **kw): - cmd = _Command.reinitialize_command(self, command, reinit_subcommands) - for k,v in kw.items(): - setattr(cmd,k,v) # update command with keywords - return cmd - -import distutils.core -distutils.core.Command = Command # we can't patch distutils.cmd, alas - -def findall(dir = os.curdir): - """Find all files under 'dir' and return the list of full filenames - (relative to 'dir'). - """ - all_files = [] - for base, dirs, files in os.walk(dir): - if base==os.curdir or base.startswith(os.curdir+os.sep): - base = base[2:] - if base: - files = [os.path.join(base, f) for f in files] - all_files.extend(filter(os.path.isfile, files)) - return all_files - -import distutils.filelist -distutils.filelist.findall = findall # fix findall bug in distutils. - - -# sys.dont_write_bytecode was introduced in Python 2.6.
-if ((hasattr(sys, "dont_write_bytecode") and sys.dont_write_bytecode) or - (not hasattr(sys, "dont_write_bytecode") and os.environ.get("PYTHONDONTWRITEBYTECODE"))): - _dont_write_bytecode = True -else: - _dont_write_bytecode = False diff --git a/zfec/setuptools-0.6c16dev3.egg/setuptools/archive_util.py b/zfec/setuptools-0.6c16dev3.egg/setuptools/archive_util.py deleted file mode 100644 index cd4c3fb..0000000 --- a/zfec/setuptools-0.6c16dev3.egg/setuptools/archive_util.py +++ /dev/null @@ -1,205 +0,0 @@ -"""Utilities for extracting common archive formats""" - - -__all__ = [ - "unpack_archive", "unpack_zipfile", "unpack_tarfile", "default_filter", - "UnrecognizedFormat", "extraction_drivers", "unpack_directory", -] - -import zipfile, tarfile, os, shutil -from pkg_resources import ensure_directory -from distutils.errors import DistutilsError - -class UnrecognizedFormat(DistutilsError): - """Couldn't recognize the archive type""" - -def default_filter(src,dst): - """The default progress/filter callback; returns True for all files""" - return dst - - - - - - - - - - - - - - - - - - - - - - - -def unpack_archive(filename, extract_dir, progress_filter=default_filter, - drivers=None -): - """Unpack `filename` to `extract_dir`, or raise ``UnrecognizedFormat`` - - `progress_filter` is a function taking two arguments: a source path - internal to the archive ('/'-separated), and a filesystem path where it - will be extracted. The callback must return the desired extract path - (which may be the same as the one passed in), or else ``None`` to skip - that file or directory. The callback can thus be used to report on the - progress of the extraction, as well as to filter the items extracted or - alter their extraction paths. - - `drivers`, if supplied, must be a non-empty sequence of functions with the - same signature as this function (minus the `drivers` argument), that raise - ``UnrecognizedFormat`` if they do not support extracting the designated - archive type. The `drivers` are tried in sequence until one is found that - does not raise an error, or until all are exhausted (in which case - ``UnrecognizedFormat`` is raised). If you do not supply a sequence of - drivers, the module's ``extraction_drivers`` constant will be used, which - means that ``unpack_zipfile`` and ``unpack_tarfile`` will be tried, in that - order. 
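A small sketch of the callback in use (file and directory names are illustrative)::

    from setuptools.archive_util import unpack_archive

    def skip_docs(src, dst):
        if src.startswith('docs/'):
            return None        # returning None skips this entry
        return dst             # everything else extracts unchanged

    unpack_archive('FooPkg-1.0.egg', 'build/unpacked', skip_docs)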
- """ - for driver in drivers or extraction_drivers: - try: - driver(filename, extract_dir, progress_filter) - except UnrecognizedFormat: - continue - else: - return - else: - raise UnrecognizedFormat( - "Not a recognized archive type: %s" % filename - ) - - - - - - - -def unpack_directory(filename, extract_dir, progress_filter=default_filter): - """"Unpack" a directory, using the same interface as for archives - - Raises ``UnrecognizedFormat`` if `filename` is not a directory - """ - if not os.path.isdir(filename): - raise UnrecognizedFormat("%s is not a directory" % (filename,)) - - paths = {filename:('',extract_dir)} - for base, dirs, files in os.walk(filename): - src,dst = paths[base] - for d in dirs: - paths[os.path.join(base,d)] = src+d+'/', os.path.join(dst,d) - for f in files: - name = src+f - target = os.path.join(dst,f) - target = progress_filter(src+f, target) - if not target: - continue # skip non-files - ensure_directory(target) - f = os.path.join(base,f) - shutil.copyfile(f, target) - shutil.copystat(f, target) - - - - - - - - - - - - - - - - - - -def unpack_zipfile(filename, extract_dir, progress_filter=default_filter): - """Unpack zip `filename` to `extract_dir` - - Raises ``UnrecognizedFormat`` if `filename` is not a zipfile (as determined - by ``zipfile.is_zipfile()``). See ``unpack_archive()`` for an explanation - of the `progress_filter` argument. - """ - - if not zipfile.is_zipfile(filename): - raise UnrecognizedFormat("%s is not a zip file" % (filename,)) - - z = zipfile.ZipFile(filename) - try: - for info in z.infolist(): - name = info.filename - - # don't extract absolute paths or ones with .. in them - if name.startswith('/') or '..' in name: - continue - - target = os.path.join(extract_dir, *name.split('/')) - target = progress_filter(name, target) - if not target: - continue - if name.endswith('/'): - # directory - ensure_directory(target) - else: - # file - ensure_directory(target) - data = z.read(info.filename) - f = open(target,'wb') - try: - f.write(data) - finally: - f.close() - del data - finally: - z.close() - - -def unpack_tarfile(filename, extract_dir, progress_filter=default_filter): - """Unpack tar/tar.gz/tar.bz2 `filename` to `extract_dir` - - Raises ``UnrecognizedFormat`` if `filename` is not a tarfile (as determined - by ``tarfile.open()``). See ``unpack_archive()`` for an explanation - of the `progress_filter` argument. - """ - - try: - tarobj = tarfile.open(filename) - except tarfile.TarError: - raise UnrecognizedFormat( - "%s is not a compressed or uncompressed tar file" % (filename,) - ) - - try: - tarobj.chown = lambda *args: None # don't do any chowning! - for member in tarobj: - if member.isfile() or member.isdir(): - name = member.name - # don't extract absolute paths or ones with .. in them - if not name.startswith('/') and '..' 
not in name: - dst = os.path.join(extract_dir, *name.split('/')) - dst = progress_filter(name, dst) - if dst: - if dst.endswith(os.sep): - dst = dst[:-1] - try: - tarobj._extract_member(member,dst) # XXX Ugh - except tarfile.ExtractError: - pass # chown/chmod/mkfifo/mknode/makedev failed - return True - finally: - tarobj.close() - - - - -extraction_drivers = unpack_directory, unpack_zipfile, unpack_tarfile - - diff --git a/zfec/setuptools-0.6c16dev3.egg/setuptools/cli.exe b/zfec/setuptools-0.6c16dev3.egg/setuptools/cli.exe deleted file mode 100644 index 3173b2b..0000000 Binary files a/zfec/setuptools-0.6c16dev3.egg/setuptools/cli.exe and /dev/null differ diff --git a/zfec/setuptools-0.6c16dev3.egg/setuptools/command/__init__.py b/zfec/setuptools-0.6c16dev3.egg/setuptools/command/__init__.py deleted file mode 100644 index 80969f3..0000000 --- a/zfec/setuptools-0.6c16dev3.egg/setuptools/command/__init__.py +++ /dev/null @@ -1,20 +0,0 @@ -__all__ = [ - 'alias', 'bdist_egg', 'bdist_rpm', 'build_ext', 'build_py', 'develop', - 'easy_install', 'egg_info', 'install', 'install_lib', 'rotate', 'saveopts', - 'sdist', 'setopt', 'test', 'upload', 'install_egg_info', 'install_scripts', - 'register', 'bdist_wininst', 'scriptsetup', -] - -import sys -if sys.version>='2.5': - # In Python 2.5 and above, distutils includes its own upload command - __all__.remove('upload') - - -from distutils.command.bdist import bdist - -if 'egg' not in bdist.format_commands: - bdist.format_command['egg'] = ('bdist_egg', "Python .egg file") - bdist.format_commands.append('egg') - -del bdist, sys diff --git a/zfec/setuptools-0.6c16dev3.egg/setuptools/command/alias.py b/zfec/setuptools-0.6c16dev3.egg/setuptools/command/alias.py deleted file mode 100644 index 3e69ef6..0000000 --- a/zfec/setuptools-0.6c16dev3.egg/setuptools/command/alias.py +++ /dev/null @@ -1,79 +0,0 @@ -import distutils, os -from setuptools import Command -from distutils.util import convert_path -from distutils import log -from distutils.errors import * -from setuptools.command.setopt import edit_config, option_base, config_file - -def shquote(arg): - """Quote an argument for later parsing by shlex.split()""" - for c in '"', "'", "\\", "#": - if c in arg: return repr(arg) - if arg.split()!=[arg]: - return repr(arg) - return arg - - -class alias(option_base): - """Define a shortcut that invokes one or more commands""" - - description = "define a shortcut to invoke one or more commands" - command_consumes_arguments = True - - user_options = [ - ('remove', 'r', 'remove (unset) the alias'), - ] + option_base.user_options - - boolean_options = option_base.boolean_options + ['remove'] - - def initialize_options(self): - option_base.initialize_options(self) - self.args = None - self.remove = None - - def finalize_options(self): - option_base.finalize_options(self) - if self.remove and len(self.args)!=1: - raise DistutilsOptionError( - "Must specify exactly one argument (the alias name) when " - "using --remove" - ) - - def run(self): - aliases = self.distribution.get_option_dict('aliases') - - if not self.args: - print "Command Aliases" - print "---------------" - for alias in aliases: - print "setup.py alias", format_alias(alias, aliases) - return - - elif len(self.args)==1: - alias, = self.args - if self.remove: - command = None - elif alias in aliases: - print "setup.py alias", format_alias(alias, aliases) - return - else: - print "No alias definition found for %r" % alias - return - else: - alias = self.args[0] - command = ' '.join(map(shquote,self.args[1:])) - - 
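For context, a sketch of how shquote() behaves and how the alias command is typically driven; the alias name and its expansion are made up:

    >>> from setuptools.command.alias import shquote
    >>> shquote("plain")        # nothing to escape: returned as-is
    'plain'
    >>> shquote("two words")    # whitespace forces repr() quoting
    "'two words'"

    # typical shell usage:
    #   python setup.py alias test_all egg_info sdist bdist_egg
    #   python setup.py test_all           (now expands to all three commands)
    #   python setup.py alias -r test_all  (remove the alias again)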
edit_config(self.filename, {'aliases': {alias:command}}, self.dry_run) - - -def format_alias(name, aliases): - source, command = aliases[name] - if source == config_file('global'): - source = '--global-config ' - elif source == config_file('user'): - source = '--user-config ' - elif source == config_file('local'): - source = '' - else: - source = '--filename=%r' % source - return source+name+' '+command diff --git a/zfec/setuptools-0.6c16dev3.egg/setuptools/command/bdist_egg.py b/zfec/setuptools-0.6c16dev3.egg/setuptools/command/bdist_egg.py deleted file mode 100644 index 7e5a379..0000000 --- a/zfec/setuptools-0.6c16dev3.egg/setuptools/command/bdist_egg.py +++ /dev/null @@ -1,533 +0,0 @@ -"""setuptools.command.bdist_egg - -Build .egg distributions""" - -# This module should be kept compatible with Python 2.3 -import sys, os, marshal -from setuptools import Command -from distutils.dir_util import remove_tree, mkpath -from distutils.sysconfig import get_python_version, get_python_lib -from distutils import log -from distutils.errors import DistutilsSetupError -from pkg_resources import get_build_platform, Distribution, ensure_directory -from pkg_resources import EntryPoint -from types import CodeType -from setuptools.extension import Library - -def strip_module(filename): - if '.' in filename: - filename = os.path.splitext(filename)[0] - if filename.endswith('module'): - filename = filename[:-6] - return filename - -def write_stub(resource, pyfile): - f = open(pyfile,'w') - f.write('\n'.join([ - "def __bootstrap__():", - " global __bootstrap__, __loader__, __file__", - " import sys, pkg_resources, imp", - " __file__ = pkg_resources.resource_filename(__name__,%r)" - % resource, - " __loader__ = None; del __bootstrap__, __loader__", - " imp.load_dynamic(__name__,__file__)", - "__bootstrap__()", - "" # terminal \n - ])) - f.close() - -# stub __init__.py for packages distributed without one -NS_PKG_STUB = '__import__("pkg_resources").declare_namespace(__name__)' - -class bdist_egg(Command): - - description = "create an \"egg\" distribution" - - user_options = [ - ('bdist-dir=', 'b', - "temporary directory for creating the distribution"), - ('plat-name=', 'p', - "platform name to embed in generated filenames " - "(default: %s)" % get_build_platform()), - ('exclude-source-files', None, - "remove all .py files from the generated egg"), - ('keep-temp', 'k', - "keep the pseudo-installation tree around after " + - "creating the distribution archive"), - ('dist-dir=', 'd', - "directory to put final built distributions in"), - ('skip-build', None, - "skip rebuilding everything (for testing/debugging)"), - ] - - boolean_options = [ - 'keep-temp', 'skip-build', 'exclude-source-files' - ] - - - - - - - - - - - - - - - - - - def initialize_options (self): - self.bdist_dir = None - self.plat_name = None - self.keep_temp = 0 - self.dist_dir = None - self.skip_build = 0 - self.egg_output = None - self.exclude_source_files = None - - - def finalize_options(self): - ei_cmd = self.ei_cmd = self.get_finalized_command("egg_info") - self.egg_info = ei_cmd.egg_info - - if self.bdist_dir is None: - bdist_base = self.get_finalized_command('bdist').bdist_base - self.bdist_dir = os.path.join(bdist_base, 'egg') - - if self.plat_name is None: - self.plat_name = get_build_platform() - - self.set_undefined_options('bdist',('dist_dir', 'dist_dir')) - - if self.egg_output is None: - - # Compute filename of the output egg - basename = Distribution( - None, None, ei_cmd.egg_name, ei_cmd.egg_version, - get_python_version(), - 
self.distribution.has_ext_modules() and self.plat_name - ).egg_name() - - self.egg_output = os.path.join(self.dist_dir, basename+'.egg') - - - - - - - - - def do_install_data(self): - # Hack for packages that install data to install's --install-lib - self.get_finalized_command('install').install_lib = self.bdist_dir - - site_packages = os.path.normcase(os.path.realpath(get_python_lib())) - old, self.distribution.data_files = self.distribution.data_files,[] - - for item in old: - if isinstance(item,tuple) and len(item)==2: - if os.path.isabs(item[0]): - realpath = os.path.realpath(item[0]) - normalized = os.path.normcase(realpath) - if normalized==site_packages or normalized.startswith( - site_packages+os.sep - ): - item = realpath[len(site_packages)+1:], item[1] - # XXX else: raise ??? - self.distribution.data_files.append(item) - - try: - log.info("installing package data to %s" % self.bdist_dir) - self.call_command('install_data', force=0, root=None) - finally: - self.distribution.data_files = old - - - def get_outputs(self): - return [self.egg_output] - - - def call_command(self,cmdname,**kw): - """Invoke reinitialized command `cmdname` with keyword args""" - for dirname in INSTALL_DIRECTORY_ATTRS: - kw.setdefault(dirname,self.bdist_dir) - kw.setdefault('skip_build',self.skip_build) - kw.setdefault('dry_run', self.dry_run) - cmd = self.reinitialize_command(cmdname, **kw) - self.run_command(cmdname) - return cmd - - - def run(self): - # Generate metadata first - self.run_command("egg_info") - # We run install_lib before install_data, because some data hacks - # pull their data path from the install_lib command. - log.info("installing library code to %s" % self.bdist_dir) - instcmd = self.get_finalized_command('install') - old_root = instcmd.root; instcmd.root = None - if self.distribution.has_c_libraries() and not self.skip_build: - self.run_command('build_clib') - cmd = self.call_command('install_lib', warn_dir=0) - instcmd.root = old_root - - all_outputs, ext_outputs = self.get_ext_outputs() - self.stubs = [] - to_compile = [] - for (p,ext_name) in enumerate(ext_outputs): - filename,ext = os.path.splitext(ext_name) - pyfile = os.path.join(self.bdist_dir, strip_module(filename)+'.py') - self.stubs.append(pyfile) - log.info("creating stub loader for %s" % ext_name) - if not self.dry_run: - write_stub(os.path.basename(ext_name), pyfile) - to_compile.append(pyfile) - ext_outputs[p] = ext_name.replace(os.sep,'/') - - to_compile.extend(self.make_init_files()) - if to_compile: - cmd.byte_compile(to_compile) - if self.distribution.data_files: - self.do_install_data() - - # Make the EGG-INFO directory - archive_root = self.bdist_dir - egg_info = os.path.join(archive_root,'EGG-INFO') - self.mkpath(egg_info) - if self.distribution.scripts: - script_dir = os.path.join(egg_info, 'scripts') - log.info("installing scripts to %s" % script_dir) - self.call_command('install_scripts',install_dir=script_dir,no_ep=1) - - self.copy_metadata_to(egg_info) - native_libs = os.path.join(egg_info, "native_libs.txt") - if all_outputs: - log.info("writing %s" % native_libs) - if not self.dry_run: - ensure_directory(native_libs) - libs_file = open(native_libs, 'wt') - libs_file.write('\n'.join(all_outputs)) - libs_file.write('\n') - libs_file.close() - elif os.path.isfile(native_libs): - log.info("removing %s" % native_libs) - if not self.dry_run: - os.unlink(native_libs) - - write_safety_flag( - os.path.join(archive_root,'EGG-INFO'), self.zip_safe() - ) - - if 
os.path.exists(os.path.join(self.egg_info,'depends.txt')): - log.warn( - "WARNING: 'depends.txt' will not be used by setuptools 0.6!\n" - "Use the install_requires/extras_require setup() args instead." - ) - - if self.exclude_source_files: - self.zap_pyfiles() - - # Make the archive - make_zipfile(self.egg_output, archive_root, verbose=self.verbose, - dry_run=self.dry_run, mode=self.gen_header()) - if not self.keep_temp: - remove_tree(self.bdist_dir, dry_run=self.dry_run) - - # Add to 'Distribution.dist_files' so that the "upload" command works - getattr(self.distribution,'dist_files',[]).append( - ('bdist_egg',get_python_version(),self.egg_output)) - - - - - def zap_pyfiles(self): - log.info("Removing .py files from temporary directory") - for base,dirs,files in walk_egg(self.bdist_dir): - for name in files: - if name.endswith('.py'): - path = os.path.join(base,name) - log.debug("Deleting %s", path) - os.unlink(path) - - def zip_safe(self): - safe = getattr(self.distribution,'zip_safe',None) - if safe is not None: - return safe - log.warn("zip_safe flag not set; analyzing archive contents...") - return analyze_egg(self.bdist_dir, self.stubs) - - def make_init_files(self): - """Create missing package __init__ files""" - init_files = [] - for base,dirs,files in walk_egg(self.bdist_dir): - if base==self.bdist_dir: - # don't put an __init__ in the root - continue - for name in files: - if name.endswith('.py'): - if '__init__.py' not in files: - pkg = base[len(self.bdist_dir)+1:].replace(os.sep,'.') - if self.distribution.has_contents_for(pkg): - log.warn("Creating missing __init__.py for %s",pkg) - filename = os.path.join(base,'__init__.py') - if not self.dry_run: - f = open(filename,'w'); f.write(NS_PKG_STUB) - f.close() - init_files.append(filename) - break - else: - # not a package, don't traverse to subdirectories - dirs[:] = [] - - return init_files - - def gen_header(self): - epm = EntryPoint.parse_map(self.distribution.entry_points or '') - ep = epm.get('setuptools.installation',{}).get('eggsecutable') - if ep is None: - return 'w' # not an eggsecutable, do it the usual way. 
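For reference, an egg becomes "eggsecutable" when setup() declares an entry point in the group checked above; a sketch, with project and callable names hypothetical:

    from setuptools import setup

    setup(
        name="mytool",  # hypothetical
        version="0.1",
        py_modules=["mytool"],
        entry_points={
            "setuptools.installation": [
                # gen_header() below turns this into the shell stub that
                # is prepended to the .egg so it can be run directly
                "eggsecutable = mytool:main",
            ],
        },
    )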
- - if not ep.attrs or ep.extras: - raise DistutilsSetupError( - "eggsecutable entry point (%r) cannot have 'extras' " - "or refer to a module" % (ep,) - ) - - pyver = sys.version[:3] - pkg = ep.module_name - full = '.'.join(ep.attrs) - base = ep.attrs[0] - basename = os.path.basename(self.egg_output) - - header = ( - "#!/bin/sh\n" - 'if [ `basename $0` = "%(basename)s" ]\n' - 'then exec python%(pyver)s -c "' - "import sys, os; sys.path.insert(0, os.path.abspath('$0')); " - "from %(pkg)s import %(base)s; sys.exit(%(full)s())" - '" "$@"\n' - 'else\n' - ' echo $0 is not the correct name for this egg file.\n' - ' echo Please rename it back to %(basename)s and try again.\n' - ' exec false\n' - 'fi\n' - - ) % locals() - - if not self.dry_run: - mkpath(os.path.dirname(self.egg_output), dry_run=self.dry_run) - f = open(self.egg_output, 'w') - f.write(header) - f.close() - return 'a' - - - def copy_metadata_to(self, target_dir): - prefix = os.path.join(self.egg_info,'') - for path in self.ei_cmd.filelist.files: - if path.startswith(prefix): - target = os.path.join(target_dir, path[len(prefix):]) - ensure_directory(target) - self.copy_file(path, target) - - def get_ext_outputs(self): - """Get a list of relative paths to C extensions in the output distro""" - - all_outputs = [] - ext_outputs = [] - - paths = {self.bdist_dir:''} - for base, dirs, files in os.walk(self.bdist_dir): - for filename in files: - if os.path.splitext(filename)[1].lower() in NATIVE_EXTENSIONS: - all_outputs.append(paths[base]+filename) - for filename in dirs: - paths[os.path.join(base,filename)] = paths[base]+filename+'/' - - if self.distribution.has_ext_modules(): - build_cmd = self.get_finalized_command('build_ext') - for ext in build_cmd.extensions: - if isinstance(ext,Library): - continue - fullname = build_cmd.get_ext_fullname(ext.name) - filename = build_cmd.get_ext_filename(fullname) - if not os.path.basename(filename).startswith('dl-'): - if os.path.exists(os.path.join(self.bdist_dir,filename)): - ext_outputs.append(filename) - - return all_outputs, ext_outputs - - -NATIVE_EXTENSIONS = dict.fromkeys('.dll .so .dylib .pyd'.split()) - - - - -def walk_egg(egg_dir): - """Walk an unpacked egg's contents, skipping the metadata directory""" - walker = os.walk(egg_dir) - base,dirs,files = walker.next() - if 'EGG-INFO' in dirs: - dirs.remove('EGG-INFO') - yield base,dirs,files - for bdf in walker: - yield bdf - -def analyze_egg(egg_dir, stubs): - # check for existing flag in EGG-INFO - for flag,fn in safety_flags.items(): - if os.path.exists(os.path.join(egg_dir,'EGG-INFO',fn)): - return flag - if not can_scan(): return False - safe = True - for base, dirs, files in walk_egg(egg_dir): - for name in files: - if name.endswith('.py') or name.endswith('.pyw'): - continue - elif name.endswith('.pyc') or name.endswith('.pyo'): - # always scan, even if we already know we're not safe - safe = scan_module(egg_dir, base, name, stubs) and safe - return safe - -def write_safety_flag(egg_dir, safe): - # Write or remove zip safety flag file(s) - for flag,fn in safety_flags.items(): - fn = os.path.join(egg_dir, fn) - if os.path.exists(fn): - if safe is None or bool(safe)!=flag: - os.unlink(fn) - elif safe is not None and bool(safe)==flag: - f=open(fn,'wb'); f.write('\n'); f.close() - -safety_flags = { - True: 'zip-safe', - False: 'not-zip-safe', -} - -def scan_module(egg_dir, base, name, stubs): - """Check whether module possibly uses unsafe-for-zipfile stuff""" - - filename = os.path.join(base,name) - if filename[:-1] in stubs: - return 
True # Extension module - pkg = base[len(egg_dir)+1:].replace(os.sep,'.') - module = pkg+(pkg and '.' or '')+os.path.splitext(name)[0] - f = open(filename,'rb'); f.read(8) # skip magic & date - code = marshal.load(f); f.close() - safe = True - symbols = dict.fromkeys(iter_symbols(code)) - for bad in ['__file__', '__path__']: - if bad in symbols: - log.warn("%s: module references %s", module, bad) - safe = False - if 'inspect' in symbols: - for bad in [ - 'getsource', 'getabsfile', 'getsourcefile', 'getfile', - 'getsourcelines', 'findsource', 'getcomments', 'getframeinfo', - 'getinnerframes', 'getouterframes', 'stack', 'trace' - ]: - if bad in symbols: - log.warn("%s: module MAY be using inspect.%s", module, bad) - safe = False - if '__name__' in symbols and '__main__' in symbols and '.' not in module: - if sys.version[:3]=="2.4": # -m works w/zipfiles in 2.5 - log.warn("%s: top-level module may be 'python -m' script", module) - safe = False - return safe - -def iter_symbols(code): - """Yield names and strings used by `code` and its nested code objects""" - for name in code.co_names: yield name - for const in code.co_consts: - if isinstance(const,basestring): - yield const - elif isinstance(const,CodeType): - for name in iter_symbols(const): - yield name - -def can_scan(): - if not sys.platform.startswith('java') and sys.platform != 'cli': - # CPython, PyPy, etc. - return True - log.warn("Unable to analyze compiled code on this platform.") - log.warn("Please ask the author to include a 'zip_safe'" - " setting (either True or False) in the package's setup.py") - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -# Attribute names of options for commands that might need to be convinced to -# install to the egg build directory - -INSTALL_DIRECTORY_ATTRS = [ - 'install_lib', 'install_dir', 'install_data', 'install_base' -] - -def make_zipfile(zip_filename, base_dir, verbose=0, dry_run=0, compress=None, - mode='w' -): - """Create a zip file from all the files under 'base_dir'. The output - zip file will be named 'base_dir' + ".zip". Uses either the "zipfile" - Python module (if available) or the InfoZIP "zip" utility (if installed - and found on the default search path). If neither tool is available, - raises DistutilsExecError. Returns the name of the output zip file.
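The bytecode scan above is only a fallback; a package can settle the question up front by passing the zip_safe flag to setup(), e.g. (name hypothetical):

    from setuptools import setup

    setup(
        name="example",  # hypothetical
        version="0.1",
        packages=["example"],
        # bypasses analyze_egg(): write_safety_flag() records 'not-zip-safe'
        zip_safe=False,
    )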
- """ - import zipfile - mkpath(os.path.dirname(zip_filename), dry_run=dry_run) - log.info("creating '%s' and adding '%s' to it", zip_filename, base_dir) - - def visit(z, dirname, names): - for name in names: - path = os.path.normpath(os.path.join(dirname, name)) - if os.path.isfile(path): - p = path[len(base_dir)+1:] - if not dry_run: - z.write(path, p) - log.debug("adding '%s'" % p) - - if compress is None: - compress = (sys.version>="2.4") # avoid 2.3 zipimport bug when 64 bits - - compression = [zipfile.ZIP_STORED, zipfile.ZIP_DEFLATED][bool(compress)] - if not dry_run: - z = zipfile.ZipFile(zip_filename, mode, compression=compression) - os.path.walk(base_dir, visit, z) - z.close() - else: - os.path.walk(base_dir, visit, None) - return zip_filename -# diff --git a/zfec/setuptools-0.6c16dev3.egg/setuptools/command/bdist_rpm.py b/zfec/setuptools-0.6c16dev3.egg/setuptools/command/bdist_rpm.py deleted file mode 100644 index 8c48da3..0000000 --- a/zfec/setuptools-0.6c16dev3.egg/setuptools/command/bdist_rpm.py +++ /dev/null @@ -1,82 +0,0 @@ -# This is just a kludge so that bdist_rpm doesn't guess wrong about the -# distribution name and version, if the egg_info command is going to alter -# them, another kludge to allow you to build old-style non-egg RPMs, and -# finally, a kludge to track .rpm files for uploading when run on Python <2.5. - -from distutils.command.bdist_rpm import bdist_rpm as _bdist_rpm -import sys, os - -class bdist_rpm(_bdist_rpm): - - def initialize_options(self): - _bdist_rpm.initialize_options(self) - self.no_egg = None - - if sys.version<"2.5": - # Track for uploading any .rpm file(s) moved to self.dist_dir - def move_file(self, src, dst, level=1): - _bdist_rpm.move_file(self, src, dst, level) - if dst==self.dist_dir and src.endswith('.rpm'): - getattr(self.distribution,'dist_files',[]).append( - ('bdist_rpm', - src.endswith('.src.rpm') and 'any' or sys.version[:3], - os.path.join(dst, os.path.basename(src))) - ) - - def run(self): - self.run_command('egg_info') # ensure distro name is up-to-date - _bdist_rpm.run(self) - - - - - - - - - - - - - - def _make_spec_file(self): - version = self.distribution.get_version() - rpmversion = version.replace('-','_') - spec = _bdist_rpm._make_spec_file(self) - line23 = '%define version '+version - line24 = '%define version '+rpmversion - spec = [ - line.replace( - "Source0: %{name}-%{version}.tar", - "Source0: %{name}-%{unmangled_version}.tar" - ).replace( - "setup.py install ", - "setup.py install --single-version-externally-managed " - ).replace( - "%setup", - "%setup -n %{name}-%{unmangled_version}" - ).replace(line23,line24) - for line in spec - ] - spec.insert(spec.index(line24)+1, "%define unmangled_version "+version) - return spec - - - - - - - - - - - - - - - - - - - - diff --git a/zfec/setuptools-0.6c16dev3.egg/setuptools/command/bdist_wininst.py b/zfec/setuptools-0.6c16dev3.egg/setuptools/command/bdist_wininst.py deleted file mode 100644 index e8521f8..0000000 --- a/zfec/setuptools-0.6c16dev3.egg/setuptools/command/bdist_wininst.py +++ /dev/null @@ -1,82 +0,0 @@ -from distutils.command.bdist_wininst import bdist_wininst as _bdist_wininst -import os, sys - -class bdist_wininst(_bdist_wininst): - _good_upload = _bad_upload = None - - def create_exe(self, arcname, fullname, bitmap=None): - _bdist_wininst.create_exe(self, arcname, fullname, bitmap) - installer_name = self.get_installer_filename(fullname) - if self.target_version: - pyversion = self.target_version - # fix 2.5+ bdist_wininst ignoring --target-version spec - 
self._bad_upload = ('bdist_wininst', 'any', installer_name) - else: - pyversion = 'any' - self._good_upload = ('bdist_wininst', pyversion, installer_name) - - def _fix_upload_names(self): - good, bad = self._good_upload, self._bad_upload - dist_files = getattr(self.distribution, 'dist_files', []) - if bad in dist_files: - dist_files.remove(bad) - if good not in dist_files: - dist_files.append(good) - - def reinitialize_command (self, command, reinit_subcommands=0): - cmd = self.distribution.reinitialize_command( - command, reinit_subcommands) - if command in ('install', 'install_lib'): - cmd.install_lib = None # work around distutils bug - return cmd - - def run(self): - self._is_running = True - try: - _bdist_wininst.run(self) - self._fix_upload_names() - finally: - self._is_running = False - - - if not hasattr(_bdist_wininst, 'get_installer_filename'): - def get_installer_filename(self, fullname): - # Factored out to allow overriding in subclasses - if self.target_version: - # if we create an installer for a specific python version, - # it's better to include this in the name - installer_name = os.path.join(self.dist_dir, - "%s.win32-py%s.exe" % - (fullname, self.target_version)) - else: - installer_name = os.path.join(self.dist_dir, - "%s.win32.exe" % fullname) - return installer_name - # get_installer_filename() - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/zfec/setuptools-0.6c16dev3.egg/setuptools/command/build_ext.py b/zfec/setuptools-0.6c16dev3.egg/setuptools/command/build_ext.py deleted file mode 100644 index e0d5284..0000000 --- a/zfec/setuptools-0.6c16dev3.egg/setuptools/command/build_ext.py +++ /dev/null @@ -1,285 +0,0 @@ -from distutils.command.build_ext import build_ext as _du_build_ext -try: - # Attempt to use Pyrex for building extensions, if available - from Pyrex.Distutils.build_ext import build_ext as _build_ext -except ImportError: - _build_ext = _du_build_ext - -import os, sys -from distutils.file_util import copy_file -from setuptools.extension import Library -from distutils.ccompiler import new_compiler -from distutils.sysconfig import customize_compiler, get_config_var -get_config_var("LDSHARED") # make sure _config_vars is initialized -from distutils.sysconfig import _config_vars -from distutils import log -from distutils.errors import * - -have_rtld = False -use_stubs = False -libtype = 'shared' - -if sys.platform == "darwin": - use_stubs = True -elif os.name != 'nt': - try: - from dl import RTLD_NOW - have_rtld = True - use_stubs = True - except ImportError: - pass - -def if_dl(s): - if have_rtld: - return s - return '' - - - - - - -class build_ext(_build_ext): - def run(self): - """Build extensions in build directory, then copy if --inplace""" - old_inplace, self.inplace = self.inplace, 0 - _build_ext.run(self) - self.inplace = old_inplace - if old_inplace: - self.copy_extensions_to_source() - - def copy_extensions_to_source(self): - build_py = self.get_finalized_command('build_py') - for ext in self.extensions: - fullname = self.get_ext_fullname(ext.name) - filename = self.get_ext_filename(fullname) - modpath = fullname.split('.') - package = '.'.join(modpath[:-1]) - package_dir = build_py.get_package_dir(package) - dest_filename = os.path.join(package_dir,os.path.basename(filename)) - src_filename = os.path.join(self.build_lib,filename) - - # Always copy, even if source is older than destination, to ensure - # that the right extensions for the current Python/platform are - # used. 
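A sketch of a setup.py fragment that exercises this build_ext subclass; the module and source names are hypothetical. The copy_file() call below is what `python setup.py build_ext --inplace` ends up triggering:

    from setuptools import setup, Extension

    setup(
        name="example",  # hypothetical
        ext_modules=[
            Extension("example._speedups", sources=["example/_speedups.c"]),
        ],
    )
    # build out of tree, then copy the built .so/.pyd back beside the sources:
    #   python setup.py build_ext --inplace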
- copy_file( - src_filename, dest_filename, verbose=self.verbose, - dry_run=self.dry_run - ) - if ext._needs_stub: - self.write_stub(package_dir or os.curdir, ext, True) - - - if _build_ext is not _du_build_ext and not hasattr(_build_ext,'pyrex_sources'): - # Workaround for problems using some Pyrex versions w/SWIG and/or 2.4 - def swig_sources(self, sources, *otherargs): - # first do any Pyrex processing - sources = _build_ext.swig_sources(self, sources) or sources - # Then do any actual SWIG stuff on the remainder - return _du_build_ext.swig_sources(self, sources, *otherargs) - - - - def get_ext_filename(self, fullname): - filename = _build_ext.get_ext_filename(self,fullname) - if fullname in self.ext_map: - ext = self.ext_map[fullname] - if isinstance(ext,Library): - fn, ext = os.path.splitext(filename) - return self.shlib_compiler.library_filename(fn,libtype) - elif use_stubs and ext._links_to_dynamic: - d,fn = os.path.split(filename) - return os.path.join(d,'dl-'+fn) - return filename - - def initialize_options(self): - _build_ext.initialize_options(self) - self.shlib_compiler = None - self.shlibs = [] - self.ext_map = {} - - def finalize_options(self): - _build_ext.finalize_options(self) - self.extensions = self.extensions or [] - self.check_extensions_list(self.extensions) - self.shlibs = [ext for ext in self.extensions - if isinstance(ext,Library)] - if self.shlibs: - self.setup_shlib_compiler() - for ext in self.extensions: - ext._full_name = self.get_ext_fullname(ext.name) - for ext in self.extensions: - fullname = ext._full_name - self.ext_map[fullname] = ext - ltd = ext._links_to_dynamic = \ - self.shlibs and self.links_to_dynamic(ext) or False - ext._needs_stub = ltd and use_stubs and not isinstance(ext,Library) - filename = ext._file_name = self.get_ext_filename(fullname) - libdir = os.path.dirname(os.path.join(self.build_lib,filename)) - if ltd and libdir not in ext.library_dirs: - ext.library_dirs.append(libdir) - if ltd and use_stubs and os.curdir not in ext.runtime_library_dirs: - ext.runtime_library_dirs.append(os.curdir) - - def setup_shlib_compiler(self): - compiler = self.shlib_compiler = new_compiler( - compiler=self.compiler, dry_run=self.dry_run, force=self.force - ) - if sys.platform == "darwin": - tmp = _config_vars.copy() - try: - # XXX Help! I don't have any idea whether these are right... 
- _config_vars['LDSHARED'] = "gcc -Wl,-x -dynamiclib -undefined dynamic_lookup" - _config_vars['CCSHARED'] = " -dynamiclib" - _config_vars['SO'] = ".dylib" - customize_compiler(compiler) - finally: - _config_vars.clear() - _config_vars.update(tmp) - else: - customize_compiler(compiler) - - if self.include_dirs is not None: - compiler.set_include_dirs(self.include_dirs) - if self.define is not None: - # 'define' option is a list of (name,value) tuples - for (name,value) in self.define: - compiler.define_macro(name, value) - if self.undef is not None: - for macro in self.undef: - compiler.undefine_macro(macro) - if self.libraries is not None: - compiler.set_libraries(self.libraries) - if self.library_dirs is not None: - compiler.set_library_dirs(self.library_dirs) - if self.rpath is not None: - compiler.set_runtime_library_dirs(self.rpath) - if self.link_objects is not None: - compiler.set_link_objects(self.link_objects) - - # hack so distutils' build_extension() builds a library instead - compiler.link_shared_object = link_shared_object.__get__(compiler) - - - - def get_export_symbols(self, ext): - if isinstance(ext,Library): - return ext.export_symbols - return _build_ext.get_export_symbols(self,ext) - - def build_extension(self, ext): - _compiler = self.compiler - try: - if isinstance(ext,Library): - self.compiler = self.shlib_compiler - _build_ext.build_extension(self,ext) - if ext._needs_stub: - self.write_stub( - self.get_finalized_command('build_py').build_lib, ext - ) - finally: - self.compiler = _compiler - - def links_to_dynamic(self, ext): - """Return true if 'ext' links to a dynamic lib in the same package""" - # XXX this should check to ensure the lib is actually being built - # XXX as dynamic, and not just using a locally-found version or a - # XXX static-compiled version - libnames = dict.fromkeys([lib._full_name for lib in self.shlibs]) - pkg = '.'.join(ext._full_name.split('.')[:-1]+['']) - for libname in ext.libraries: - if pkg+libname in libnames: return True - return False - - def get_outputs(self): - outputs = _build_ext.get_outputs(self) - optimize = self.get_finalized_command('build_py').optimize - for ext in self.extensions: - if ext._needs_stub: - base = os.path.join(self.build_lib, *ext._full_name.split('.')) - outputs.append(base+'.py') - outputs.append(base+'.pyc') - if optimize: - outputs.append(base+'.pyo') - return outputs - - def write_stub(self, output_dir, ext, compile=False): - log.info("writing stub loader for %s to %s",ext._full_name, output_dir) - stub_file = os.path.join(output_dir, *ext._full_name.split('.'))+'.py' - if compile and os.path.exists(stub_file): - raise DistutilsError(stub_file+" already exists! 
Please delete.") - if not self.dry_run: - f = open(stub_file,'w') - f.write('\n'.join([ - "def __bootstrap__():", - " global __bootstrap__, __file__, __loader__", - " import sys, os, pkg_resources, imp"+if_dl(", dl"), - " __file__ = pkg_resources.resource_filename(__name__,%r)" - % os.path.basename(ext._file_name), - " del __bootstrap__", - " if '__loader__' in globals():", - " del __loader__", - if_dl(" old_flags = sys.getdlopenflags()"), - " old_dir = os.getcwd()", - " try:", - " os.chdir(os.path.dirname(__file__))", - if_dl(" sys.setdlopenflags(dl.RTLD_NOW)"), - " imp.load_dynamic(__name__,__file__)", - " finally:", - if_dl(" sys.setdlopenflags(old_flags)"), - " os.chdir(old_dir)", - "__bootstrap__()", - "" # terminal \n - ])) - f.close() - if compile: - from distutils.util import byte_compile - byte_compile([stub_file], optimize=0, - force=True, dry_run=self.dry_run) - optimize = self.get_finalized_command('install_lib').optimize - if optimize > 0: - byte_compile([stub_file], optimize=optimize, - force=True, dry_run=self.dry_run) - if os.path.exists(stub_file) and not self.dry_run: - os.unlink(stub_file) - - -if use_stubs or os.name=='nt': - # Build shared libraries - # - def link_shared_object(self, objects, output_libname, output_dir=None, - libraries=None, library_dirs=None, runtime_library_dirs=None, - export_symbols=None, debug=0, extra_preargs=None, - extra_postargs=None, build_temp=None, target_lang=None - ): self.link( - self.SHARED_LIBRARY, objects, output_libname, - output_dir, libraries, library_dirs, runtime_library_dirs, - export_symbols, debug, extra_preargs, extra_postargs, - build_temp, target_lang - ) -else: - # Build static libraries everywhere else - libtype = 'static' - - def link_shared_object(self, objects, output_libname, output_dir=None, - libraries=None, library_dirs=None, runtime_library_dirs=None, - export_symbols=None, debug=0, extra_preargs=None, - extra_postargs=None, build_temp=None, target_lang=None - ): - # XXX we need to either disallow these attrs on Library instances, - # or warn/abort here if set, or something... - #libraries=None, library_dirs=None, runtime_library_dirs=None, - #export_symbols=None, extra_preargs=None, extra_postargs=None, - #build_temp=None - - assert output_dir is None # distutils build_ext doesn't pass this - output_dir,filename = os.path.split(output_libname) - basename, ext = os.path.splitext(filename) - if self.library_filename("x").startswith('lib'): - # strip 'lib' prefix; this is kludgy if some platform uses - # a different prefix - basename = basename[3:] - - self.create_static_lib( - objects, basename, output_dir, debug, target_lang - ) diff --git a/zfec/setuptools-0.6c16dev3.egg/setuptools/command/build_py.py b/zfec/setuptools-0.6c16dev3.egg/setuptools/command/build_py.py deleted file mode 100644 index c5e3d7f..0000000 --- a/zfec/setuptools-0.6c16dev3.egg/setuptools/command/build_py.py +++ /dev/null @@ -1,211 +0,0 @@ -import os.path, sys, fnmatch -from distutils.command.build_py import build_py as _build_py -from distutils.util import convert_path -from glob import glob - -class build_py(_build_py): - """Enhanced 'build_py' command that includes data files with packages - - The data files are specified via a 'package_data' argument to 'setup()'. - See 'setuptools.dist.Distribution' for more details. - - Also, this version of the 'build_py' command allows you to specify both - 'py_modules' and 'packages' in the same setup operation. 
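A sketch of the package_data argument this command consumes; package and glob names are hypothetical:

    from setuptools import setup

    setup(
        name="example",  # hypothetical
        packages=["example"],
        package_data={
            # '/'-separated globs, resolved relative to each package directory
            "example": ["templates/*.html", "data/*.json"],
        },
        include_package_data=True,  # also honor files listed via MANIFEST.in
    )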
- """ - def finalize_options(self): - _build_py.finalize_options(self) - self.package_data = self.distribution.package_data - self.exclude_package_data = self.distribution.exclude_package_data or {} - if 'data_files' in self.__dict__: del self.__dict__['data_files'] - - def run(self): - self.old_run() - if sys.platform == "win32": - from setuptools.command.scriptsetup import do_scriptsetup - do_scriptsetup() - - def old_run(self): - """Build modules, packages, and copy data files to build directory""" - if not self.py_modules and not self.packages: - return - - if self.py_modules: - self.build_modules() - - if self.packages: - self.build_packages() - self.build_package_data() - - # Only compile actual .py files, using our base class' idea of what our - # output files are. - self.byte_compile(_build_py.get_outputs(self, include_bytecode=0)) - - def __getattr__(self,attr): - if attr=='data_files': # lazily compute data files - self.data_files = files = self._get_data_files(); return files - return _build_py.__getattr__(self,attr) - - def _get_data_files(self): - """Generate list of '(package,src_dir,build_dir,filenames)' tuples""" - self.analyze_manifest() - data = [] - for package in self.packages or (): - # Locate package source directory - src_dir = self.get_package_dir(package) - - # Compute package build directory - build_dir = os.path.join(*([self.build_lib] + package.split('.'))) - - # Length of path to strip from found files - plen = len(src_dir)+1 - - # Strip directory from globbed filenames - filenames = [ - file[plen:] for file in self.find_data_files(package, src_dir) - ] - data.append( (package, src_dir, build_dir, filenames) ) - return data - - def find_data_files(self, package, src_dir): - """Return filenames for package's data files in 'src_dir'""" - globs = (self.package_data.get('', []) - + self.package_data.get(package, [])) - files = self.manifest_files.get(package, [])[:] - for pattern in globs: - # Each pattern has to be converted to a platform-specific path - files.extend(glob(os.path.join(src_dir, convert_path(pattern)))) - return self.exclude_data_files(package, src_dir, files) - - def build_package_data(self): - """Copy data files into build directory""" - lastdir = None - for package, src_dir, build_dir, filenames in self.data_files: - for filename in filenames: - target = os.path.join(build_dir, filename) - self.mkpath(os.path.dirname(target)) - self.copy_file(os.path.join(src_dir, filename), target) - - - def analyze_manifest(self): - self.manifest_files = mf = {} - if not self.distribution.include_package_data: - return - src_dirs = {} - for package in self.packages or (): - # Locate package source directory - src_dirs[assert_relative(self.get_package_dir(package))] = package - - self.run_command('egg_info') - ei_cmd = self.get_finalized_command('egg_info') - for path in ei_cmd.filelist.files: - d,f = os.path.split(assert_relative(path)) - prev = None - oldf = f - while d and d!=prev and d not in src_dirs: - prev = d - d, df = os.path.split(d) - f = os.path.join(df, f) - if d in src_dirs: - if path.endswith('.py') and f==oldf: - continue # it's a module, not data - mf.setdefault(src_dirs[d],[]).append(path) - - def get_data_files(self): pass # kludge 2.4 for lazy computation - - if sys.version<"2.4": # Python 2.4 already has this code - def get_outputs(self, include_bytecode=1): - """Return complete list of files copied to the build directory - - This includes both '.py' files and data files, as well as '.pyc' - and '.pyo' files if 'include_bytecode' is true. 
(This method is - needed for the 'install_lib' command to do its job properly, and to - generate a correct installation manifest.) - """ - return _build_py.get_outputs(self, include_bytecode) + [ - os.path.join(build_dir, filename) - for package, src_dir, build_dir,filenames in self.data_files - for filename in filenames - ] - - def check_package(self, package, package_dir): - """Check namespace packages' __init__ for declare_namespace""" - try: - return self.packages_checked[package] - except KeyError: - pass - - init_py = _build_py.check_package(self, package, package_dir) - self.packages_checked[package] = init_py - - if not init_py or not self.distribution.namespace_packages: - return init_py - - for pkg in self.distribution.namespace_packages: - if pkg==package or pkg.startswith(package+'.'): - break - else: - return init_py - - f = open(init_py,'rU') - if 'declare_namespace' not in f.read(): - from distutils.errors import DistutilsError - raise DistutilsError( - "Namespace package problem: %s is a namespace package, but its\n" - "__init__.py does not call declare_namespace()! Please fix it.\n" - '(See the setuptools manual under "Namespace Packages" for ' - "details.)\n" % (package,) - ) - f.close() - return init_py - - def initialize_options(self): - self.packages_checked={} - _build_py.initialize_options(self) - - - - - - - - def exclude_data_files(self, package, src_dir, files): - """Filter filenames for package's data files in 'src_dir'""" - globs = (self.exclude_package_data.get('', []) - + self.exclude_package_data.get(package, [])) - bad = [] - for pattern in globs: - bad.extend( - fnmatch.filter( - files, os.path.join(src_dir, convert_path(pattern)) - ) - ) - bad = dict.fromkeys(bad) - seen = {} - return [ - f for f in files if f not in bad - and f not in seen and seen.setdefault(f,1) # ditch dupes - ] - - -def assert_relative(path): - if not os.path.isabs(path): - return path - from distutils.errors import DistutilsSetupError - raise DistutilsSetupError( -"""Error: setup script specifies an absolute path: - - %s - -setup() arguments must *always* be /-separated paths relative to the -setup.py directory, *never* absolute paths. 
-""" % path - ) - - - - - - - - - diff --git a/zfec/setuptools-0.6c16dev3.egg/setuptools/command/develop.py b/zfec/setuptools-0.6c16dev3.egg/setuptools/command/develop.py deleted file mode 100644 index 303f488..0000000 --- a/zfec/setuptools-0.6c16dev3.egg/setuptools/command/develop.py +++ /dev/null @@ -1,165 +0,0 @@ -from setuptools.command.easy_install import easy_install -from distutils.util import convert_path -from pkg_resources import Distribution, PathMetadata, normalize_path -from distutils import log -from distutils.errors import * -import sys, os, setuptools, glob - -class develop(easy_install): - """Set up package for development""" - - description = "install package in 'development mode'" - - user_options = easy_install.user_options + [ - ("uninstall", "u", "Uninstall this source package"), - ("egg-path=", None, "Set the path to be used in the .egg-link file"), - ] - - boolean_options = easy_install.boolean_options + ['uninstall'] - - command_consumes_arguments = False # override base - - def run(self): - self.old_run() - if sys.platform == "win32": - from setuptools.command.scriptsetup import do_scriptsetup - do_scriptsetup() - - def old_run(self): - if self.uninstall: - self.multi_version = True - self.uninstall_link() - else: - self.install_for_development() - self.warn_deprecated_options() - - def initialize_options(self): - self.uninstall = None - self.egg_path = None - easy_install.initialize_options(self) - self.setup_path = None - self.always_copy_from = '.' # always copy eggs installed in curdir - - def finalize_options(self): - ei = self.get_finalized_command("egg_info") - if ei.broken_egg_info: - raise DistutilsError( - "Please rename %r to %r before using 'develop'" - % (ei.egg_info, ei.broken_egg_info) - ) - self.args = [ei.egg_name] - easy_install.finalize_options(self) - # pick up setup-dir .egg files only: no .egg-info - self.package_index.scan(glob.glob('*.egg')) - - self.egg_link = os.path.join(self.install_dir, ei.egg_name+'.egg-link') - self.egg_base = ei.egg_base - if self.egg_path is None: - self.egg_path = os.path.abspath(ei.egg_base) - - target = normalize_path(self.egg_base) - if normalize_path(os.path.join(self.install_dir, self.egg_path)) != target: - raise DistutilsOptionError( - "--egg-path must be a relative path from the install" - " directory to "+target - ) - - # Make a distribution for the package's source - self.dist = Distribution( - target, - PathMetadata(target, os.path.abspath(ei.egg_info)), - project_name = ei.egg_name - ) - - p = self.egg_base.replace(os.sep,'/') - if p!= os.curdir: - p = '../' * (p.count('/')+1) - self.setup_path = p - p = normalize_path(os.path.join(self.install_dir, self.egg_path, p)) - if p != normalize_path(os.curdir): - raise DistutilsOptionError( - "Can't get a consistent path to setup script from" - " installation directory", p, normalize_path(os.curdir)) - - def install_for_development(self): - # Ensure metadata is up-to-date - self.run_command('egg_info') - # Build extensions in-place - self.reinitialize_command('build_ext', inplace=1) - self.run_command('build_ext') - self.install_site_py() # ensure that target dir is site-safe - if setuptools.bootstrap_install_from: - self.easy_install(setuptools.bootstrap_install_from) - setuptools.bootstrap_install_from = None - - # create an .egg-link in the installation dir, pointing to our egg - log.info("Creating %s (link to %s)", self.egg_link, self.egg_base) - if not self.dry_run: - f = open(self.egg_link,"w") - f.write(self.egg_path + "\n" + self.setup_path) - 
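For reference, the .egg-link written here is a two-line text file: the absolute egg_path on the first line, the relative setup_path on the second. Contents and paths below are illustrative, not from a real install:

    # Example.egg-link
    #   /home/user/src/example
    #   ../
    #
    # typical round trip:
    #   python setup.py develop      # write the link and the .pth entry
    #   python setup.py develop -u   # uninstall_link(): remove them again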
f.close() - # postprocess the installed distro, fixing up .pth, installing scripts, - # and handling requirements - self.process_distribution(None, self.dist, not self.no_deps) - - - def uninstall_link(self): - if os.path.exists(self.egg_link): - log.info("Removing %s (link to %s)", self.egg_link, self.egg_base) - contents = [line.rstrip() for line in file(self.egg_link)] - if contents not in ([self.egg_path], [self.egg_path, self.setup_path]): - log.warn("Link points to %s: uninstall aborted", contents) - return - if not self.dry_run: - os.unlink(self.egg_link) - if not self.dry_run: - self.update_pth(self.dist) # remove any .pth link to us - if self.distribution.scripts: - # XXX should also check for entry point scripts! - log.warn("Note: you must uninstall or replace scripts manually!") - - - - - - def install_egg_scripts(self, dist): - if dist is not self.dist: - # Installing a dependency, so fall back to normal behavior - return easy_install.install_egg_scripts(self,dist) - - # create wrapper scripts in the script dir, pointing to dist.scripts - - # new-style... - self.install_wrapper_scripts(dist) - - # ...and old-style - for script_name in self.distribution.scripts or []: - script_path = os.path.abspath(convert_path(script_name)) - script_name = os.path.basename(script_path) - f = open(script_path,'rU') - script_text = f.read() - f.close() - self.install_script(dist, script_name, script_text, script_path) - - - - - - - - - - - - - - - - - - - - - - - diff --git a/zfec/setuptools-0.6c16dev3.egg/setuptools/command/easy_install.py b/zfec/setuptools-0.6c16dev3.egg/setuptools/command/easy_install.py deleted file mode 100755 index 8ac42b4..0000000 --- a/zfec/setuptools-0.6c16dev3.egg/setuptools/command/easy_install.py +++ /dev/null @@ -1,1739 +0,0 @@ -#!python -"""\ -Easy Install ------------- - -A tool for doing automatic download/extract/build of distutils-based Python -packages. For detailed documentation, see the accompanying EasyInstall.txt -file, or visit the `EasyInstall home page`__. 
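Typical invocations, using options defined further down in this file; package names and URLs are illustrative:

    easy_install SomePackage                 # resolve via the index, install
    easy_install -U SomePackage              # --upgrade: look for the latest
    easy_install -f http://example.com/dl/ SomePackage  # extra --find-links
    easy_install -eb ./src SomePackage       # --editable checkout into ./src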
- -__ http://peak.telecommunity.com/DevCenter/EasyInstall -""" -import sys, os.path, zipimport, shutil, tempfile, zipfile, re, stat, random -from glob import glob -from setuptools import Command, _dont_write_bytecode -from setuptools import __version__ as setuptools_version -from setuptools.sandbox import run_setup -from distutils import log, dir_util -from distutils.sysconfig import get_python_lib -from distutils.errors import DistutilsArgError, DistutilsOptionError, \ - DistutilsError -from setuptools.archive_util import unpack_archive -from setuptools.package_index import PackageIndex, parse_bdist_wininst -from setuptools.package_index import URL_SCHEME -from setuptools.command import bdist_egg, egg_info -from pkg_resources import * -sys_executable = os.path.normpath(sys.executable) - -__all__ = [ - 'samefile', 'easy_install', 'PthDistributions', 'extract_wininst_cfg', - 'main', 'get_exe_prefixes', -] - -def samefile(p1,p2): - if hasattr(os.path,'samefile') and ( - os.path.exists(p1) and os.path.exists(p2) - ): - return os.path.samefile(p1,p2) - return ( - os.path.normpath(os.path.normcase(p1)) == - os.path.normpath(os.path.normcase(p2)) - ) - -class easy_install(Command): - """Manage a download/build/install process""" - description = "Find/get/install Python packages" - command_consumes_arguments = True - - user_options = [ - ('prefix=', None, "installation prefix"), - ("zip-ok", "z", "install package as a zipfile"), - ("multi-version", "m", "make apps have to require() a version"), - ("upgrade", "U", "force upgrade (searches PyPI for latest versions)"), - ("install-dir=", "d", "install package to DIR"), - ("script-dir=", "s", "install scripts to DIR"), - ("exclude-scripts", "x", "Don't install scripts"), - ("always-copy", "a", "Copy all needed packages to install dir"), - ("index-url=", "i", "base URL of Python Package Index"), - ("find-links=", "f", "additional URL(s) to search for packages"), - ("delete-conflicting", "D", "no longer needed; don't use this"), - ("ignore-conflicts-at-my-risk", None, - "no longer needed; don't use this"), - ("build-directory=", "b", - "download/extract/build in DIR; keep the results"), - ('optimize=', 'O', - "also compile with optimization: -O1 for \"python -O\", " - "-O2 for \"python -OO\", and -O0 to disable [default: -O0]"), - ('record=', None, - "filename in which to record list of installed files"), - ('always-unzip', 'Z', "don't install as a zipfile, no matter what"), - ('site-dirs=','S',"list of directories where .pth files work"), - ('editable', 'e', "Install specified packages in editable form"), - ('no-deps', 'N', "don't install dependencies"), - ('allow-hosts=', 'H', "pattern(s) that hostnames must match"), - ('local-snapshots-ok', 'l', "allow building eggs from local checkouts"), - ] - boolean_options = [ - 'zip-ok', 'multi-version', 'exclude-scripts', 'upgrade', 'always-copy', - 'delete-conflicting', 'ignore-conflicts-at-my-risk', 'editable', - 'no-deps', 'local-snapshots-ok', - ] - negative_opt = {'always-unzip': 'zip-ok'} - create_index = PackageIndex - - def initialize_options(self): - self.zip_ok = self.local_snapshots_ok = None - self.install_dir = self.script_dir = self.exclude_scripts = None - self.index_url = None - self.find_links = None - self.build_directory = None - self.args = None - self.optimize = self.record = None - self.upgrade = self.always_copy = self.multi_version = None - self.editable = self.no_deps = self.allow_hosts = None - self.root = self.prefix = self.no_report = None - - # Options not specifiable via command 
line - self.package_index = None - self.pth_file = self.always_copy_from = None - self.delete_conflicting = None - self.ignore_conflicts_at_my_risk = None - self.site_dirs = None - self.installed_projects = {} - self.sitepy_installed = False - # Always read easy_install options, even if we are subclassed, or have - # an independent instance created. This ensures that defaults will - # always come from the standard configuration file(s)' "easy_install" - # section, even if this is a "develop" or "install" command, or some - # other embedding. - self._dry_run = None - self.verbose = self.distribution.verbose - self.distribution._set_command_options( - self, self.distribution.get_option_dict('easy_install') - ) - - def delete_blockers(self, blockers): - for filename in blockers: - if os.path.exists(filename) or os.path.islink(filename): - log.info("Deleting %s", filename) - if not self.dry_run: - if os.path.isdir(filename) and not os.path.islink(filename): - rmtree(filename) - else: - os.unlink(filename) - - def finalize_options(self): - self._expand('install_dir','script_dir','build_directory','site_dirs') - # If a non-default installation directory was specified, default the - # script directory to match it. - if self.script_dir is None: - self.script_dir = self.install_dir - - # Let install_dir get set by install_lib command, which in turn - # gets its info from the install command, and takes into account - # --prefix and --home and all that other crud. - self.set_undefined_options('install_lib', - ('install_dir','install_dir') - ) - # Likewise, set default script_dir from 'install_scripts.install_dir' - self.set_undefined_options('install_scripts', - ('install_dir', 'script_dir') - ) - # default --record from the install command - self.set_undefined_options('install', ('record', 'record')) - normpath = map(normalize_path, sys.path) - self.all_site_dirs = get_site_dirs() - if self.site_dirs is not None: - site_dirs = [ - os.path.expanduser(s.strip()) for s in self.site_dirs.split(',') - ] - for d in site_dirs: - if not os.path.isdir(d): - log.warn("%s (in --site-dirs) does not exist", d) - elif normalize_path(d) not in normpath: - raise DistutilsOptionError( - d+" (in --site-dirs) is not on sys.path" - ) - else: - self.all_site_dirs.append(normalize_path(d)) - if not self.editable: self.check_site_dir() - self.index_url = self.index_url or "http://pypi.python.org/simple" - self.shadow_path = self.all_site_dirs[:] - for path_item in self.install_dir, normalize_path(self.script_dir): - if path_item not in self.shadow_path: - self.shadow_path.insert(0, path_item) - - if self.allow_hosts is not None: - hosts = [s.strip() for s in self.allow_hosts.split(',')] - else: - hosts = ['*'] - if self.package_index is None: - self.package_index = self.create_index( - self.index_url, search_path = self.shadow_path+sys.path, hosts=hosts, - ) - self.local_index = Environment(self.shadow_path+sys.path) - - if self.find_links is not None: - if isinstance(self.find_links, basestring): - self.find_links = self.find_links.split() - else: - self.find_links = [] - if self.local_snapshots_ok: - self.package_index.scan_egg_links(self.shadow_path+sys.path) - self.package_index.add_find_links(self.find_links) - self.set_undefined_options('install_lib', ('optimize','optimize')) - if not isinstance(self.optimize,int): - try: - self.optimize = int(self.optimize) - if not (0 <= self.optimize <= 2): raise ValueError - except ValueError: - raise DistutilsOptionError("--optimize must be 0, 1, or 2") - - if 
self.delete_conflicting and self.ignore_conflicts_at_my_risk: - raise DistutilsOptionError( - "Can't use both --delete-conflicting and " - "--ignore-conflicts-at-my-risk at the same time" - ) - if self.editable and not self.build_directory: - raise DistutilsArgError( - "Must specify a build directory (-b) when using --editable" - ) - if not self.args: - raise DistutilsArgError( - "No urls, filenames, or requirements specified (see --help)") - - self.outputs = [] - - def run(self): - if self.verbose!=self.distribution.verbose: - log.set_verbosity(self.verbose) - try: - for spec in self.args: - self.easy_install(spec, not self.no_deps) - if self.record: - outputs = self.outputs - if self.root: # strip any package prefix - root_len = len(self.root) - for counter in xrange(len(outputs)): - outputs[counter] = outputs[counter][root_len:] - from distutils import file_util - self.execute( - file_util.write_file, (self.record, outputs), - "writing list of installed files to '%s'" % - self.record - ) - self.warn_deprecated_options() - finally: - log.set_verbosity(self.distribution.verbose) - - def pseudo_tempname(self): - """Return a pseudo-tempname base in the install directory. - This code is intentionally naive; if a malicious party can write to - the target directory you're already in deep doodoo. - """ - try: - pid = os.getpid() - except: - pid = random.randint(0,sys.maxint) - return os.path.join(self.install_dir, "test-easy-install-%s" % pid) - - def warn_deprecated_options(self): - if self.delete_conflicting or self.ignore_conflicts_at_my_risk: - log.warn( - "Note: The -D, --delete-conflicting and" - " --ignore-conflicts-at-my-risk no longer have any purpose" - " and should not be used." - ) - - def check_site_dir(self): - """Verify that self.install_dir is .pth-capable dir, if needed""" - instdir = normalize_path(self.install_dir) - pth_file = os.path.join(instdir,'easy-install.pth') - - # mkdir it if necessary - try: - os.makedirs(instdir) - except OSError: - # Oh well -- hopefully this error simply means that it is already there. - # If not the subsequent write test will identify the problem. - pass - # add it to site dirs - self.all_site_dirs.append(instdir) - - # Is it a configured, PYTHONPATH, implicit, or explicit site dir? - is_site_dir = instdir in self.all_site_dirs - - if not is_site_dir and not self.multi_version: - # No? 
Then directly test whether it does .pth file processing - is_site_dir = self.check_pth_processing() - else: - # make sure we can write to target dir - testfile = self.pseudo_tempname()+'.write-test' - test_exists = os.path.exists(testfile) - try: - if test_exists: os.unlink(testfile) - open(testfile,'w').close() - os.unlink(testfile) - except (OSError,IOError): - self.cant_write_to_target() - - if not is_site_dir and not self.multi_version: - # Can't install non-multi to non-site dir - log.warn(self.no_default_version_msg()) - - if is_site_dir: - if self.pth_file is None: - self.pth_file = PthDistributions(pth_file, self.all_site_dirs) - else: - self.pth_file = None - - if self.multi_version and not os.path.exists(pth_file): - self.sitepy_installed = True # don't need site.py in this case - self.pth_file = None # and don't create a .pth file - self.install_dir = instdir - - def cant_write_to_target(self): - msg = """can't create or remove files in install directory - -The following error occurred while trying to add or remove files in the -installation directory: - - %s - -The installation directory you specified (via --install-dir, --prefix, or -the distutils default setting) was: - - %s -""" % (sys.exc_info()[1], self.install_dir,) - - if not os.path.exists(self.install_dir): - msg += """ -This directory does not currently exist. Please create it and try again, or -choose a different installation directory (using the -d or --install-dir -option). -""" - else: - msg += """ -Perhaps your account does not have write access to this directory? If the -installation directory is a system-owned directory, you may need to sign in -as the administrator or "root" account. If you do not have administrative -access to this machine, you may wish to choose a different installation -directory, preferably one that is listed in your PYTHONPATH environment -variable. - -For information on other options, you may wish to consult the -documentation at: - - http://peak.telecommunity.com/EasyInstall.html - -Please make the appropriate changes for your system and try again. -""" - raise DistutilsError(msg) - - - - - def check_pth_processing(self): - """Empirically verify whether .pth files are supported in inst. 
dir""" - instdir = self.install_dir - log.info("Checking .pth file support in %s", instdir) - pth_file = self.pseudo_tempname()+".pth" - ok_file = pth_file+'.ok' - ok_exists = os.path.exists(ok_file) - try: - if ok_exists: os.unlink(ok_file) - f = open(pth_file,'w') - except (OSError,IOError): - self.cant_write_to_target() - else: - try: - f.write("import os;open(%r,'w').write('OK')\n" % (ok_file,)) - f.close(); f=None - executable = sys.executable - if os.name=='nt': - dirname,basename = os.path.split(executable) - alt = os.path.join(dirname,'pythonw.exe') - if basename.lower()=='python.exe' and os.path.exists(alt): - # use pythonw.exe to avoid opening a console window - executable = alt - - from distutils.spawn import spawn - spawn([executable,'-E','-c','pass'],0) - - if os.path.exists(ok_file): - log.info( - "TEST PASSED: %s appears to support .pth files", - instdir - ) - return True - finally: - if f: f.close() - if os.path.exists(ok_file): os.unlink(ok_file) - if os.path.exists(pth_file): os.unlink(pth_file) - if not self.multi_version: - log.warn("TEST FAILED: %s does NOT support .pth files", instdir) - return False - - def install_egg_scripts(self, dist): - """Write all the scripts for `dist`, unless scripts are excluded""" - if not self.exclude_scripts and dist.metadata_isdir('scripts'): - for script_name in dist.metadata_listdir('scripts'): - self.install_script( - dist, script_name, - dist.get_metadata('scripts/'+script_name) - ) - self.install_wrapper_scripts(dist) - - def add_output(self, path): - if os.path.isdir(path): - for base, dirs, files in os.walk(path): - for filename in files: - self.outputs.append(os.path.join(base,filename)) - else: - self.outputs.append(path) - - def not_editable(self, spec): - if self.editable: - raise DistutilsArgError( - "Invalid argument %r: you can't use filenames or URLs " - "with --editable (except via the --find-links option)." 
- % (spec,) - ) - - def check_editable(self,spec): - if not self.editable: - return - - if os.path.exists(os.path.join(self.build_directory, spec.key)): - raise DistutilsArgError( - "%r already exists in %s; can't do a checkout there" % - (spec.key, self.build_directory) - ) - - - - - - - def easy_install(self, spec, deps=False): - tmpdir = tempfile.mkdtemp(prefix="easy_install-") - download = None - if not self.editable: self.install_site_py() - - try: - if not isinstance(spec,Requirement): - if URL_SCHEME(spec): - # It's a url, download it to tmpdir and process - self.not_editable(spec) - download = self.package_index.download(spec, tmpdir) - return self.install_item(None, download, tmpdir, deps, True) - - elif os.path.exists(spec): - # Existing file or directory, just process it directly - self.not_editable(spec) - return self.install_item(None, spec, tmpdir, deps, True) - else: - spec = parse_requirement_arg(spec) - - self.check_editable(spec) - dist = self.package_index.fetch_distribution( - spec, tmpdir, self.upgrade, self.editable, not self.always_copy, - self.local_index - ) - if dist is None: - msg = "Could not find suitable distribution for %r" % spec - if self.always_copy: - msg+=" (--always-copy skips system and development eggs)" - raise DistutilsError(msg) - elif dist.precedence==DEVELOP_DIST: - # .egg-info dists don't need installing, just process deps - self.process_distribution(spec, dist, deps, "Using") - return dist - else: - return self.install_item(spec, dist.location, tmpdir, deps) - - finally: - if os.path.exists(tmpdir): - rmtree(tmpdir) - - def install_item(self, spec, download, tmpdir, deps, install_needed=False): - - # Installation is also needed if file in tmpdir or is not an egg - install_needed = install_needed or self.always_copy - install_needed = install_needed or os.path.dirname(download) == tmpdir - install_needed = install_needed or not download.endswith('.egg') - install_needed = install_needed or ( - self.always_copy_from is not None and - os.path.dirname(normalize_path(download)) == - normalize_path(self.always_copy_from) - ) - - if spec and not install_needed: - # at this point, we know it's a local .egg, we just don't know if - # it's already installed. 
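The for/else scan that follows is the whole of that determination: if some distribution already indexed locally lives at exactly the downloaded path, no reinstall is needed. A minimal standalone sketch of the same check, assuming pkg_resources-style objects with a .location attribute (the names needs_install, local_dists and download_path are illustrative, not from this diff):

    def needs_install(local_dists, download_path):
        # Install unless some already-indexed distribution is located
        # at exactly the downloaded path.
        for dist in local_dists:
            if dist.location == download_path:
                return False
        return True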
- for dist in self.local_index[spec.project_name]: - if dist.location==download: - break - else: - install_needed = True # it's not in the local index - - log.info("Processing %s", os.path.basename(download)) - - if install_needed: - dists = self.install_eggs(spec, download, tmpdir) - for dist in dists: - self.process_distribution(spec, dist, deps) - else: - dists = [self.check_conflicts(self.egg_distribution(download))] - self.process_distribution(spec, dists[0], deps, "Using") - - if spec is not None: - for dist in dists: - if dist in spec: - return dist - - - - - - - def process_distribution(self, requirement, dist, deps=True, *info): - self.update_pth(dist) - self.package_index.add(dist) - self.local_index.add(dist) - self.install_egg_scripts(dist) - self.installed_projects[dist.key] = dist - log.info(self.installation_report(requirement, dist, *info)) - if dist.has_metadata('dependency_links.txt'): - self.package_index.add_find_links( - dist.get_metadata_lines('dependency_links.txt') - ) - if not deps and not self.always_copy: - return - elif requirement is not None and dist.key != requirement.key: - log.warn("Skipping dependencies for %s", dist) - return # XXX this is not the distribution we were looking for - elif requirement is None or dist not in requirement: - # if we wound up with a different version, resolve what we've got - distreq = dist.as_requirement() - requirement = requirement or distreq - requirement = Requirement( - distreq.project_name, distreq.specs, requirement.extras - ) - log.info("Processing dependencies for %s", requirement) - try: - distros = WorkingSet([]).resolve( - [requirement], self.local_index, self.easy_install - ) - except DistributionNotFound, e: - raise DistutilsError( - "Could not find required distribution %s" % e.args - ) - except VersionConflict, e: - raise DistutilsError( - "Installed distribution %s conflicts with requirement %s" - % e.args - ) - if self.always_copy or self.always_copy_from: - # Force all the relevant distros to be copied or activated - for dist in distros: - if dist.key not in self.installed_projects: - self.easy_install(dist.as_requirement()) - log.info("Finished processing dependencies for %s", requirement) - - def should_unzip(self, dist): - if self.zip_ok is not None: - return not self.zip_ok - if dist.has_metadata('not-zip-safe'): - return True - if not dist.has_metadata('zip-safe'): - return True - return False - - def maybe_move(self, spec, dist_filename, setup_base): - dst = os.path.join(self.build_directory, spec.key) - if os.path.exists(dst): - log.warn( - "%r already exists in %s; build directory %s will not be kept", - spec.key, self.build_directory, setup_base - ) - return setup_base - if os.path.isdir(dist_filename): - setup_base = dist_filename - else: - if os.path.dirname(dist_filename)==setup_base: - os.unlink(dist_filename) # get it out of the tmp dir - contents = os.listdir(setup_base) - if len(contents)==1: - dist_filename = os.path.join(setup_base,contents[0]) - if os.path.isdir(dist_filename): - # if the only thing there is a directory, move it instead - setup_base = dist_filename - ensure_directory(dst); shutil.move(setup_base, dst) - return dst - - def install_wrapper_scripts(self, dist): - if not self.exclude_scripts: - for args in get_script_args(dist, script_dir=self.script_dir): - self.write_script(*args) - - - - def install_script(self, dist, script_name, script_text, dev_path=None): - """Generate a legacy script wrapper and install it""" - spec = str(dist.as_requirement()) - is_script = 
is_python_script(script_text, script_name) - - requires = [spec] + [str(r) for r in dist.requires()] - if is_script and dev_path: - script_text = get_script_header(script_text) + ( - "# EASY-INSTALL-DEV-SCRIPT: %(spec)r,%(script_name)r\n" - "__requires__ = %(requires)r\n" - "from pkg_resources import require; require(%(spec)r)\n" - "del require\n" - "__file__ = %(dev_path)r\n" - "execfile(__file__)\n" - ) % locals() - elif is_script: - script_text = get_script_header(script_text) + ( - "# EASY-INSTALL-SCRIPT: %(spec)r,%(script_name)r\n" - "__requires__ = %(requires)r\n" - "import pkg_resources\n" - "pkg_resources.run_script(%(spec)r, %(script_name)r)\n" - ) % locals() - self.write_script(script_name, script_text, 'b') - - def write_script(self, script_name, contents, mode="t", blockers=()): - """Write an executable file to the scripts directory""" - self.delete_blockers( # clean up old .py/.pyw w/o a script - [os.path.join(self.script_dir,x) for x in blockers]) - log.info("Installing %s script to %s", script_name, self.script_dir) - target = os.path.join(self.script_dir, script_name) - self.add_output(target) - - if not self.dry_run: - ensure_directory(target) - f = open(target,"w"+mode) - f.write(contents) - f.close() - chmod(target,0755) - - - - - def install_eggs(self, spec, dist_filename, tmpdir): - # .egg dirs or files are already built, so just return them - if dist_filename.lower().endswith('.egg'): - return [self.install_egg(dist_filename, tmpdir)] - elif dist_filename.lower().endswith('.exe'): - return [self.install_exe(dist_filename, tmpdir)] - - # Anything else, try to extract and build - setup_base = tmpdir - if os.path.isfile(dist_filename) and not dist_filename.endswith('.py'): - unpack_archive(dist_filename, tmpdir, self.unpack_progress) - elif os.path.isdir(dist_filename): - setup_base = os.path.abspath(dist_filename) - - if (setup_base.startswith(tmpdir) # something we downloaded - and self.build_directory and spec is not None - ): - setup_base = self.maybe_move(spec, dist_filename, setup_base) - - # Find the setup.py file - setup_script = os.path.join(setup_base, 'setup.py') - - if not os.path.exists(setup_script): - setups = glob(os.path.join(setup_base, '*', 'setup.py')) - if not setups: - raise DistutilsError( - "Couldn't find a setup script in %s" % os.path.abspath(dist_filename) - ) - if len(setups)>1: - raise DistutilsError( - "Multiple setup scripts in %s" % os.path.abspath(dist_filename) - ) - setup_script = setups[0] - - # Now run it, and return the result - if self.editable: - log.info(self.report_editable(spec, setup_script)) - return [] - else: - return self.build_and_install(setup_script, setup_base) - - def egg_distribution(self, egg_path): - if os.path.isdir(egg_path): - metadata = PathMetadata(egg_path,os.path.join(egg_path,'EGG-INFO')) - else: - metadata = EggMetadata(zipimport.zipimporter(egg_path)) - return Distribution.from_filename(egg_path,metadata=metadata) - - def install_egg(self, egg_path, tmpdir): - destination = os.path.join(self.install_dir,os.path.basename(egg_path)) - destination = os.path.abspath(destination) - if not self.dry_run: - ensure_directory(destination) - - dist = self.egg_distribution(egg_path) - self.check_conflicts(dist) - if not samefile(egg_path, destination): - if os.path.isdir(destination) and not os.path.islink(destination): - dir_util.remove_tree(destination, dry_run=self.dry_run) - elif os.path.exists(destination): - self.execute(os.unlink,(destination,),"Removing "+destination) - uncache_zipdir(destination) - if 
os.path.isdir(egg_path): - if egg_path.startswith(tmpdir): - f,m = shutil.move, "Moving" - else: - f,m = shutil.copytree, "Copying" - elif self.should_unzip(dist): - self.mkpath(destination) - f,m = self.unpack_and_compile, "Extracting" - elif egg_path.startswith(tmpdir): - f,m = shutil.move, "Moving" - else: - f,m = shutil.copy2, "Copying" - - self.execute(f, (egg_path, destination), - (m+" %s to %s") % - (os.path.basename(egg_path),os.path.dirname(destination))) - - self.add_output(destination) - return self.egg_distribution(destination) - - def install_exe(self, dist_filename, tmpdir): - # See if it's valid, get data - cfg = extract_wininst_cfg(dist_filename) - if cfg is None: - raise DistutilsError( - "%s is not a valid distutils Windows .exe" % dist_filename - ) - # Create a dummy distribution object until we build the real distro - dist = Distribution(None, - project_name=cfg.get('metadata','name'), - version=cfg.get('metadata','version'), platform="win32" - ) - - # Convert the .exe to an unpacked egg - egg_path = dist.location = os.path.join(tmpdir, dist.egg_name()+'.egg') - egg_tmp = egg_path+'.tmp' - egg_info = os.path.join(egg_tmp, 'EGG-INFO') - pkg_inf = os.path.join(egg_info, 'PKG-INFO') - ensure_directory(pkg_inf) # make sure EGG-INFO dir exists - dist._provider = PathMetadata(egg_tmp, egg_info) # XXX - self.exe_to_egg(dist_filename, egg_tmp) - - # Write EGG-INFO/PKG-INFO - if not os.path.exists(pkg_inf): - f = open(pkg_inf,'w') - f.write('Metadata-Version: 1.0\n') - for k,v in cfg.items('metadata'): - if k!='target_version': - f.write('%s: %s\n' % (k.replace('_','-').title(), v)) - f.close() - script_dir = os.path.join(egg_info,'scripts') - self.delete_blockers( # delete entry-point scripts to avoid duping - [os.path.join(script_dir,args[0]) for args in get_script_args(dist)] - ) - # Build .egg file from tmpdir - bdist_egg.make_zipfile( - egg_path, egg_tmp, verbose=self.verbose, dry_run=self.dry_run - ) - # install the .egg - return self.install_egg(egg_path, tmpdir) - - def exe_to_egg(self, dist_filename, egg_tmp): - """Extract a bdist_wininst to the directories an egg would use""" - # Check for .pth file and set up prefix translations - prefixes = get_exe_prefixes(dist_filename) - to_compile = [] - native_libs = [] - top_level = {} - def process(src,dst): - s = src.lower() - for old,new in prefixes: - if s.startswith(old): - src = new+src[len(old):] - parts = src.split('/') - dst = os.path.join(egg_tmp, *parts) - dl = dst.lower() - if dl.endswith('.pyd') or dl.endswith('.dll'): - parts[-1] = bdist_egg.strip_module(parts[-1]) - top_level[os.path.splitext(parts[0])[0]] = 1 - native_libs.append(src) - elif dl.endswith('.py') and old!='SCRIPTS/': - top_level[os.path.splitext(parts[0])[0]] = 1 - to_compile.append(dst) - return dst - if not src.endswith('.pth'): - log.warn("WARNING: can't process %s", src) - return None - # extract, tracking .pyd/.dll->native_libs and .py -> to_compile - unpack_archive(dist_filename, egg_tmp, process) - stubs = [] - for res in native_libs: - if res.lower().endswith('.pyd'): # create stubs for .pyd's - parts = res.split('/') - resource = parts[-1] - parts[-1] = bdist_egg.strip_module(parts[-1])+'.py' - pyfile = os.path.join(egg_tmp, *parts) - to_compile.append(pyfile); stubs.append(pyfile) - bdist_egg.write_stub(resource, pyfile) - self.byte_compile(to_compile) # compile .py's - bdist_egg.write_safety_flag(os.path.join(egg_tmp,'EGG-INFO'), - bdist_egg.analyze_egg(egg_tmp, stubs)) # write zip-safety flag - - for name in 'top_level','native_libs': - 
if locals()[name]: - txt = os.path.join(egg_tmp, 'EGG-INFO', name+'.txt') - if not os.path.exists(txt): - open(txt,'w').write('\n'.join(locals()[name])+'\n') - - def check_conflicts(self, dist): - """Verify that there are no conflicting "old-style" packages""" - - return dist # XXX temporarily disable until new strategy is stable - from imp import find_module, get_suffixes - from glob import glob - - blockers = [] - names = dict.fromkeys(dist._get_metadata('top_level.txt')) # XXX private attr - - exts = {'.pyc':1, '.pyo':1} # get_suffixes() might leave one out - for ext,mode,typ in get_suffixes(): - exts[ext] = 1 - - for path,files in expand_paths([self.install_dir]+self.all_site_dirs): - for filename in files: - base,ext = os.path.splitext(filename) - if base in names: - if not ext: - # no extension, check for package - try: - f, filename, descr = find_module(base, [path]) - except ImportError: - continue - else: - if f: f.close() - if filename not in blockers: - blockers.append(filename) - elif ext in exts and base!='site': # XXX ugh - blockers.append(os.path.join(path,filename)) - if blockers: - self.found_conflicts(dist, blockers) - - return dist - - def found_conflicts(self, dist, blockers): - if self.delete_conflicting: - log.warn("Attempting to delete conflicting packages:") - return self.delete_blockers(blockers) - - msg = """\ -------------------------------------------------------------------------- -CONFLICT WARNING: - -The following modules or packages have the same names as modules or -packages being installed, and will be *before* the installed packages in -Python's search path. You MUST remove all of the relevant files and -directories before you will be able to use the package(s) you are -installing: - - %s - -""" % '\n '.join(blockers) - - if self.ignore_conflicts_at_my_risk: - msg += """\ -(Note: you can run EasyInstall on '%s' with the ---delete-conflicting option to attempt deletion of the above files -and/or directories.) -""" % dist.project_name - else: - msg += """\ -Note: you can attempt this installation again with EasyInstall, and use -either the --delete-conflicting (-D) option or the ---ignore-conflicts-at-my-risk option, to either delete the above files -and directories, or to ignore the conflicts, respectively. Note that if -you ignore the conflicts, the installed package(s) may not work. -""" - msg += """\ -------------------------------------------------------------------------- -""" - sys.stderr.write(msg) - sys.stderr.flush() - if not self.ignore_conflicts_at_my_risk: - raise DistutilsError("Installation aborted due to conflicts") - - def installation_report(self, req, dist, what="Installed"): - """Helpful installation message for display to package users""" - msg = "\n%(what)s %(eggloc)s%(extras)s" - if self.multi_version and not self.no_report: - msg += """ - -Because this distribution was installed --multi-version, before you can -import modules from this package in an application, you will need to -'import pkg_resources' and then use a 'require()' call similar to one of -these examples, in order to select the desired version: - - pkg_resources.require("%(name)s") # latest installed version - pkg_resources.require("%(name)s==%(version)s") # this exact version - pkg_resources.require("%(name)s>=%(version)s") # this version or higher -""" - if self.install_dir not in map(normalize_path,sys.path): - msg += """ - -Note also that the installation directory must be on sys.path at runtime for -this to work. (e.g. 
by being the application's script directory, by being on -PYTHONPATH, or by being added to sys.path by your code.) -""" - eggloc = dist.location - name = dist.project_name - version = dist.version - extras = '' # TODO: self.report_extras(req, dist) - return msg % locals() - - def report_editable(self, spec, setup_script): - dirname = os.path.dirname(setup_script) - python = sys.executable - return """\nExtracted editable version of %(spec)s to %(dirname)s - -If it uses setuptools in its setup script, you can activate it in -"development" mode by going to that directory and running:: - - %(python)s setup.py develop - -See the setuptools documentation for the "develop" command for more info. -""" % locals() - - def run_setup(self, setup_script, setup_base, args): - sys.modules.setdefault('distutils.command.bdist_egg', bdist_egg) - sys.modules.setdefault('distutils.command.egg_info', egg_info) - - args = list(args) - if self.verbose>2: - v = 'v' * (self.verbose - 1) - args.insert(0,'-'+v) - elif self.verbose<2: - args.insert(0,'-q') - if self.dry_run: - args.insert(0,'-n') - log.info( - "Running %s %s", setup_script[len(setup_base)+1:], ' '.join(args) - ) - try: - run_setup(setup_script, args) - except SystemExit, v: - raise DistutilsError("Setup script exited with %s" % (v.args[0],)) - - def build_and_install(self, setup_script, setup_base): - args = ['bdist_egg', '--dist-dir'] - dist_dir = tempfile.mkdtemp( - prefix='egg-dist-tmp-', dir=os.path.dirname(setup_script) - ) - try: - args.append(dist_dir) - self.run_setup(setup_script, setup_base, args) - all_eggs = Environment([dist_dir]) - eggs = [] - for key in all_eggs: - for dist in all_eggs[key]: - eggs.append(self.install_egg(dist.location, setup_base)) - if not eggs and not self.dry_run: - log.warn("No eggs found in %s (setup script problem?)", - dist_dir) - return eggs - finally: - rmtree(dist_dir) - log.set_verbosity(self.verbose) # restore our log verbosity - - def update_pth(self,dist): - if self.pth_file is None: - return - - for d in self.pth_file[dist.key]: # drop old entries - if self.multi_version or d.location != dist.location: - log.info("Removing %s from easy-install.pth file", d) - self.pth_file.remove(d) - if d.location in self.shadow_path: - self.shadow_path.remove(d.location) - - if not self.multi_version: - if dist.location in self.pth_file.paths: - log.info( - "%s is already the active version in easy-install.pth", - dist - ) - else: - log.info("Adding %s to easy-install.pth file", dist) - self.pth_file.add(dist) # add new entry - if dist.location not in self.shadow_path: - self.shadow_path.append(dist.location) - - if not self.dry_run: - - self.pth_file.save() - - if dist.key=='setuptools': - # Ensure that setuptools itself never becomes unavailable! - # XXX should this check for latest version? 
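The lines that follow write a one-line setuptools.pth beside easy-install.pth. As a hedged illustration of the mechanism this relies on (standard CPython site processing, not code from this diff): every plain, non-"import" line of a *.pth file in a site directory names a directory that site.py appends to sys.path at startup, so a minimal hand-written entry could be produced like this (write_pth_entry, site_dir and egg_dir are assumed example names):

    import os

    def write_pth_entry(site_dir, egg_dir, name='example.pth'):
        # site.py adds the directory named on each plain line of a .pth
        # file to sys.path when the interpreter starts.
        f = open(os.path.join(site_dir, name), 'w')
        f.write(egg_dir + '\n')
        f.close()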
- filename = os.path.join(self.install_dir,'setuptools.pth') - if os.path.islink(filename): os.unlink(filename) - f = open(filename, 'wt') - f.write(self.pth_file.make_relative(dist.location)+'\n') - f.close() - - def unpack_progress(self, src, dst): - # Progress filter for unpacking - log.debug("Unpacking %s to %s", src, dst) - return dst # only unpack-and-compile skips files for dry run - - def unpack_and_compile(self, egg_path, destination): - to_compile = []; to_chmod = [] - - def pf(src,dst): - if dst.endswith('.py') and not src.startswith('EGG-INFO/'): - to_compile.append(dst) - elif dst.endswith('.dll') or dst.endswith('.so'): - to_chmod.append(dst) - self.unpack_progress(src,dst) - return not self.dry_run and dst or None - - unpack_archive(egg_path, destination, pf) - self.byte_compile(to_compile) - if not self.dry_run: - for f in to_chmod: - mode = ((os.stat(f)[stat.ST_MODE]) | 0555) & 07755 - chmod(f, mode) - - def byte_compile(self, to_compile): - if _dont_write_bytecode: - self.warn('byte-compiling is disabled, skipping.') - return - from distutils.util import byte_compile - try: - # try to make the byte compile messages quieter - log.set_verbosity(self.verbose - 1) - - byte_compile(to_compile, optimize=0, force=1, dry_run=self.dry_run) - if self.optimize: - byte_compile( - to_compile, optimize=self.optimize, force=1, - dry_run=self.dry_run - ) - finally: - log.set_verbosity(self.verbose) # restore original verbosity - - - - - - - - - - def no_default_version_msg(self): - return """bad install directory or PYTHONPATH - -You are attempting to install a package to a directory that is not -on PYTHONPATH and which Python does not read ".pth" files from. The -installation directory you specified (via --install-dir, --prefix, or -the distutils default setting) was: - - %s - -and your PYTHONPATH environment variable currently contains: - - %r - -Here are some of your options for correcting the problem: - -* You can choose a different installation directory, i.e., one that is - on PYTHONPATH or supports .pth files - -* You can add the installation directory to the PYTHONPATH environment - variable. (It must then also be on PYTHONPATH whenever you run - Python and want to use the package(s) you are installing.) - -* You can set up the installation directory to support ".pth" files by - using one of the approaches described here: - - http://peak.telecommunity.com/EasyInstall.html#custom-installation-locations - -Proceeding to install. Please remember that unless you make one of -these changes you will not be able to run the installed code. -""" % ( - self.install_dir, os.environ.get('PYTHONPATH','') - ) - - - - - - - - - - - def install_site_py(self): - """Make sure there's a site.py in the target dir, if needed""" - - if self.sitepy_installed: - return # already did it, or don't need to - - sitepy = os.path.join(self.install_dir, "site.py") - source = resource_string("setuptools", "site-patch.py") - current = "" - - if os.path.exists(sitepy): - log.debug("Checking existing site.py in %s", self.install_dir) - current = open(sitepy,'rb').read() - if not current.startswith('def __boot():'): - print ("\n" - "***********************************************************************\n" - "Warning: %s is not a\n" - "setuptools-generated site.py. 
It will not be overwritten.\n" - "***********************************************************************\n" - ) % (sitepy,) - self.sitepy_installed = True - return - - if current != source: - log.info("Creating %s", sitepy) - if not self.dry_run: - ensure_directory(sitepy) - f = open(sitepy,'wb') - f.write(source) - f.close() - self.byte_compile([sitepy]) - - self.sitepy_installed = True - - - - - - - - - - - - - INSTALL_SCHEMES = dict( - posix = dict( - install_dir = '$base/lib/python$py_version_short/site-packages', - script_dir = '$base/bin', - ), - ) - - DEFAULT_SCHEME = dict( - install_dir = '$base/Lib/site-packages', - script_dir = '$base/Scripts', - ) - - def _expand(self, *attrs): - config_vars = self.get_finalized_command('install').config_vars - - if self.prefix: - # Set default install_dir/scripts from --prefix - config_vars = config_vars.copy() - config_vars['base'] = self.prefix - scheme = self.INSTALL_SCHEMES.get(os.name,self.DEFAULT_SCHEME) - for attr,val in scheme.items(): - if getattr(self,attr,None) is None: - setattr(self,attr,val) - - from distutils.util import subst_vars - for attr in attrs: - val = getattr(self, attr) - if val is not None: - val = subst_vars(val, config_vars) - if os.name == 'posix': - val = os.path.expanduser(val) - setattr(self, attr, val) - - - - - - - - - -def get_site_dirs(): - # return a list of 'site' dirs - sitedirs = filter(None,os.environ.get('PYTHONPATH','').split(os.pathsep)) - prefixes = [sys.prefix] - if sys.exec_prefix != sys.prefix: - prefixes.append(sys.exec_prefix) - for prefix in prefixes: - if prefix: - if sys.platform in ('os2emx', 'riscos'): - sitedirs.append(os.path.join(prefix, "Lib", "site-packages")) - elif os.sep == '/': - sitedirs.extend([os.path.join(prefix, - "lib", - "python" + sys.version[:3], - "site-packages"), - os.path.join(prefix, "lib", "site-python")]) - else: - sitedirs.extend( - [prefix, os.path.join(prefix, "lib", "site-packages")] - ) - if sys.platform == 'darwin': - # for framework builds *only* we add the standard Apple - # locations. 
Currently only per-user, but /Library and
-            # /Network/Library could be added too
-            if 'Python.framework' in prefix:
-                home = os.environ.get('HOME')
-                if home:
-                    sitedirs.append(
-                        os.path.join(home,
-                                     'Library',
-                                     'Python',
-                                     sys.version[:3],
-                                     'site-packages'))
-    for plat_specific in (0,1):
-        site_lib = get_python_lib(plat_specific)
-        if site_lib not in sitedirs: sitedirs.append(site_lib)
-
-    sitedirs = map(normalize_path, sitedirs)
-    return sitedirs
-
-
-def expand_paths(inputs):
-    """Yield sys.path directories that might contain "old-style" packages"""
-
-    seen = {}
-
-    for dirname in inputs:
-        dirname = normalize_path(dirname)
-        if dirname in seen:
-            continue
-
-        seen[dirname] = 1
-        if not os.path.isdir(dirname):
-            continue
-
-        files = os.listdir(dirname)
-        yield dirname, files
-
-        for name in files:
-            if not name.endswith('.pth'):
-                # We only care about the .pth files
-                continue
-            if name in ('easy-install.pth','setuptools.pth'):
-                # Ignore .pth files that we control
-                continue
-
-            # Read the .pth file
-            f = open(os.path.join(dirname,name))
-            lines = list(yield_lines(f))
-            f.close()
-
-            # Yield existing non-dupe, non-import directory lines from it
-            for line in lines:
-                if not line.startswith("import"):
-                    line = normalize_path(line.rstrip())
-                    if line not in seen:
-                        seen[line] = 1
-                        if not os.path.isdir(line):
-                            continue
-                        yield line, os.listdir(line)
-
-
-def extract_wininst_cfg(dist_filename):
-    """Extract configuration data from a bdist_wininst .exe
-
-    Returns a ConfigParser.RawConfigParser, or None
-    """
-    f = open(dist_filename,'rb')
-    try:
-        endrec = zipfile._EndRecData(f)
-        if endrec is None:
-            return None
-
-        prepended = (endrec[9] - endrec[5]) - endrec[6]
-        if prepended < 12:  # no wininst data here
-            return None
-        f.seek(prepended-12)
-
-        import struct, StringIO, ConfigParser
-        tag, cfglen, bmlen = struct.unpack("<iii",f.read(12))
-        if tag not in (0x1234567A, 0x1234567B):
-            return None     # not a valid tag
-
-        f.seek(prepended-(12+cfglen+bmlen))
-        cfg = ConfigParser.RawConfigParser({'version':'','target_version':''})
-        try:
-            cfg.readfp(StringIO.StringIO(f.read(cfglen).split(chr(0),1)[0]))
-        except ConfigParser.Error:
-            return None
-        if not cfg.has_section('metadata') or not cfg.has_section('Setup'):
-            return None
-        return cfg
-    finally:
-        f.close()
-
-
-def get_exe_prefixes(exe_filename):
-    """Get exe->egg path translations for a given .exe file"""
-
-    prefixes = [
-        ('PURELIB/', ''), ('PLATLIB/pywin32_system32', ''),
-        ('PLATLIB/', ''),
-        ('SCRIPTS/', 'EGG-INFO/scripts/')
-    ]
-    z = zipfile.ZipFile(exe_filename)
-    try:
-        for info in z.infolist():
-            name = info.filename
-            parts = name.split('/')
-            if len(parts)==3 and parts[2]=='PKG-INFO':
-                if parts[1].endswith('.egg-info'):
-                    prefixes.insert(0,('/'.join(parts[:2]), 'EGG-INFO/'))
-                    break
-            if len(parts)!=2 or not name.endswith('.pth'):
-                continue
-            if name.endswith('-nspkg.pth'):
-                continue
-            if parts[0].upper() in ('PURELIB','PLATLIB'):
-                for pth in yield_lines(z.read(name)):
-                    pth = pth.strip().replace('\\','/')
-                    if not pth.startswith('import'):
-                        prefixes.append((('%s/%s/' % (parts[0],pth)), ''))
-    finally:
-        z.close()
-    prefixes = [(x.lower(),y) for x, y in prefixes]
-    prefixes.sort(); prefixes.reverse()
-    return prefixes
-
-
-def parse_requirement_arg(spec):
-    try:
-        return Requirement.parse(spec)
-    except ValueError:
-        raise DistutilsError(
-            "Not a URL, existing file, or requirement spec: %r" % (spec,)
-        )
-
-class PthDistributions(Environment):
-    """A .pth file with Distribution paths in it"""
-
-    dirty = False
-
-    def __init__(self, filename, sitedirs=()):
-        self.filename = filename; self.sitedirs=map(normalize_path, sitedirs)
-        self.basedir = normalize_path(os.path.dirname(self.filename))
-        self._load(); Environment.__init__(self, [], None, None)
-        for path in yield_lines(self.paths):
-            map(self.add, find_distributions(path, True))
-
-    def _load(self):
-        self.paths = []
-        saw_import = False
-        seen = dict.fromkeys(self.sitedirs)
-        if os.path.isfile(self.filename):
-            for line in open(self.filename,'rt'):
- if line.startswith('import'): - saw_import = True - continue - path = line.rstrip() - self.paths.append(path) - if not path.strip() or path.strip().startswith('#'): - continue - # skip non-existent paths, in case somebody deleted a package - # manually, and duplicate paths as well - path = self.paths[-1] = normalize_path( - os.path.join(self.basedir,path) - ) - if not os.path.exists(path) or path in seen: - self.paths.pop() # skip it - self.dirty = True # we cleaned up, so we're dirty now :) - continue - seen[path] = 1 - - if self.paths and not saw_import: - self.dirty = True # ensure anything we touch has import wrappers - while self.paths and not self.paths[-1].strip(): - self.paths.pop() - - def save(self): - """Write changed .pth file back to disk""" - if not self.dirty: - return - - data = '\n'.join(map(self.make_relative,self.paths)) - if data: - log.debug("Saving %s", self.filename) - data = ( - "import sys; sys.__plen = len(sys.path)\n" - "%s\n" - "import sys; new=sys.path[sys.__plen:];" - " del sys.path[sys.__plen:];" - " p=getattr(sys,'__egginsert',len(os.environ.get('PYTHONPATH','').split(os.pathsep))); sys.path[p:p]=new;" - " sys.__egginsert = p+len(new)\n" - ) % data - - if os.path.islink(self.filename): - os.unlink(self.filename) - f = open(self.filename,'wb') - f.write(data); f.close() - - elif os.path.exists(self.filename): - log.debug("Deleting empty %s", self.filename) - os.unlink(self.filename) - - self.dirty = False - - def add(self,dist): - """Add `dist` to the distribution map""" - if dist.location not in self.paths and dist.location not in self.sitedirs: - self.paths.append(dist.location); self.dirty = True - Environment.add(self,dist) - - def remove(self,dist): - """Remove `dist` from the distribution map""" - while dist.location in self.paths: - self.paths.remove(dist.location); self.dirty = True - Environment.remove(self,dist) - - - def make_relative(self,path): - npath, last = os.path.split(normalize_path(path)) - baselen = len(self.basedir) - parts = [last] - sep = os.altsep=='/' and '/' or os.sep - while len(npath)>=baselen: - if npath==self.basedir: - parts.append(os.curdir) - parts.reverse() - return sep.join(parts) - npath, last = os.path.split(npath) - parts.append(last) - else: - return path - -def get_script_header(script_text, executable=sys_executable, wininst=False): - """Create a #! 
line, getting options (if any) from script_text""" - from distutils.command.build_scripts import first_line_re - first = (script_text+'\n').splitlines()[0] - match = first_line_re.match(first) - options = '' - if match: - options = match.group(1) or '' - if options: options = ' '+options - if wininst: - executable = "python.exe" - else: - executable = nt_quote_arg(executable) - hdr = "#!%(executable)s%(options)s\n" % locals() - if unicode(hdr,'ascii','ignore').encode('ascii') != hdr: - # Non-ascii path to sys.executable, use -x to prevent warnings - if options: - if options.strip().startswith('-'): - options = ' -x'+options.strip()[1:] - # else: punt, we can't do it, let the warning happen anyway - else: - options = ' -x' - executable = fix_jython_executable(executable, options) - hdr = "#!%(executable)s%(options)s\n" % locals() - return hdr - -def auto_chmod(func, arg, exc): - if func is os.remove and os.name=='nt': - chmod(arg, stat.S_IWRITE) - return func(arg) - exc = sys.exc_info() - raise exc[0], (exc[1][0], exc[1][1] + (" %s %s" % (func,arg))) - -def uncache_zipdir(path): - """Ensure that the importer caches dont have stale info for `path`""" - from zipimport import _zip_directory_cache as zdc - _uncache(path, zdc) - _uncache(path, sys.path_importer_cache) - -def _uncache(path, cache): - if path in cache: - del cache[path] - else: - path = normalize_path(path) - for p in cache: - if normalize_path(p)==path: - del cache[p] - return - -def is_python(text, filename=''): - "Is this string a valid Python script?" - try: - compile(text, filename, 'exec') - except (SyntaxError, TypeError): - return False - else: - return True - -def is_sh(executable): - """Determine if the specified executable is a .sh (contains a #! line)""" - try: - fp = open(executable) - magic = fp.read(2) - fp.close() - except (OSError,IOError): return executable - return magic == '#!' - -def nt_quote_arg(arg): - """Quote a command line argument according to Windows parsing rules""" - - result = [] - needquote = False - nb = 0 - - needquote = (" " in arg) or ("\t" in arg) - if needquote: - result.append('"') - - for c in arg: - if c == '\\': - nb += 1 - elif c == '"': - # double preceding backslashes, then add a \" - result.append('\\' * (nb*2) + '\\"') - nb = 0 - else: - if nb: - result.append('\\' * nb) - nb = 0 - result.append(c) - - if nb: - result.append('\\' * nb) - - if needquote: - result.append('\\' * nb) # double the trailing backslashes - result.append('"') - - return ''.join(result) - - - - - - - - - -def is_python_script(script_text, filename): - """Is this text, as a whole, a Python script? (as opposed to shell/bat/etc. - """ - if filename.endswith('.py') or filename.endswith('.pyw'): - return True # extension says it's Python - if is_python(script_text, filename): - return True # it's syntactically valid Python - if script_text.startswith('#!'): - # It begins with a '#!' 
line, so check if 'python' is in it somewhere - return 'python' in script_text.splitlines()[0].lower() - - return False # Not any Python I can recognize - -try: - from os import chmod as _chmod -except ImportError: - # Jython compatibility - def _chmod(*args): pass - -def chmod(path, mode): - log.debug("changing mode of %s to %o", path, mode) - try: - _chmod(path, mode) - except os.error, e: - log.debug("chmod failed: %s", e) - -def fix_jython_executable(executable, options): - if sys.platform.startswith('java') and is_sh(executable): - # Workaround Jython's sys.executable being a .sh (an invalid - # shebang line interpreter) - if options: - # Can't apply the workaround, leave it broken - log.warn("WARNING: Unable to adapt shebang line for Jython," - " the following script is NOT executable\n" - " see http://bugs.jython.org/issue1112 for" - " more information.") - else: - return '/usr/bin/env %s' % executable - return executable - - -def get_script_args(dist, executable=sys_executable, wininst=False, script_dir=None): - """Yield write_script() argument tuples for a distribution's entrypoints""" - spec = str(dist.as_requirement()) - requires = [spec] + [str(r) for r in dist.requires()] - header = get_script_header("", executable, wininst) - generated_by = "# generated by zetuptoolz %s" % (setuptools_version,) - - for group in 'console_scripts', 'gui_scripts': - for name, ep in dist.get_entry_map(group).items(): - script_head, script_tail = (( - "# EASY-INSTALL-ENTRY-SCRIPT: %(spec)r,%(group)r,%(name)r\n" - "%(generated_by)s\n" - "__requires__ = %(requires)r\n" - "import sys\n" - "from pkg_resources import load_entry_point\n" - "\n" - ) % locals(), ( - "sys.exit(\n" - " load_entry_point(%(spec)r, %(group)r, %(name)r)()\n" - ")\n" - ) % locals()) - - if wininst or sys.platform == "win32": - # On Windows/wininst, add a .py[w] extension. Delete any existing - # -script.py[w], .exe, and .exe.manifest. - if group=='gui_scripts': - ext = '.pyw' - old = ['','.pyw','-script.pyw','.exe','.exe.manifest'] - which_python = 'pythonw.exe' - new_header = re.sub('(?i)python.exe', which_python, header) - else: - ext = '.pyscript' - old = ['','.pyscript','.py','.pyc','.pyo','-script.py','.exe','.exe.manifest'] - which_python = 'python.exe' - new_header = re.sub('(?i)pythonw.exe', which_python, header) - - len_ext = len(ext) - script_head += ( - "# If this script doesn't work for you, make sure that the %(ext)s\n" - "# extension is included in the PATHEXT environment variable, and is\n" - "# associated with %(which_python)s in the registry.\n" - "\n" - "if sys.argv[0].endswith(%(ext)r):\n" - " sys.argv[0] = sys.argv[0][:-%(len_ext)r]\n" - "\n" - ) % locals() - - if os.path.exists(new_header[2:-1]) or sys.platform != 'win32': - hdr = new_header - else: - hdr = header - yield (name+ext, hdr + script_head + script_tail, 't', [name+x for x in old]) - - # Also write a shell script that runs the .pyscript, for cygwin. - # - # We can't use a Python script, because the Python interpreter that we want - # to use is the native Windows one, which won't understand a cygwin path. - # Windows paths written with forward slashes are universally understood - # (by native Python, cygwin Python, and bash), so we'll use 'cygpath -m' to - # get the directory from which the script was run in that form. This makes - # the cygwin script and .pyscript position-independent, provided they are - # in the same directory. 
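For concreteness, the template below, filled in for a console script named foo with an assumed interpreter at C:\Python25\python.exe (both the script name and the path are illustrative, and the version string is taken from the egg path in this diff), yields a wrapper whose text would be roughly:

    #!/bin/sh
    # generated by zetuptoolz 0.6c16dev3

    ScriptDir=`cygpath -m "$0/.."`
    'C:/Python25/python.exe' "${ScriptDir}"'/foo.pyscript' "$@"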
- - def quote_path(s): - return "\\'".join("'" + p.replace('\\', '/') + "'" for p in s.split("'")) - - pyscript = quote_path("/"+name+ext) - python_path = quote_path(sys.executable) - shell_script_text = ( - '#!/bin/sh\n' - '%(generated_by)s\n' - '\n' - 'ScriptDir=`cygpath -m "$0/.."`\n' - '%(python_path)s "${ScriptDir}"%(pyscript)s "$@"\n' - ) % locals() - yield (name, shell_script_text, 'b') - else: - # On other platforms, we assume the right thing to do is to - # just write the stub with no extension. - yield (name, header + script_head + script_tail) - - -def rmtree(path, ignore_errors=False, onerror=auto_chmod): - """Recursively delete a directory tree. - - This code is taken from the Python 2.4 version of 'shutil', because - the 2.3 version doesn't really work right. - """ - if ignore_errors: - def onerror(*args): - pass - elif onerror is None: - def onerror(*args): - raise - names = [] - try: - names = os.listdir(path) - except os.error, err: - onerror(os.listdir, path, sys.exc_info()) - for name in names: - fullname = os.path.join(path, name) - try: - mode = os.lstat(fullname).st_mode - except os.error: - mode = 0 - if stat.S_ISDIR(mode): - rmtree(fullname, ignore_errors, onerror) - else: - try: - os.remove(fullname) - except os.error, err: - onerror(os.remove, fullname, sys.exc_info()) - try: - os.rmdir(path) - except os.error: - onerror(os.rmdir, path, sys.exc_info()) - -def bootstrap(): - # This function is called when setuptools*.egg is run using /bin/sh - import setuptools; argv0 = os.path.dirname(setuptools.__path__[0]) - sys.argv[0] = argv0; sys.argv.append(argv0); main() - - -def main(argv=None, **kw): - from setuptools import setup - from setuptools.dist import Distribution - import distutils.core - - USAGE = """\ -usage: %(script)s [options] requirement_or_url ... 
- or: %(script)s --help -""" - - def gen_usage (script_name): - script = os.path.basename(script_name) - return USAGE % vars() - - def with_ei_usage(f): - old_gen_usage = distutils.core.gen_usage - try: - distutils.core.gen_usage = gen_usage - return f() - finally: - distutils.core.gen_usage = old_gen_usage - - class DistributionWithoutHelpCommands(Distribution): - common_usage = "" - def _show_help(self,*args,**kw): - with_ei_usage(lambda: Distribution._show_help(self,*args,**kw)) - - if argv is None: - argv = sys.argv[1:] - - with_ei_usage(lambda: - setup( - script_args = ['-q','easy_install', '-v']+argv, - script_name = sys.argv[0] or 'easy_install', - distclass=DistributionWithoutHelpCommands, **kw - ) - ) - - - - diff --git a/zfec/setuptools-0.6c16dev3.egg/setuptools/command/egg_info.py b/zfec/setuptools-0.6c16dev3.egg/setuptools/command/egg_info.py deleted file mode 100644 index 5a8b2db..0000000 --- a/zfec/setuptools-0.6c16dev3.egg/setuptools/command/egg_info.py +++ /dev/null @@ -1,451 +0,0 @@ -"""setuptools.command.egg_info - -Create a distribution's .egg-info directory and contents""" - -# This module should be kept compatible with Python 2.3 -import os, re -from setuptools import Command -from distutils.errors import * -from distutils import log -from setuptools.command.sdist import sdist -from distutils.util import convert_path -from distutils.filelist import FileList -from pkg_resources import parse_requirements, safe_name, parse_version, \ - safe_version, yield_lines, EntryPoint, iter_entry_points, to_filename -from sdist import walk_revctrl - -class egg_info(Command): - description = "create a distribution's .egg-info directory" - - user_options = [ - ('egg-base=', 'e', "directory containing .egg-info directories" - " (default: top of the source tree)"), - ('tag-svn-revision', 'r', - "Add subversion revision ID to version number"), - ('tag-date', 'd', "Add date stamp (e.g. 
20050528) to version number"), - ('tag-build=', 'b', "Specify explicit tag to add to version number"), - ('no-svn-revision', 'R', - "Don't add subversion revision ID [default]"), - ('no-date', 'D', "Don't include date stamp [default]"), - ] - - boolean_options = ['tag-date', 'tag-svn-revision'] - negative_opt = {'no-svn-revision': 'tag-svn-revision', - 'no-date': 'tag-date'} - - - - - - - - def initialize_options(self): - self.egg_name = None - self.egg_version = None - self.egg_base = None - self.egg_info = None - self.tag_build = None - self.tag_svn_revision = 0 - self.tag_date = 0 - self.broken_egg_info = False - self.vtags = None - - def save_version_info(self, filename): - from setopt import edit_config - edit_config( - filename, - {'egg_info': - {'tag_svn_revision':0, 'tag_date': 0, 'tag_build': self.tags()} - } - ) - - - - - - - - - - - - - - - - - - - - - - - def finalize_options (self): - self.egg_name = safe_name(self.distribution.get_name()) - self.vtags = self.tags() - self.egg_version = self.tagged_version() - - try: - list( - parse_requirements('%s==%s' % (self.egg_name,self.egg_version)) - ) - except ValueError: - raise DistutilsOptionError( - "Invalid distribution name or version syntax: %s-%s" % - (self.egg_name,self.egg_version) - ) - - if self.egg_base is None: - dirs = self.distribution.package_dir - self.egg_base = (dirs or {}).get('',os.curdir) - - self.ensure_dirname('egg_base') - self.egg_info = to_filename(self.egg_name)+'.egg-info' - if self.egg_base != os.curdir: - self.egg_info = os.path.join(self.egg_base, self.egg_info) - if '-' in self.egg_name: self.check_broken_egg_info() - - # Set package version for the benefit of dumber commands - # (e.g. sdist, bdist_wininst, etc.) - # - self.distribution.metadata.version = self.egg_version - - # If we bootstrapped around the lack of a PKG-INFO, as might be the - # case in a fresh checkout, make sure that any special tags get added - # to the version info - # - pd = self.distribution._patched_dist - if pd is not None and pd.key==self.egg_name.lower(): - pd._version = self.egg_version - pd._parsed_version = parse_version(self.egg_version) - self.distribution._patched_dist = None - - - def write_or_delete_file(self, what, filename, data, force=False): - """Write `data` to `filename` or delete if empty - - If `data` is non-empty, this routine is the same as ``write_file()``. - If `data` is empty but not ``None``, this is the same as calling - ``delete_file(filename)`. If `data` is ``None``, then this is a no-op - unless `filename` exists, in which case a warning is issued about the - orphaned file (if `force` is false), or deleted (if `force` is true). - """ - if data: - self.write_file(what, filename, data) - elif os.path.exists(filename): - if data is None and not force: - log.warn( - "%s not set in setup(), but %s exists", what, filename - ) - return - else: - self.delete_file(filename) - - def write_file(self, what, filename, data): - """Write `data` to `filename` (if not a dry run) after announcing it - - `what` is used in a log message to identify what is being written - to the file. 
- """ - log.info("writing %s to %s", what, filename) - if not self.dry_run: - f = open(filename, 'wb') - f.write(data) - f.close() - - def delete_file(self, filename): - """Delete `filename` (if not a dry run) after announcing it""" - log.info("deleting %s", filename) - if not self.dry_run: - os.unlink(filename) - - def tagged_version(self): - return safe_version(self.distribution.get_version() + self.vtags) - - def run(self): - self.mkpath(self.egg_info) - installer = self.distribution.fetch_build_egg - for ep in iter_entry_points('egg_info.writers'): - writer = ep.load(installer=installer) - writer(self, ep.name, os.path.join(self.egg_info,ep.name)) - - # Get rid of native_libs.txt if it was put there by older bdist_egg - nl = os.path.join(self.egg_info, "native_libs.txt") - if os.path.exists(nl): - self.delete_file(nl) - - self.find_sources() - - def tags(self): - version = '' - if self.tag_build: - version+=self.tag_build - if self.tag_svn_revision and ( - os.path.exists('.svn') or os.path.exists('PKG-INFO') - ): version += '-r%s' % self.get_svn_revision() - if self.tag_date: - import time; version += time.strftime("-%Y%m%d") - return version - - - - - - - - - - - - - - - - - - def get_svn_revision(self): - revision = 0 - urlre = re.compile('url="([^"]+)"') - revre = re.compile('committed-rev="(\d+)"') - - for base,dirs,files in os.walk(os.curdir): - if '.svn' not in dirs: - dirs[:] = [] - continue # no sense walking uncontrolled subdirs - dirs.remove('.svn') - f = open(os.path.join(base,'.svn','entries')) - data = f.read() - f.close() - - if data.startswith('9 and d[9]]+[0]) - if base==os.curdir: - base_url = dirurl+'/' # save the root url - elif not dirurl.startswith(base_url): - dirs[:] = [] - continue # not part of the same svn tree, skip it - revision = max(revision, localrev) - - return str(revision or get_pkg_info_revision()) - - - - - def find_sources(self): - """Generate SOURCES.txt manifest file""" - manifest_filename = os.path.join(self.egg_info,"SOURCES.txt") - mm = manifest_maker(self.distribution) - mm.manifest = manifest_filename - mm.run() - self.filelist = mm.filelist - - def check_broken_egg_info(self): - bei = self.egg_name+'.egg-info' - if self.egg_base != os.curdir: - bei = os.path.join(self.egg_base, bei) - if os.path.exists(bei): - log.warn( - "-"*78+'\n' - "Note: Your current .egg-info directory has a '-' in its name;" - '\nthis will not work correctly with "setup.py develop".\n\n' - 'Please rename %s to %s to correct this problem.\n'+'-'*78, - bei, self.egg_info - ) - self.broken_egg_info = self.egg_info - self.egg_info = bei # make it work for now - -class FileList(FileList): - """File list that accepts only existing, platform-independent paths""" - - def append(self, item): - if item.endswith('\r'): # Fix older sdists built on Windows - item = item[:-1] - path = convert_path(item) - if os.path.exists(path): - self.files.append(path) - - - - - - - - - -class manifest_maker(sdist): - - template = "MANIFEST.in" - - def initialize_options (self): - self.use_defaults = 1 - self.prune = 1 - self.manifest_only = 1 - self.force_manifest = 1 - - def finalize_options(self): - pass - - def run(self): - self.filelist = FileList() - if not os.path.exists(self.manifest): - self.write_manifest() # it must exist so it'll get in the list - self.filelist.findall() - self.add_defaults() - if os.path.exists(self.template): - self.read_template() - self.prune_file_list() - self.filelist.sort() - self.filelist.remove_duplicates() - self.write_manifest() - - def write_manifest 
(self): - """Write the file list in 'self.filelist' (presumably as filled in - by 'add_defaults()' and 'read_template()') to the manifest file - named by 'self.manifest'. - """ - files = self.filelist.files - if os.sep!='/': - files = [f.replace(os.sep,'/') for f in files] - self.execute(write_file, (self.manifest, files), - "writing manifest file '%s'" % self.manifest) - - def warn(self, msg): # suppress missing-file warnings from sdist - if not msg.startswith("standard file not found:"): - sdist.warn(self, msg) - - def add_defaults(self): - sdist.add_defaults(self) - self.filelist.append(self.template) - self.filelist.append(self.manifest) - rcfiles = list(walk_revctrl()) - if rcfiles: - self.filelist.extend(rcfiles) - elif os.path.exists(self.manifest): - self.read_manifest() - ei_cmd = self.get_finalized_command('egg_info') - self.filelist.include_pattern("*", prefix=ei_cmd.egg_info) - - def prune_file_list (self): - build = self.get_finalized_command('build') - base_dir = self.distribution.get_fullname() - self.filelist.exclude_pattern(None, prefix=build.build_base) - self.filelist.exclude_pattern(None, prefix=base_dir) - sep = re.escape(os.sep) - self.filelist.exclude_pattern(sep+r'(RCS|CVS|\.svn)'+sep, is_regex=1) - - -def write_file (filename, contents): - """Create a file with the specified name and write 'contents' (a - sequence of strings without line terminators) to it. - """ - f = open(filename, "wb") # always write POSIX-style manifest - f.write("\n".join(contents)) - f.close() - - - - - - - - - - - - - -def write_pkg_info(cmd, basename, filename): - log.info("writing %s", filename) - if not cmd.dry_run: - metadata = cmd.distribution.metadata - metadata.version, oldver = cmd.egg_version, metadata.version - metadata.name, oldname = cmd.egg_name, metadata.name - try: - # write unescaped data to PKG-INFO, so older pkg_resources - # can still parse it - metadata.write_pkg_info(cmd.egg_info) - finally: - metadata.name, metadata.version = oldname, oldver - - safe = getattr(cmd.distribution,'zip_safe',None) - import bdist_egg; bdist_egg.write_safety_flag(cmd.egg_info, safe) - -def warn_depends_obsolete(cmd, basename, filename): - if os.path.exists(filename): - log.warn( - "WARNING: 'depends.txt' is not used by setuptools 0.6!\n" - "Use the install_requires/extras_require setup() args instead." 
- ) - - -def write_requirements(cmd, basename, filename): - dist = cmd.distribution - data = ['\n'.join(yield_lines(dist.install_requires or ()))] - for extra,reqs in (dist.extras_require or {}).items(): - data.append('\n\n[%s]\n%s' % (extra, '\n'.join(yield_lines(reqs)))) - cmd.write_or_delete_file("requirements", filename, ''.join(data)) - -def write_toplevel_names(cmd, basename, filename): - pkgs = dict.fromkeys( - [k.split('.',1)[0] - for k in cmd.distribution.iter_distribution_names() - ] - ) - cmd.write_file("top-level names", filename, '\n'.join(pkgs)+'\n') - - - -def overwrite_arg(cmd, basename, filename): - write_arg(cmd, basename, filename, True) - -def write_arg(cmd, basename, filename, force=False): - argname = os.path.splitext(basename)[0] - value = getattr(cmd.distribution, argname, None) - if value is not None: - value = '\n'.join(value)+'\n' - cmd.write_or_delete_file(argname, filename, value, force) - -def write_entries(cmd, basename, filename): - ep = cmd.distribution.entry_points - - if isinstance(ep,basestring) or ep is None: - data = ep - elif ep is not None: - data = [] - for section, contents in ep.items(): - if not isinstance(contents,basestring): - contents = EntryPoint.parse_group(section, contents) - contents = '\n'.join(map(str,contents.values())) - data.append('[%s]\n%s\n\n' % (section,contents)) - data = ''.join(data) - - cmd.write_or_delete_file('entry points', filename, data, True) - -def get_pkg_info_revision(): - # See if we can get a -r### off of PKG-INFO, in case this is an sdist of - # a subversion revision - # - if os.path.exists('PKG-INFO'): - f = open('PKG-INFO','rU') - for line in f: - match = re.match(r"Version:.*-r(\d+)\s*$", line) - if match: - return int(match.group(1)) - return 0 - - - -# diff --git a/zfec/setuptools-0.6c16dev3.egg/setuptools/command/install.py b/zfec/setuptools-0.6c16dev3.egg/setuptools/command/install.py deleted file mode 100644 index adaaeca..0000000 --- a/zfec/setuptools-0.6c16dev3.egg/setuptools/command/install.py +++ /dev/null @@ -1,123 +0,0 @@ -import setuptools, sys, glob -from distutils.command.install import install as _install -from distutils.errors import DistutilsArgError - -class install(_install): - """Use easy_install to install the package, w/dependencies""" - - user_options = _install.user_options + [ - ('old-and-unmanageable', None, "Try not to use this!"), - ('single-version-externally-managed', None, - "used by system package builders to create 'flat' eggs"), - ] - boolean_options = _install.boolean_options + [ - 'old-and-unmanageable', 'single-version-externally-managed', - ] - new_commands = [ - ('install_egg_info', lambda self: True), - ('install_scripts', lambda self: True), - ] - _nc = dict(new_commands) - sub_commands = [ - cmd for cmd in _install.sub_commands if cmd[0] not in _nc - ] + new_commands - - def initialize_options(self): - _install.initialize_options(self) - self.old_and_unmanageable = None - self.single_version_externally_managed = None - self.no_compile = None # make DISTUTILS_DEBUG work right! 
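The finalize_options method that follows encodes the system-packaging rule: --root implies single-version-externally-managed, and the latter without a staging root demands --record. A condensed standalone sketch of that rule (function and argument names are illustrative, not from this diff):

    def check_system_packaging(root, record, single_version_externally_managed):
        # --root forces externally-managed mode; externally-managed mode
        # without a root needs a record file to be of any use.
        if root:
            single_version_externally_managed = True
        elif single_version_externally_managed and not record:
            raise ValueError(
                "You must specify --record or --root when building system packages"
            )
        return single_version_externally_managed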
- - def finalize_options(self): - _install.finalize_options(self) - if self.root: - self.single_version_externally_managed = True - elif self.single_version_externally_managed: - if not self.root and not self.record: - raise DistutilsArgError( - "You must specify --record or --root when building system" - " packages" - ) - - def handle_extra_path(self): - if self.root or self.single_version_externally_managed: - # explicit backward-compatibility mode, allow extra_path to work - return _install.handle_extra_path(self) - - # Ignore extra_path when installing an egg (or being run by another - # command without --root or --single-version-externally-managed - self.path_file = None - self.extra_dirs = '' - - def run(self): - self.old_run() - if sys.platform == "win32": - from setuptools.command.scriptsetup import do_scriptsetup - do_scriptsetup() - - def old_run(self): - # Explicit request for old-style install? Just do it - if self.old_and_unmanageable or self.single_version_externally_managed: - return _install.run(self) - - # Attempt to detect whether we were called from setup() or by another - # command. If we were called by setup(), our caller will be the - # 'run_command' method in 'distutils.dist', and *its* caller will be - # the 'run_commands' method. If we were called any other way, our - # immediate caller *might* be 'run_command', but it won't have been - # called by 'run_commands'. This is slightly kludgy, but seems to - # work. - # - caller = sys._getframe(2) - caller_module = caller.f_globals.get('__name__','') - caller_name = caller.f_code.co_name - - if caller_module != 'distutils.dist' or caller_name!='run_commands': - # We weren't called from the command line or setup(), so we - # should run in backward-compatibility mode to support bdist_* - # commands. - _install.run(self) - else: - self.do_egg_install() - - def do_egg_install(self): - - easy_install = self.distribution.get_command_class('easy_install') - - cmd = easy_install( - self.distribution, args="x", root=self.root, record=self.record, - ) - cmd.ensure_finalized() # finalize before bdist_egg munges install cmd - cmd.always_copy_from = '.' 
# make sure local-dir eggs get installed - - # pick up setup-dir .egg files only: no .egg-info - cmd.package_index.scan(glob.glob('*.egg')) - - self.run_command('bdist_egg') - args = [self.distribution.get_command_obj('bdist_egg').egg_output] - - if setuptools.bootstrap_install_from: - # Bootstrap self-installation of setuptools - args.insert(0, setuptools.bootstrap_install_from) - - cmd.args = args - cmd.run() - setuptools.bootstrap_install_from = None - - - - - - - - - - - - - - - - - -# diff --git a/zfec/setuptools-0.6c16dev3.egg/setuptools/command/install_egg_info.py b/zfec/setuptools-0.6c16dev3.egg/setuptools/command/install_egg_info.py deleted file mode 100644 index 939340c..0000000 --- a/zfec/setuptools-0.6c16dev3.egg/setuptools/command/install_egg_info.py +++ /dev/null @@ -1,123 +0,0 @@ -from setuptools import Command -from setuptools.archive_util import unpack_archive -from distutils import log, dir_util -import os, shutil, pkg_resources - -class install_egg_info(Command): - """Install an .egg-info directory for the package""" - - description = "Install an .egg-info directory for the package" - - user_options = [ - ('install-dir=', 'd', "directory to install to"), - ] - - def initialize_options(self): - self.install_dir = None - - def finalize_options(self): - self.set_undefined_options('install_lib',('install_dir','install_dir')) - ei_cmd = self.get_finalized_command("egg_info") - basename = pkg_resources.Distribution( - None, None, ei_cmd.egg_name, ei_cmd.egg_version - ).egg_name()+'.egg-info' - self.source = ei_cmd.egg_info - self.target = os.path.join(self.install_dir, basename) - self.outputs = [self.target] - - def run(self): - self.run_command('egg_info') - target = self.target - if os.path.isdir(self.target) and not os.path.islink(self.target): - dir_util.remove_tree(self.target, dry_run=self.dry_run) - elif os.path.exists(self.target): - self.execute(os.unlink,(self.target,),"Removing "+self.target) - if not self.dry_run: - pkg_resources.ensure_directory(self.target) - self.execute(self.copytree, (), - "Copying %s to %s" % (self.source, self.target) - ) - self.install_namespaces() - - def get_outputs(self): - return self.outputs - - def copytree(self): - # Copy the .egg-info tree to site-packages - def skimmer(src,dst): - # filter out source-control directories; note that 'src' is always - # a '/'-separated path, regardless of platform. 'dst' is a - # platform-specific path. - for skip in '.svn/','CVS/': - if src.startswith(skip) or '/'+skip in src: - return None - self.outputs.append(dst) - log.debug("Copying %s to %s", src, dst) - return dst - unpack_archive(self.source, self.target, skimmer) - - - - - - - - - - - - - - - - - - - - - - - - - - def install_namespaces(self): - nsp = self._get_all_ns_packages() - if not nsp: return - filename,ext = os.path.splitext(self.target) - filename += '-nspkg.pth'; self.outputs.append(filename) - log.info("Installing %s",filename) - if not self.dry_run: - f = open(filename,'wb') - for pkg in nsp: - pth = tuple(pkg.split('.')) - trailer = '\n' - if '.' 
in pkg: - trailer = ( - "; m and setattr(sys.modules[%r], %r, m)\n" - % ('.'.join(pth[:-1]), pth[-1]) - ) - f.write( - "import sys,new,os; " - "p = os.path.join(sys._getframe(1).f_locals['sitedir'], " - "*%(pth)r); " - "ie = os.path.exists(os.path.join(p,'__init__.py')); " - "m = not ie and " - "sys.modules.setdefault(%(pkg)r,new.module(%(pkg)r)); " - "mp = (m or []) and m.__dict__.setdefault('__path__',[]); " - "(p not in mp) and mp.append(p)%(trailer)s" - % locals() - ) - f.close() - - def _get_all_ns_packages(self): - nsp = {} - for pkg in self.distribution.namespace_packages or []: - pkg = pkg.split('.') - while pkg: - nsp['.'.join(pkg)] = 1 - pkg.pop() - nsp=list(nsp) - nsp.sort() # set up shorter names first - return nsp - - diff --git a/zfec/setuptools-0.6c16dev3.egg/setuptools/command/install_lib.py b/zfec/setuptools-0.6c16dev3.egg/setuptools/command/install_lib.py deleted file mode 100644 index 96c8dfe..0000000 --- a/zfec/setuptools-0.6c16dev3.egg/setuptools/command/install_lib.py +++ /dev/null @@ -1,76 +0,0 @@ -from distutils.command.install_lib import install_lib as _install_lib -import os - -class install_lib(_install_lib): - """Don't add compiled flags to filenames of non-Python files""" - - def _bytecode_filenames (self, py_filenames): - bytecode_files = [] - for py_file in py_filenames: - if not py_file.endswith('.py'): - continue - if self.compile: - bytecode_files.append(py_file + "c") - if self.optimize > 0: - bytecode_files.append(py_file + "o") - - return bytecode_files - - def run(self): - self.build() - outfiles = self.install() - if outfiles is not None: - # always compile, in case we have any extension stubs to deal with - self.byte_compile(outfiles) - - def get_exclusions(self): - exclude = {} - nsp = self.distribution.namespace_packages - - if (nsp and self.get_finalized_command('install') - .single_version_externally_managed - ): - for pkg in nsp: - parts = pkg.split('.') - while parts: - pkgdir = os.path.join(self.install_dir, *parts) - for f in '__init__.py', '__init__.pyc', '__init__.pyo': - exclude[os.path.join(pkgdir,f)] = 1 - parts.pop() - return exclude - - def copy_tree( - self, infile, outfile, - preserve_mode=1, preserve_times=1, preserve_symlinks=0, level=1 - ): - assert preserve_mode and preserve_times and not preserve_symlinks - exclude = self.get_exclusions() - - if not exclude: - return _install_lib.copy_tree(self, infile, outfile) - - # Exclude namespace package __init__.py* files from the output - - from setuptools.archive_util import unpack_directory - from distutils import log - - outfiles = [] - - def pf(src, dst): - if dst in exclude: - log.warn("Skipping installation of %s (namespace package)",dst) - return False - - log.info("copying %s -> %s", src, os.path.dirname(dst)) - outfiles.append(dst) - return dst - - unpack_directory(infile, outfile, pf) - return outfiles - - def get_outputs(self): - outputs = _install_lib.get_outputs(self) - exclude = self.get_exclusions() - if exclude: - return [f for f in outputs if f not in exclude] - return outputs diff --git a/zfec/setuptools-0.6c16dev3.egg/setuptools/command/install_scripts.py b/zfec/setuptools-0.6c16dev3.egg/setuptools/command/install_scripts.py deleted file mode 100644 index 79fa375..0000000 --- a/zfec/setuptools-0.6c16dev3.egg/setuptools/command/install_scripts.py +++ /dev/null @@ -1,82 +0,0 @@ -from distutils.command.install_scripts import install_scripts \ - as _install_scripts -from easy_install import get_script_args, sys_executable, chmod -from pkg_resources import Distribution, 
PathMetadata, ensure_directory -import os -from distutils import log - -class install_scripts(_install_scripts): - """Do normal script install, plus any egg_info wrapper scripts""" - - def initialize_options(self): - _install_scripts.initialize_options(self) - self.no_ep = False - - def run(self): - self.run_command("egg_info") - if self.distribution.scripts: - _install_scripts.run(self) # run first to set up self.outfiles - else: - self.outfiles = [] - if self.no_ep: - # don't install entry point scripts into .egg file! - return - - ei_cmd = self.get_finalized_command("egg_info") - dist = Distribution( - ei_cmd.egg_base, PathMetadata(ei_cmd.egg_base, ei_cmd.egg_info), - ei_cmd.egg_name, ei_cmd.egg_version, - ) - bs_cmd = self.get_finalized_command('build_scripts') - executable = getattr(bs_cmd,'executable',sys_executable) - is_wininst = getattr( - self.get_finalized_command("bdist_wininst"), '_is_running', False - ) - for args in get_script_args(dist, executable, is_wininst): - self.write_script(*args) - - - - - - def write_script(self, script_name, contents, mode="t", *ignored): - """Write an executable file to the scripts directory""" - log.info("Installing %s script to %s", script_name, self.install_dir) - target = os.path.join(self.install_dir, script_name) - self.outfiles.append(target) - - if not self.dry_run: - ensure_directory(target) - f = open(target,"w"+mode) - f.write(contents) - f.close() - chmod(target,0755) - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/zfec/setuptools-0.6c16dev3.egg/setuptools/command/register.py b/zfec/setuptools-0.6c16dev3.egg/setuptools/command/register.py deleted file mode 100644 index 3b2e085..0000000 --- a/zfec/setuptools-0.6c16dev3.egg/setuptools/command/register.py +++ /dev/null @@ -1,10 +0,0 @@ -from distutils.command.register import register as _register - -class register(_register): - __doc__ = _register.__doc__ - - def run(self): - # Make sure that we are using valid current name/version info - self.run_command('egg_info') - _register.run(self) - diff --git a/zfec/setuptools-0.6c16dev3.egg/setuptools/command/rotate.py b/zfec/setuptools-0.6c16dev3.egg/setuptools/command/rotate.py deleted file mode 100644 index 8aab312..0000000 --- a/zfec/setuptools-0.6c16dev3.egg/setuptools/command/rotate.py +++ /dev/null @@ -1,57 +0,0 @@ -import distutils, os -from setuptools import Command -from distutils.util import convert_path -from distutils import log -from distutils.errors import * - -class rotate(Command): - """Delete older distributions""" - - description = "delete older distributions, keeping N newest files" - user_options = [ - ('match=', 'm', "patterns to match (required)"), - ('dist-dir=', 'd', "directory where the distributions are"), - ('keep=', 'k', "number of matching distributions to keep"), - ] - - boolean_options = [] - - def initialize_options(self): - self.match = None - self.dist_dir = None - self.keep = None - - def finalize_options(self): - if self.match is None: - raise DistutilsOptionError( - "Must specify one or more (comma-separated) match patterns " - "(e.g. 
'.zip' or '.egg')" - ) - if self.keep is None: - raise DistutilsOptionError("Must specify number of files to keep") - try: - self.keep = int(self.keep) - except ValueError: - raise DistutilsOptionError("--keep must be an integer") - if isinstance(self.match, basestring): - self.match = [ - convert_path(p.strip()) for p in self.match.split(',') - ] - self.set_undefined_options('bdist',('dist_dir', 'dist_dir')) - - def run(self): - self.run_command("egg_info") - from glob import glob - for pattern in self.match: - pattern = self.distribution.get_name()+'*'+pattern - files = glob(os.path.join(self.dist_dir,pattern)) - files = [(os.path.getmtime(f),f) for f in files] - files.sort() - files.reverse() - - log.info("%d file(s) matching %s", len(files), pattern) - files = files[self.keep:] - for (t,f) in files: - log.info("Deleting %s", f) - if not self.dry_run: - os.unlink(f) diff --git a/zfec/setuptools-0.6c16dev3.egg/setuptools/command/saveopts.py b/zfec/setuptools-0.6c16dev3.egg/setuptools/command/saveopts.py deleted file mode 100644 index 9c58d72..0000000 --- a/zfec/setuptools-0.6c16dev3.egg/setuptools/command/saveopts.py +++ /dev/null @@ -1,24 +0,0 @@ -import distutils, os -from setuptools import Command -from setuptools.command.setopt import edit_config, option_base - -class saveopts(option_base): - """Save command-line options to a file""" - - description = "save supplied options to setup.cfg or other config file" - - def run(self): - dist = self.distribution - commands = dist.command_options.keys() - settings = {} - - for cmd in commands: - - if cmd=='saveopts': - continue # don't save our own options! - - for opt,(src,val) in dist.get_option_dict(cmd).items(): - if src=="command line": - settings.setdefault(cmd,{})[opt] = val - - edit_config(self.filename, settings, self.dry_run) diff --git a/zfec/setuptools-0.6c16dev3.egg/setuptools/command/scriptsetup.py b/zfec/setuptools-0.6c16dev3.egg/setuptools/command/scriptsetup.py deleted file mode 100644 index db68c07..0000000 --- a/zfec/setuptools-0.6c16dev3.egg/setuptools/command/scriptsetup.py +++ /dev/null @@ -1,284 +0,0 @@ -from distutils.errors import DistutilsSetupError -from setuptools import Command -import sys - -class scriptsetup(Command): - action = (sys.platform == "win32" - and "set up .pyscript association and PATHEXT variable to run scripts" - or "this does nothing on non-Windows platforms") - - user_options = [ - ('allusers', 'a', - 'make changes for all users of this Windows installation (requires Administrator privileges)'), - ] - boolean_options = ['allusers'] - - def initialize_options(self): - self.allusers = False - - def finalize_options(self): - pass - - def run(self): - if sys.platform != "win32": - print "\n'scriptsetup' isn't needed on non-Windows platforms." - else: - do_scriptsetup(self.allusers) - - -def do_scriptsetup(allusers=False): - print "\nSetting up environment to run scripts for %s..." 
% (allusers and "all users" or "the current user") - - from _winreg import HKEY_CURRENT_USER, HKEY_LOCAL_MACHINE, HKEY_CLASSES_ROOT, \ - REG_SZ, REG_EXPAND_SZ, KEY_QUERY_VALUE, KEY_SET_VALUE, \ - OpenKey, CreateKey, QueryValueEx, SetValueEx, FlushKey, CloseKey - - USER_ENV = "Environment" - try: - user_env = OpenKey(HKEY_CURRENT_USER, USER_ENV, 0, KEY_QUERY_VALUE) - except WindowsError, e: - raise DistutilsSetupError("I could not read the user environment from the registry.\n%r" % (e,)) - - SYSTEM_ENV = "SYSTEM\\CurrentControlSet\\Control\\Session Manager\\Environment" - try: - system_env = OpenKey(HKEY_LOCAL_MACHINE, SYSTEM_ENV, 0, KEY_QUERY_VALUE) - except WindowsError, e: - raise DistutilsSetupError("I could not read the system environment from the registry.\n%r" % (e,)) - - - # HKEY_CLASSES_ROOT is a merged view that would only confuse us. - # - - USER_CLASSES = "SOFTWARE\\Classes" - try: - user_classes = OpenKey(HKEY_CURRENT_USER, USER_CLASSES, 0, KEY_QUERY_VALUE) - except WindowsError, e: - raise DistutilsSetupError("I could not read the user filetype associations from the registry.\n%r" % (e,)) - - SYSTEM_CLASSES = "SOFTWARE\\Classes" - try: - system_classes = OpenKey(HKEY_LOCAL_MACHINE, SYSTEM_CLASSES, 0, KEY_QUERY_VALUE) - except WindowsError, e: - raise DistutilsSetupError("I could not read the system filetype associations from the registry.\n%r" % (e,)) - - - def query(key, subkey, what): - try: - (value, type) = QueryValueEx(key, subkey) - except WindowsError, e: - if e.winerror == 2: # not found - return None - raise DistutilsSetupError("I could not read %s from the registry.\n%r" % (what, e)) - - # It does not matter that we don't expand environment strings, in fact it's better not to. - - if type != REG_SZ and type != REG_EXPAND_SZ: - raise DistutilsSetupError("I expected the registry entry for %s to have a string type (REG_SZ or REG_EXPAND_SZ), " - "and was flummoxed by it having type code %r." % (what, type)) - return (value, type) - - - def open_and_query(key, path, subkey, what): - try: - read_key = OpenKey(key, path, 0, KEY_QUERY_VALUE) - except WindowsError, e: - if e.winerror == 2: # not found - return None - raise DistutilsSetupError("I could not read %s from the registry because I could not open " - "the parent key.\n%r" % (what, e)) - - try: - return query(read_key, subkey, what) - finally: - CloseKey(read_key) - - - def update(key_name_path, subkey, desired_value, desired_type, goal, what): - (key, name, path) = key_name_path - - (old_value, old_type) = open_and_query(key, path, subkey, what) or (None, None) - if (old_value, old_type) == (desired_value, desired_type): - print "Already done: %s." 
% (goal,) - return False - - try: - update_key = OpenKey(key, path, 0, KEY_SET_VALUE|KEY_QUERY_VALUE) - except WindowsError, e: - if e.winerror != 2: - raise DistutilsSetupError("I tried to %s, but was not successful because I could not open " - "the registry key %s\\%s for writing.\n%r" - % (goal, name, path, e)) - try: - update_key = CreateKey(key, path) - except WindowsError, e: - raise DistutilsSetupError("I tried to %s, but was not successful because the registry key %s\\%s " - "did not exist, and I was unable to create it.\n%r" - % (goal, name, path, e)) - - (new_value, new_type) = (None, None) - try: - SetValueEx(update_key, subkey, 0, desired_type, desired_value) - except WindowsError, e: - raise DistutilsSetupError("I tried to %s, but was not able to set the subkey %r under %s\\%s to be %r.\n%r" - % (goal, subkey, name, path, desired_value)) - else: - (new_value, new_type) = query(update_key, subkey, what) or (None, None) - finally: - FlushKey(update_key) - CloseKey(update_key) - - if (new_value, new_type) != (desired_value, desired_type): - raise DistutilsSetupError("I tried to %s by setting the subkey %r under %s\\%s to be %r, " - "and the call to SetValueEx succeeded, but the value ended up as " - "%r instead (it was previously %r). Maybe the update was unexpectedly virtualized?" - % (goal, subkey, name, path, desired_value, new_value, old_value)) - - print "Done: %s." % (goal,) - return True - - - # Maintenance hazard: 'add_to_environment' and 'associate' use very similar, but not identical logic. - - def add_to_environment(varname, addition, change_allusers): - changed = False - what = "the %s environment variable %s" % (change_allusers and "system" or "user", varname) - goal = "add %s to %s" % (addition, what) - - system_valueandtype = query(system_env, varname, "the system environment variable %s" % (varname,)) - user_valueandtype = query(user_env, varname, "the user environment variable %s" % (varname,)) - - if change_allusers: - (value, type) = system_valueandtype or (u'', REG_SZ) - key_name_path = (HKEY_LOCAL_MACHINE, "HKEY_LOCAL_MACHINE", SYSTEM_ENV) - else: - (value, type) = user_valueandtype or system_valueandtype or (u'', REG_SZ) - key_name_path = (HKEY_CURRENT_USER, "HKEY_CURRENT_USER", USER_ENV) - - if addition.lower() in value.lower().split(u';'): - print "Already done: %s." % (goal,) - else: - changed |= update(key_name_path, varname, value + u';' + addition, type, goal, what) - - if change_allusers: - # Also change any overriding environment entry for the current user. 
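The heart of add_to_environment() is a case-insensitive membership test on a ';'-separated value, so that '.pyscript' lands in PATHEXT at most once even when its case differs from an existing entry. A platform-independent sketch of just that merge step; the variable values are illustrative, not read from any registry:

def merge_semicolon_list(current, addition):
    # Append `addition` only when absent, comparing case-insensitively
    # the way the registry code around this point does.
    parts = current.split(u';') if current else []
    if addition.lower() in (p.lower() for p in parts):
        return current
    return u';'.join(parts + [addition])

print(merge_semicolon_list(u".COM;.EXE;.BAT", u".pyscript"))   # appended
print(merge_semicolon_list(u".COM;.PYSCRIPT", u".pyscript"))   # unchanged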
- (user_value, user_type) = user_valueandtype or (u'', REG_SZ) - split_value = user_value.lower().split(u';') - - if not (addition.lower() in split_value or u'%'+varname.lower()+u'%' in split_value): - now_what = "the overriding user environment variable %s" % (varname,) - changed |= update((HKEY_CURRENT_USER, "HKEY_CURRENT_USER", USER_ENV), - varname, user_value + u';' + addition, user_type, - "add %s to %s" % (addition, now_what), now_what) - - return changed - - - def associate(ext, target, change_allusers): - changed = False - what = "the %s association for %s" % (change_allusers and "system" or "user", ext) - goal = "associate the filetype %s with %s for %s" % (ext, target, change_allusers and "all users" or "the current user") - - try: - if change_allusers: - target_key = OpenKey(HKEY_LOCAL_MACHINE, "%s\\%s" % (SYSTEM_CLASSES, target), 0, KEY_QUERY_VALUE) - else: - target_key = OpenKey(HKEY_CLASSES_ROOT, target, 0, KEY_QUERY_VALUE) - except WindowsError, e: - raise DistutilsSetupError("I was going to %s, but that won't work because the %s class does not exist in the registry, " - "as far as I can tell.\n%r" % (goal, target, e)) - CloseKey(target_key) - - system_key_name_path = (HKEY_LOCAL_MACHINE, "HKEY_LOCAL_MACHINE", "%s\\%s" % (SYSTEM_CLASSES, ext)) - user_key_name_path = (HKEY_CURRENT_USER, "HKEY_CURRENT_USER", "%s\\%s" % (USER_CLASSES, ext)) - - system_valueandtype = open_and_query(system_classes, ext, "", "the system association for %s" % (ext,)) - user_valueandtype = open_and_query(user_classes, ext, "", "the user association for %s" % (ext,)) - - if change_allusers: - (value, type) = system_valueandtype or (u'', REG_SZ) - key_name_path = system_key_name_path - else: - (value, type) = user_valueandtype or system_valueandtype or (u'', REG_SZ) - key_name_path = user_key_name_path - - if value == target: - print "Already done: %s." % (goal,) - else: - changed |= update(key_name_path, "", unicode(target), REG_SZ, goal, what) - - if change_allusers: - # Also change any overriding association for the current user. - (user_value, user_type) = user_valueandtype or (u'', REG_SZ) - - if user_value != target: - changed |= update(user_key_name_path, "", unicode(target), REG_SZ, - "associate the filetype %s with %s for the current user " \ - "(because the system association is overridden)" % (ext, target), - "the overriding user association for %s" % (ext,)) - - return changed - - - def broadcast_settingchange(change_allusers): - print "Broadcasting that the environment has changed, please wait..." 
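What follows below is the actual broadcast: a WM_SETTINGCHANGE message sent to every top-level window so that already-running shells notice the registry edits. A compact, hedged equivalent using only documented ctypes behaviour (Windows-only; elsewhere it is a no-op):

import ctypes, sys

def broadcast_environment_change():
    # Notify top-level windows that environment settings changed (Windows only).
    if sys.platform != "win32":
        return
    HWND_BROADCAST = 0xFFFF
    WM_SETTINGCHANGE = 0x001A
    SMTO_ABORTIFHUNG = 0x0002
    # ctypes passes the Python string as a wchar_t* pointer for the LPARAM.
    ctypes.windll.user32.SendMessageTimeoutW(
        HWND_BROADCAST, WM_SETTINGCHANGE, 0, u"Environment",
        SMTO_ABORTIFHUNG, 5000, None)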
- - # - # - # LRESULT WINAPI SendMessageTimeoutW(HWND hWnd, UINT msg, WPARAM wParam, LPARAM lParam, - # UINT fuFlags, UINT uTimeout, PDWORD_PTR lpdwResult); - - try: - from ctypes import WINFUNCTYPE, POINTER, windll, addressof, c_wchar_p - from ctypes.wintypes import LONG, HWND, UINT, WPARAM, LPARAM, DWORD - - SendMessageTimeout = WINFUNCTYPE(POINTER(LONG), HWND, UINT, WPARAM, LPARAM, UINT, UINT, POINTER(POINTER(DWORD))) \ - (("SendMessageTimeoutW", windll.user32)) - HWND_BROADCAST = 0xFFFF - WM_SETTINGCHANGE = 0x001A - SMTO_ABORTIFHUNG = 0x0002 - SendMessageTimeout(HWND_BROADCAST, WM_SETTINGCHANGE, change_allusers and 1 or 0, - addressof(c_wchar_p(u"Environment")), SMTO_ABORTIFHUNG, 5000, None); - except Exception, e: - print "Warning: %r" % (e,) - - - changed_assoc = associate(".pyscript", "Python.File", allusers) - - changed_env = False - try: - changed_env |= add_to_environment("PATHEXT", ".pyscript", allusers) - changed_env |= add_to_environment("PATHEXT", ".pyw", allusers) - finally: - CloseKey(user_env) - CloseKey(system_env) - - if changed_assoc or changed_env: - broadcast_settingchange(allusers) - - if changed_env: - # whether logout is needed seems to randomly differ between installations - # of XP, but it is not needed in Vista or later. - try: - import platform, re - need_logout = not re.search(r'^[6-9]|([1-9][0-9]+)\.', platform.version()) - except Exception, e: - e # hush pyflakes - need_logout = True - - if need_logout: - print """ -*********************************************************************** -Changes have been made to the persistent environment, but they may not -take effect in this Windows session. Running installed Python scripts -from a Command Prompt may only work after you have logged out and back -in again, or rebooted. -*********************************************************************** -""" - else: - print """ -*********************************************************************** -Changes have been made to the persistent environment, but not in this -Command Prompt. Running installed Python scripts will only work from -new Command Prompts opened from now on. 
-*********************************************************************** -""" diff --git a/zfec/setuptools-0.6c16dev3.egg/setuptools/command/sdist.py b/zfec/setuptools-0.6c16dev3.egg/setuptools/command/sdist.py deleted file mode 100644 index d84afdb..0000000 --- a/zfec/setuptools-0.6c16dev3.egg/setuptools/command/sdist.py +++ /dev/null @@ -1,246 +0,0 @@ -from distutils.command.sdist import sdist as _sdist -from distutils.util import convert_path -from distutils import log -from glob import glob -import os, re, sys, pkg_resources - -entities = [ - ("<","<"), (">", ">"), (""", '"'), ("'", "'"), - ("&", "&") -] - -def unescape(data): - for old,new in entities: - data = data.replace(old,new) - return data - -def re_finder(pattern, postproc=None): - def find(dirname, filename): - f = open(filename,'rU') - data = f.read() - f.close() - for match in pattern.finditer(data): - path = match.group(1) - if postproc: - path = postproc(path) - yield joinpath(dirname,path) - return find - -def joinpath(prefix,suffix): - if not prefix: - return suffix - return os.path.join(prefix,suffix) - - - - - - - - - -def walk_revctrl(dirname=''): - """Find all files under revision control""" - for ep in pkg_resources.iter_entry_points('setuptools.file_finders'): - for item in ep.load()(dirname): - yield item - -def _default_revctrl(dirname=''): - for path, finder in finders: - path = joinpath(dirname,path) - if os.path.isfile(path): - for path in finder(dirname,path): - if os.path.isfile(path): - yield path - elif os.path.isdir(path): - for item in _default_revctrl(path): - yield item - -def externals_finder(dirname, filename): - """Find any 'svn:externals' directories""" - found = False - f = open(filename,'rb') - for line in iter(f.readline, ''): # can't use direct iter! - parts = line.split() - if len(parts)==2: - kind,length = parts - data = f.read(int(length)) - if kind=='K' and data=='svn:externals': - found = True - elif kind=='V' and found: - f.close() - break - else: - f.close() - return - - for line in data.splitlines(): - parts = line.split() - if parts: - yield joinpath(dirname, parts[0]) - - -entries_pattern = re.compile(r'name="([^"]+)"(?![^>]+deleted="true")', re.I) - -def entries_finder(dirname, filename): - f = open(filename,'rU') - data = f.read() - f.close() - if data.startswith('=6 and record[5]=="delete": - continue # skip deleted - yield joinpath(dirname, record[0]) - - -finders = [ - (convert_path('CVS/Entries'), - re_finder(re.compile(r"^\w?/([^/]+)/", re.M))), - (convert_path('.svn/entries'), entries_finder), - (convert_path('.svn/dir-props'), externals_finder), - (convert_path('.svn/dir-prop-base'), externals_finder), # svn 1.4 -] - - - - - - - - - - - - -class sdist(_sdist): - """Smart sdist that finds anything supported by revision control""" - - user_options = [ - ('formats=', None, - "formats for source distribution (comma-separated list)"), - ('keep-temp', 'k', - "keep the distribution tree around after creating " + - "archive file(s)"), - ('dist-dir=', 'd', - "directory to put the source distribution archive(s) in " - "[default: dist]"), - ] - - negative_opt = {} - - def run(self): - self.run_command('egg_info') - ei_cmd = self.get_finalized_command('egg_info') - self.filelist = ei_cmd.filelist - self.filelist.append(os.path.join(ei_cmd.egg_info,'SOURCES.txt')) - self.check_readme() - self.check_metadata() - self.make_distribution() - - dist_files = getattr(self.distribution,'dist_files',[]) - for file in self.archive_files: - data = ('sdist', '', file) - if data not in dist_files: - 
dist_files.append(data) - - def read_template(self): - try: - _sdist.read_template(self) - except: - # grody hack to close the template file (MANIFEST.in) - # this prevents easy_install's attempt at deleting the file from - # dying and thus masking the real error - sys.exc_info()[2].tb_next.tb_frame.f_locals['template'].close() - raise - - # Cribbed from old distutils code, to work around new distutils code - # that tries to do some of the same stuff as we do, in a way that makes - # us loop. - - def add_defaults (self): - standards = [('README', 'README.txt'), self.distribution.script_name] - - for fn in standards: - if type(fn) is tuple: - alts = fn - got_it = 0 - for fn in alts: - if os.path.exists(fn): - got_it = 1 - self.filelist.append(fn) - break - - if not got_it: - self.warn("standard file not found: should have one of " + - ', '.join(alts)) - else: - if os.path.exists(fn): - self.filelist.append(fn) - else: - self.warn("standard file '%s' not found" % fn) - - optional = ['test/test*.py', 'setup.cfg'] - - for pattern in optional: - files = filter(os.path.isfile, glob(pattern)) - if files: - self.filelist.extend(files) - - if self.distribution.has_pure_modules(): - build_py = self.get_finalized_command('build_py') - self.filelist.extend(build_py.get_source_files()) - - if self.distribution.has_ext_modules(): - build_ext = self.get_finalized_command('build_ext') - self.filelist.extend(build_ext.get_source_files()) - - if self.distribution.has_c_libraries(): - build_clib = self.get_finalized_command('build_clib') - self.filelist.extend(build_clib.get_source_files()) - - if self.distribution.has_scripts(): - build_scripts = self.get_finalized_command('build_scripts') - self.filelist.extend(build_scripts.get_source_files()) - - - def check_readme(self): - alts = ("README", "README.txt") - for f in alts: - if os.path.exists(f): - return - else: - self.warn( - "standard file not found: should have one of " +', '.join(alts) - ) - - - def make_release_tree(self, base_dir, files): - _sdist.make_release_tree(self, base_dir, files) - - # Save any egg_info command line options used to create this sdist - dest = os.path.join(base_dir, 'setup.cfg') - if hasattr(os,'link') and os.path.exists(dest): - # unlink and re-copy, since it might be hard-linked, and - # we don't want to change the source version - os.unlink(dest) - self.copy_file('setup.cfg', dest) - - self.get_finalized_command('egg_info').save_version_info(dest) - - - - - - - - -# diff --git a/zfec/setuptools-0.6c16dev3.egg/setuptools/command/setopt.py b/zfec/setuptools-0.6c16dev3.egg/setuptools/command/setopt.py deleted file mode 100644 index e0c1058..0000000 --- a/zfec/setuptools-0.6c16dev3.egg/setuptools/command/setopt.py +++ /dev/null @@ -1,158 +0,0 @@ -import distutils, os -from setuptools import Command -from distutils.util import convert_path -from distutils import log -from distutils.errors import * - -__all__ = ['config_file', 'edit_config', 'option_base', 'setopt'] - - -def config_file(kind="local"): - """Get the filename of the distutils, local, global, or per-user config - - `kind` must be one of "local", "global", or "user" - """ - if kind=='local': - return 'setup.cfg' - if kind=='global': - return os.path.join( - os.path.dirname(distutils.__file__),'distutils.cfg' - ) - if kind=='user': - dot = os.name=='posix' and '.' 
or '' - return os.path.expanduser(convert_path("~/%spydistutils.cfg" % dot)) - raise ValueError( - "config_file() type must be 'local', 'global', or 'user'", kind - ) - - - - - - - - - - - - - - - -def edit_config(filename, settings, dry_run=False): - """Edit a configuration file to include `settings` - - `settings` is a dictionary of dictionaries or ``None`` values, keyed by - command/section name. A ``None`` value means to delete the entire section, - while a dictionary lists settings to be changed or deleted in that section. - A setting of ``None`` means to delete that setting. - """ - from ConfigParser import RawConfigParser - log.debug("Reading configuration from %s", filename) - opts = RawConfigParser() - opts.read([filename]) - for section, options in settings.items(): - if options is None: - log.info("Deleting section [%s] from %s", section, filename) - opts.remove_section(section) - else: - if not opts.has_section(section): - log.debug("Adding new section [%s] to %s", section, filename) - opts.add_section(section) - for option,value in options.items(): - if value is None: - log.debug("Deleting %s.%s from %s", - section, option, filename - ) - opts.remove_option(section,option) - if not opts.options(section): - log.info("Deleting empty [%s] section from %s", - section, filename) - opts.remove_section(section) - else: - log.debug( - "Setting %s.%s to %r in %s", - section, option, value, filename - ) - opts.set(section,option,value) - - log.info("Writing %s", filename) - if not dry_run: - f = open(filename,'w'); opts.write(f); f.close() - -class option_base(Command): - """Abstract base class for commands that mess with config files""" - - user_options = [ - ('global-config', 'g', - "save options to the site-wide distutils.cfg file"), - ('user-config', 'u', - "save options to the current user's pydistutils.cfg file"), - ('filename=', 'f', - "configuration file to use (default=setup.cfg)"), - ] - - boolean_options = [ - 'global-config', 'user-config', - ] - - def initialize_options(self): - self.global_config = None - self.user_config = None - self.filename = None - - def finalize_options(self): - filenames = [] - if self.global_config: - filenames.append(config_file('global')) - if self.user_config: - filenames.append(config_file('user')) - if self.filename is not None: - filenames.append(self.filename) - if not filenames: - filenames.append(config_file('local')) - if len(filenames)>1: - raise DistutilsOptionError( - "Must specify only one configuration file option", - filenames - ) - self.filename, = filenames - - - - -class setopt(option_base): - """Save command-line options to a file""" - - description = "set an option in setup.cfg or another config file" - - user_options = [ - ('command=', 'c', 'command to set an option for'), - ('option=', 'o', 'option to set'), - ('set-value=', 's', 'value of the option'), - ('remove', 'r', 'remove (unset) the value'), - ] + option_base.user_options - - boolean_options = option_base.boolean_options + ['remove'] - - def initialize_options(self): - option_base.initialize_options(self) - self.command = None - self.option = None - self.set_value = None - self.remove = None - - def finalize_options(self): - option_base.finalize_options(self) - if self.command is None or self.option is None: - raise DistutilsOptionError("Must specify --command *and* --option") - if self.set_value is None and not self.remove: - raise DistutilsOptionError("Must specify --set-value or --remove") - - def run(self): - edit_config( - self.filename, { - self.command: 
{self.option.replace('-','_'):self.set_value} - }, - self.dry_run - ) diff --git a/zfec/setuptools-0.6c16dev3.egg/setuptools/command/test.py b/zfec/setuptools-0.6c16dev3.egg/setuptools/command/test.py deleted file mode 100644 index df5add5..0000000 --- a/zfec/setuptools-0.6c16dev3.egg/setuptools/command/test.py +++ /dev/null @@ -1,164 +0,0 @@ -from setuptools import Command -from distutils.errors import DistutilsOptionError -import sys -from pkg_resources import * -from unittest import TestLoader, main - -class ScanningLoader(TestLoader): - - def loadTestsFromModule(self, module): - """Return a suite of all tests cases contained in the given module - - If the module is a package, load tests from all the modules in it. - If the module has an ``additional_tests`` function, call it and add - the return value to the tests. - """ - tests = [] - if module.__name__!='setuptools.tests.doctest': # ugh - tests.append(TestLoader.loadTestsFromModule(self,module)) - - if hasattr(module, "additional_tests"): - tests.append(module.additional_tests()) - - if hasattr(module, '__path__'): - for file in resource_listdir(module.__name__, ''): - if file.endswith('.py') and file!='__init__.py': - submodule = module.__name__+'.'+file[:-3] - else: - if resource_exists( - module.__name__, file+'/__init__.py' - ): - submodule = module.__name__+'.'+file - else: - continue - tests.append(self.loadTestsFromName(submodule)) - - if len(tests)!=1: - return self.suiteClass(tests) - else: - return tests[0] # don't create a nested suite for only one return - - -class test(Command): - """Command to run unit tests after in-place build""" - - description = "run unit tests after in-place build" - - user_options = [ - ('test-module=','m', "Run 'test_suite' in specified module"), - ('test-suite=','s', - "Test suite to run (e.g. 
'some_module.test_suite')"), - ('test-runner=','r', "Test runner to use"), - ] - - def initialize_options(self): - self.test_runner = None - self.test_suite = None - self.test_module = None - self.test_loader = None - - def finalize_options(self): - if self.test_suite is None: - if self.test_module is None: - self.test_suite = self.distribution.test_suite - else: - self.test_suite = self.test_module+".test_suite" - elif self.test_module: - raise DistutilsOptionError( - "You may specify a module or a suite, but not both" - ) - - self.test_args = [self.test_suite] - - if self.verbose: - self.test_args.insert(0,'--verbose') - if self.test_loader is None: - self.test_loader = getattr(self.distribution,'test_loader',None) - if self.test_loader is None: - self.test_loader = "setuptools.command.test:ScanningLoader" - if self.test_runner is None: - self.test_runner = getattr(self.distribution,'test_runner',None) - - - def with_project_on_sys_path(self, func): - # Ensure metadata is up-to-date - self.run_command('egg_info') - - # Build extensions in-place - self.reinitialize_command('build_ext', inplace=1) - self.run_command('build_ext') - - ei_cmd = self.get_finalized_command("egg_info") - - old_path = sys.path[:] - old_modules = sys.modules.copy() - - try: - sys.path.insert(0, normalize_path(ei_cmd.egg_base)) - working_set.__init__() - add_activation_listener(lambda dist: dist.activate()) - require('%s==%s' % (ei_cmd.egg_name, ei_cmd.egg_version)) - func() - finally: - sys.path[:] = old_path - sys.modules.clear() - sys.modules.update(old_modules) - working_set.__init__() - - - def run(self): - if self.distribution.install_requires: - self.distribution.fetch_build_eggs(self.distribution.install_requires) - if self.distribution.tests_require: - self.distribution.fetch_build_eggs(self.distribution.tests_require) - - if self.test_suite: - cmd = ' '.join(self.test_args) - if self.dry_run: - self.announce('skipping "unittest %s" (dry run)' % cmd) - else: - self.announce('running "unittest %s"' % cmd) - self.with_project_on_sys_path(self.run_tests) - - - def run_tests(self): - import unittest - loader_ep = EntryPoint.parse("x="+self.test_loader) - loader_class = loader_ep.load(require=False) - kw = {} - if self.test_runner is not None: - runner_ep = EntryPoint.parse("x="+self.test_runner) - runner_class = runner_ep.load(require=False) - kw['testRunner'] = runner_class() - unittest.main( - None, None, [unittest.__file__]+self.test_args, - testLoader = loader_class(), **kw - ) - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/zfec/setuptools-0.6c16dev3.egg/setuptools/command/upload.py b/zfec/setuptools-0.6c16dev3.egg/setuptools/command/upload.py deleted file mode 100644 index 7ac08c2..0000000 --- a/zfec/setuptools-0.6c16dev3.egg/setuptools/command/upload.py +++ /dev/null @@ -1,181 +0,0 @@ -"""distutils.command.upload - -Implements the Distutils 'upload' subcommand (upload package to PyPI).""" - -from distutils.errors import * -from distutils.core import Command -from distutils.spawn import spawn -from distutils import log -try: - from hashlib import md5 -except ImportError: - from md5 import md5 -import os -import socket -import platform -import ConfigParser -import httplib -import base64 -import urlparse -import cStringIO as StringIO - -class upload(Command): - - description = "upload binary package to PyPI" - - DEFAULT_REPOSITORY = 'http://pypi.python.org/pypi' - - user_options = [ - ('repository=', 'r', - "url of repository [default: %s]" % DEFAULT_REPOSITORY), - ('show-response', 
None, - 'display full response text from server'), - ('sign', 's', - 'sign files to upload using gpg'), - ('identity=', 'i', 'GPG identity used to sign files'), - ] - boolean_options = ['show-response', 'sign'] - - def initialize_options(self): - self.username = '' - self.password = '' - self.repository = '' - self.show_response = 0 - self.sign = False - self.identity = None - - def finalize_options(self): - if self.identity and not self.sign: - raise DistutilsOptionError( - "Must use --sign for --identity to have meaning" - ) - if os.environ.has_key('HOME'): - rc = os.path.join(os.environ['HOME'], '.pypirc') - if os.path.exists(rc): - self.announce('Using PyPI login from %s' % rc) - config = ConfigParser.ConfigParser({ - 'username':'', - 'password':'', - 'repository':''}) - config.read(rc) - if not self.repository: - self.repository = config.get('server-login', 'repository') - if not self.username: - self.username = config.get('server-login', 'username') - if not self.password: - self.password = config.get('server-login', 'password') - if not self.repository: - self.repository = self.DEFAULT_REPOSITORY - - def run(self): - if not self.distribution.dist_files: - raise DistutilsOptionError("No dist file created in earlier command") - for command, pyversion, filename in self.distribution.dist_files: - self.upload_file(command, pyversion, filename) - - def upload_file(self, command, pyversion, filename): - # Sign if requested - if self.sign: - gpg_args = ["gpg", "--detach-sign", "-a", filename] - if self.identity: - gpg_args[2:2] = ["--local-user", self.identity] - spawn(gpg_args, - dry_run=self.dry_run) - - # Fill in the data - content = open(filename,'rb').read() - basename = os.path.basename(filename) - comment = '' - if command=='bdist_egg' and self.distribution.has_ext_modules(): - comment = "built on %s" % platform.platform(terse=1) - data = { - ':action':'file_upload', - 'protcol_version':'1', - 'name':self.distribution.get_name(), - 'version':self.distribution.get_version(), - 'content':(basename,content), - 'filetype':command, - 'pyversion':pyversion, - 'md5_digest':md5(content).hexdigest(), - } - if command == 'bdist_rpm': - dist, version, id = platform.dist() - if dist: - comment = 'built for %s %s' % (dist, version) - elif command == 'bdist_dumb': - comment = 'built for %s' % platform.platform(terse=1) - data['comment'] = comment - - if self.sign: - data['gpg_signature'] = (os.path.basename(filename) + ".asc", - open(filename+".asc").read()) - - # set up the authentication - auth = "Basic " + base64.encodestring(self.username + ":" + self.password).strip() - - # Build up the MIME payload for the POST data - boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254' - sep_boundary = '\n--' + boundary - end_boundary = sep_boundary + '--' - body = StringIO.StringIO() - for key, value in data.items(): - # handle multiple entries for the same name - if type(value) != type([]): - value = [value] - for value in value: - if type(value) is tuple: - fn = ';filename="%s"' % value[0] - value = value[1] - else: - fn = "" - value = str(value) - body.write(sep_boundary) - body.write('\nContent-Disposition: form-data; name="%s"'%key) - body.write(fn) - body.write("\n\n") - body.write(value) - if value and value[-1] == '\r': - body.write('\n') # write an extra newline (lurve Macs) - body.write(end_boundary) - body.write("\n") - body = body.getvalue() - - self.announce("Submitting %s to %s" % (filename, self.repository), log.INFO) - - # build the Request - # We can't use urllib2 since we need 
to send the Basic - # auth right with the first request - schema, netloc, url, params, query, fragments = \ - urlparse.urlparse(self.repository) - assert not params and not query and not fragments - if schema == 'http': - http = httplib.HTTPConnection(netloc) - elif schema == 'https': - http = httplib.HTTPSConnection(netloc) - else: - raise AssertionError, "unsupported schema "+schema - - data = '' - loglevel = log.INFO - try: - http.connect() - http.putrequest("POST", url) - http.putheader('Content-type', - 'multipart/form-data; boundary=%s'%boundary) - http.putheader('Content-length', str(len(body))) - http.putheader('Authorization', auth) - http.endheaders() - http.send(body) - except socket.error, e: - self.announce(str(e), log.ERROR) - return - - r = http.getresponse() - if r.status == 200: - self.announce('Server response (%s): %s' % (r.status, r.reason), - log.INFO) - else: - self.announce('Upload failed (%s): %s' % (r.status, r.reason), - log.ERROR) - if self.show_response: - print '-'*75, r.read(), '-'*75 diff --git a/zfec/setuptools-0.6c16dev3.egg/setuptools/depends.py b/zfec/setuptools-0.6c16dev3.egg/setuptools/depends.py deleted file mode 100644 index 5fdf2d7..0000000 --- a/zfec/setuptools-0.6c16dev3.egg/setuptools/depends.py +++ /dev/null @@ -1,246 +0,0 @@ -from __future__ import generators -import sys, imp, marshal -from imp import PKG_DIRECTORY, PY_COMPILED, PY_SOURCE, PY_FROZEN -from distutils.version import StrictVersion, LooseVersion - -__all__ = [ - 'Require', 'find_module', 'get_module_constant', 'extract_constant' -] - -class Require: - """A prerequisite to building or installing a distribution""" - - def __init__(self,name,requested_version,module,homepage='', - attribute=None,format=None - ): - - if format is None and requested_version is not None: - format = StrictVersion - - if format is not None: - requested_version = format(requested_version) - if attribute is None: - attribute = '__version__' - - self.__dict__.update(locals()) - del self.self - - - def full_name(self): - """Return full package/distribution name, w/version""" - if self.requested_version is not None: - return '%s-%s' % (self.name,self.requested_version) - return self.name - - - def version_ok(self,version): - """Is 'version' sufficiently up-to-date?""" - return self.attribute is None or self.format is None or \ - str(version)!="unknown" and version >= self.requested_version - - - def get_version(self, paths=None, default="unknown"): - - """Get version number of installed module, 'None', or 'default' - - Search 'paths' for module. If not found, return 'None'. If found, - return the extracted version attribute, or 'default' if no version - attribute was specified, or the value cannot be determined without - importing the module. The version is formatted according to the - requirement's version format (if any), unless it is 'None' or the - supplied 'default'. 
- """ - - if self.attribute is None: - try: - f,p,i = find_module(self.module,paths) - if f: f.close() - return default - except ImportError: - return None - - v = get_module_constant(self.module,self.attribute,default,paths) - - if v is not None and v is not default and self.format is not None: - return self.format(v) - - return v - - - def is_present(self,paths=None): - """Return true if dependency is present on 'paths'""" - return self.get_version(paths) is not None - - - def is_current(self,paths=None): - """Return true if dependency is present and up-to-date on 'paths'""" - version = self.get_version(paths) - if version is None: - return False - return self.version_ok(version) - - -def _iter_code(code): - - """Yield '(op,arg)' pair for each operation in code object 'code'""" - - from array import array - from dis import HAVE_ARGUMENT, EXTENDED_ARG - - bytes = array('b',code.co_code) - eof = len(code.co_code) - - ptr = 0 - extended_arg = 0 - - while ptr=HAVE_ARGUMENT: - - arg = bytes[ptr+1] + bytes[ptr+2]*256 + extended_arg - ptr += 3 - - if op==EXTENDED_ARG: - extended_arg = arg * 65536L - continue - - else: - arg = None - ptr += 1 - - yield op,arg - - - - - - - - - - -def find_module(module, paths=None): - """Just like 'imp.find_module()', but with package support""" - - parts = module.split('.') - - while parts: - part = parts.pop(0) - f, path, (suffix,mode,kind) = info = imp.find_module(part, paths) - - if kind==PKG_DIRECTORY: - parts = parts or ['__init__'] - paths = [path] - - elif parts: - raise ImportError("Can't find %r in %s" % (parts,module)) - - return info - - - - - - - - - - - - - - - - - - - - - - - - -def get_module_constant(module, symbol, default=-1, paths=None): - - """Find 'module' by searching 'paths', and extract 'symbol' - - Return 'None' if 'module' does not exist on 'paths', or it does not define - 'symbol'. If the module defines 'symbol' as a constant, return the - constant. Otherwise, return 'default'.""" - - try: - f, path, (suffix,mode,kind) = find_module(module,paths) - except ImportError: - # Module doesn't exist - return None - - try: - if kind==PY_COMPILED: - f.read(8) # skip magic & date - code = marshal.load(f) - elif kind==PY_FROZEN: - code = imp.get_frozen_object(module) - elif kind==PY_SOURCE: - code = compile(f.read(), path, 'exec') - else: - # Not something we can parse; we'll have to import it. :( - if module not in sys.modules: - imp.load_module(module,f,path,(suffix,mode,kind)) - return getattr(sys.modules[module],symbol,None) - - finally: - if f: - f.close() - - return extract_constant(code,symbol,default) - - - - - - - - -def extract_constant(code,symbol,default=-1): - """Extract the constant value of 'symbol' from 'code' - - If the name 'symbol' is bound to a constant value by the Python code - object 'code', return that value. If 'symbol' is bound to an expression, - return 'default'. Otherwise, return 'None'. - - Return value is based on the first assignment to 'symbol'. 'symbol' must - be a global, or at least a non-"fast" local in the code block. That is, - only 'STORE_NAME' and 'STORE_GLOBAL' opcodes are checked, and 'symbol' - must be present in 'code.co_names'. 
- """ - - if symbol not in code.co_names: - # name's not there, can't possibly be an assigment - return None - - name_idx = list(code.co_names).index(symbol) - - STORE_NAME = 90 - STORE_GLOBAL = 97 - LOAD_CONST = 100 - - const = default - - for op, arg in _iter_code(code): - - if op==LOAD_CONST: - const = code.co_consts[arg] - elif arg==name_idx and (op==STORE_NAME or op==STORE_GLOBAL): - return const - else: - const = default - -if sys.platform.startswith('java') or sys.platform == 'cli': - # XXX it'd be better to test assertions about bytecode instead... - del extract_constant, get_module_constant - __all__.remove('extract_constant') - __all__.remove('get_module_constant') - - diff --git a/zfec/setuptools-0.6c16dev3.egg/setuptools/dist.py b/zfec/setuptools-0.6c16dev3.egg/setuptools/dist.py deleted file mode 100644 index 482c6bf..0000000 --- a/zfec/setuptools-0.6c16dev3.egg/setuptools/dist.py +++ /dev/null @@ -1,861 +0,0 @@ -__all__ = ['Distribution'] - -from distutils.core import Distribution as _Distribution -from setuptools.depends import Require -from setuptools.command.install import install -from setuptools.command.sdist import sdist -from setuptools.command.install_lib import install_lib -from distutils.errors import DistutilsOptionError, DistutilsPlatformError -from distutils.errors import DistutilsSetupError -import setuptools, pkg_resources, distutils.core, distutils.dist, distutils.cmd -import os, distutils.log, re - -def _get_unpatched(cls): - """Protect against re-patching the distutils if reloaded - - Also ensures that no other distutils extension monkeypatched the distutils - first. - """ - while cls.__module__.startswith('setuptools'): - cls, = cls.__bases__ - if not cls.__module__.startswith('distutils'): - raise AssertionError( - "distutils has already been patched by %r" % cls - ) - return cls - -_Distribution = _get_unpatched(_Distribution) - -sequence = tuple, list - -def check_importable(dist, attr, value): - try: - ep = pkg_resources.EntryPoint.parse('x='+value) - assert not ep.extras - except (TypeError,ValueError,AttributeError,AssertionError): - raise DistutilsSetupError( - "%r must be importable 'module:attrs' string (got %r)" - % (attr,value) - ) - - -def assert_string_list(dist, attr, value): - """Verify that value is a string list or None""" - try: - assert ''.join(value)!=value - except (TypeError,ValueError,AttributeError,AssertionError): - raise DistutilsSetupError( - "%r must be a list of strings (got %r)" % (attr,value) - ) - -def check_nsp(dist, attr, value): - """Verify that namespace packages are valid""" - assert_string_list(dist,attr,value) - for nsp in value: - if not dist.has_contents_for(nsp): - raise DistutilsSetupError( - "Distribution contains no modules or packages for " + - "namespace package %r" % nsp - ) - if '.' in nsp: - parent = '.'.join(nsp.split('.')[:-1]) - if parent not in value: - distutils.log.warn( - "WARNING: %r is declared as a package namespace, but %r" - " is not: please correct this in setup.py", nsp, parent - ) - -def check_extras(dist, attr, value): - """Verify that extras_require mapping is valid""" - try: - for k,v in value.items(): - list(pkg_resources.parse_requirements(v)) - except (TypeError,ValueError,AttributeError): - raise DistutilsSetupError( - "'extras_require' must be a dictionary whose values are " - "strings or lists of strings containing valid project/version " - "requirement specifiers." 
- ) - - - - -def assert_bool(dist, attr, value): - """Verify that value is True, False, 0, or 1""" - if bool(value) != value: - raise DistutilsSetupError( - "%r must be a boolean value (got %r)" % (attr,value) - ) -def check_requirements(dist, attr, value): - """Verify that install_requires is a valid requirements list""" - try: - list(pkg_resources.parse_requirements(value)) - except (TypeError,ValueError): - raise DistutilsSetupError( - "%r must be a string or list of strings " - "containing valid project/version requirement specifiers" % (attr,) - ) -def check_entry_points(dist, attr, value): - """Verify that entry_points map is parseable""" - try: - pkg_resources.EntryPoint.parse_map(value) - except ValueError, e: - raise DistutilsSetupError(e) - -def check_test_suite(dist, attr, value): - if not isinstance(value,basestring): - raise DistutilsSetupError("test_suite must be a string") - -def check_package_data(dist, attr, value): - """Verify that value is a dictionary of package names to glob lists""" - if isinstance(value,dict): - for k,v in value.items(): - if not isinstance(k,str): break - try: iter(v) - except TypeError: - break - else: - return - raise DistutilsSetupError( - attr+" must be a dictionary mapping package names to lists of " - "wildcard patterns" - ) - -def check_packages(dist, attr, value): - for pkgname in value: - if not re.match(r'\w+(\.\w+)*', pkgname): - distutils.log.warn( - "WARNING: %r not a valid package name; please use only" - ".-separated package names in setup.py", pkgname - ) - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -class Distribution(_Distribution): - """Distribution with support for features, tests, and package data - - This is an enhanced version of 'distutils.dist.Distribution' that - effectively adds the following new optional keyword arguments to 'setup()': - - 'install_requires' -- a string or sequence of strings specifying project - versions that the distribution requires when installed, in the format - used by 'pkg_resources.require()'. They will be installed - automatically when the package is installed. If you wish to use - packages that are not available in PyPI, or want to give your users an - alternate download location, you can add a 'find_links' option to the - '[easy_install]' section of your project's 'setup.cfg' file, and then - setuptools will scan the listed web pages for links that satisfy the - requirements. - - 'extras_require' -- a dictionary mapping names of optional "extras" to the - additional requirement(s) that using those extras incurs. For example, - this:: - - extras_require = dict(reST = ["docutils>=0.3", "reSTedit"]) - - indicates that the distribution can optionally provide an extra - capability called "reST", but it can only be used if docutils and - reSTedit are installed. If the user installs your package using - EasyInstall and requests one of your extras, the corresponding - additional requirements will be installed if needed. - - 'features' -- a dictionary mapping option names to 'setuptools.Feature' - objects. Features are a portion of the distribution that can be - included or excluded based on user options, inter-feature dependencies, - and availability on the current system. Excluded features are omitted - from all setup commands, including source and binary distributions, so - you can create multiple distributions from the same source tree. - Feature names should be valid Python identifiers, except that they may - contain the '-' (minus) sign. 
Features can be included or excluded - via the command line options '--with-X' and '--without-X', where 'X' is - the name of the feature. Whether a feature is included by default, and - whether you are allowed to control this from the command line, is - determined by the Feature object. See the 'Feature' class for more - information. - - 'test_suite' -- the name of a test suite to run for the 'test' command. - If the user runs 'python setup.py test', the package will be installed, - and the named test suite will be run. The format is the same as - would be used on a 'unittest.py' command line. That is, it is the - dotted name of an object to import and call to generate a test suite. - - 'package_data' -- a dictionary mapping package names to lists of filenames - or globs to use to find data files contained in the named packages. - If the dictionary has filenames or globs listed under '""' (the empty - string), those names will be searched for in every package, in addition - to any names for the specific package. Data files found using these - names/globs will be installed along with the package, in the same - location as the package. Note that globs are allowed to reference - the contents of non-package subdirectories, as long as you use '/' as - a path separator. (Globs are automatically converted to - platform-specific paths at runtime.) - - In addition to these new keywords, this class also has several new methods - for manipulating the distribution's contents. For example, the 'include()' - and 'exclude()' methods can be thought of as in-place add and subtract - commands that add or remove packages, modules, extensions, and so on from - the distribution. They are used by the feature subsystem to configure the - distribution for the included and excluded features. - """ - - _patched_dist = None - - def patch_missing_pkg_info(self, attrs): - # Fake up a replacement for the data that would normally come from - # PKG-INFO, but which might not yet be built if this is a fresh - # checkout. 
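patch_missing_pkg_info() locates the already-activated distribution through pkg_resources' working set, keyed by the normalized project name. That lookup is easy to try in isolation; a small sketch, assuming pkg_resources is importable (it ships with setuptools and is deprecated in current releases, so treat this as illustrative):

import pkg_resources

# Normalize the name the way the method below does, then consult the
# working set of distributions active on sys.path.
key = pkg_resources.safe_name("setuptools").lower()
dist = pkg_resources.working_set.by_key.get(key)
if dist is not None:
    print(dist.project_name, dist.version)

# An already well-formed version string passes through unchanged.
print(pkg_resources.safe_version("1.4.2"))  # prints 1.4.2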
- # - if not attrs or 'name' not in attrs or 'version' not in attrs: - return - key = pkg_resources.safe_name(str(attrs['name'])).lower() - dist = pkg_resources.working_set.by_key.get(key) - if dist is not None and not dist.has_metadata('PKG-INFO'): - dist._version = pkg_resources.safe_version(str(attrs['version'])) - self._patched_dist = dist - - def __init__ (self, attrs=None): - have_package_data = hasattr(self, "package_data") - if not have_package_data: - self.package_data = {} - self.require_features = [] - self.features = {} - self.dist_files = [] - self.patch_missing_pkg_info(attrs) - # Make sure we have any eggs needed to interpret 'attrs' - if attrs is not None: - self.dependency_links = attrs.pop('dependency_links', []) - assert_string_list(self,'dependency_links',self.dependency_links) - if attrs and 'setup_requires' in attrs: - self.fetch_build_eggs(attrs.pop('setup_requires')) - for ep in pkg_resources.iter_entry_points('distutils.setup_keywords'): - if not hasattr(self,ep.name): - setattr(self,ep.name,None) - _Distribution.__init__(self,attrs) - if isinstance(self.metadata.version, (int,long,float)): - # Some people apparently take "version number" too literally :) - self.metadata.version = str(self.metadata.version) - - def parse_command_line(self): - """Process features after parsing command line options""" - result = _Distribution.parse_command_line(self) - if self.features: - self._finalize_features() - return result - - def _feature_attrname(self,name): - """Convert feature name to corresponding option attribute name""" - return 'with_'+name.replace('-','_') - - def fetch_build_eggs(self, requires): - """Resolve pre-setup requirements""" - from pkg_resources import working_set, parse_requirements - for dist in working_set.resolve( - parse_requirements(requires), installer=self.fetch_build_egg - ): - working_set.add(dist) - - def finalize_options(self): - _Distribution.finalize_options(self) - if self.features: - self._set_global_opts_from_features() - - for ep in pkg_resources.iter_entry_points('distutils.setup_keywords'): - value = getattr(self,ep.name,None) - if value is not None: - ep.require(installer=self.fetch_build_egg) - ep.load()(self, ep.name, value) - - def fetch_build_egg(self, req): - """Fetch an egg needed for building""" - try: - cmd = self._egg_fetcher - except AttributeError: - from setuptools.command.easy_install import easy_install - dist = self.__class__({'script_args':['easy_install']}) - dist.parse_config_files() - opts = dist.get_option_dict('easy_install') - keep = ( - 'find_links', 'site_dirs', 'index_url', 'optimize', - 'site_dirs', 'allow_hosts' - ) - for key in opts.keys(): - if key not in keep: - del opts[key] # don't use any other settings - if self.dependency_links: - links = self.dependency_links[:] - if 'find_links' in opts: - links = opts['find_links'][1].split() + links - opts['find_links'] = ('setup', links) - cmd = easy_install( - dist, args=["x"], install_dir=os.curdir, exclude_scripts=True, - always_copy=False, build_directory=None, editable=False, - upgrade=False, multi_version=True, no_report = True - ) - cmd.ensure_finalized() - self._egg_fetcher = cmd - return cmd.easy_install(req) - - def _set_global_opts_from_features(self): - """Add --with-X/--without-X options based on optional features""" - - go = [] - no = self.negative_opt.copy() - - for name,feature in self.features.items(): - self._set_feature(name,None) - feature.validate(self) - - if feature.optional: - descr = feature.description - incdef = ' (default)' - excdef='' 
- if not feature.include_by_default(): - excdef, incdef = incdef, excdef - - go.append(('with-'+name, None, 'include '+descr+incdef)) - go.append(('without-'+name, None, 'exclude '+descr+excdef)) - no['without-'+name] = 'with-'+name - - self.global_options = self.feature_options = go + self.global_options - self.negative_opt = self.feature_negopt = no - - - - - - - - - - - - - - - - - - - def _finalize_features(self): - """Add/remove features and resolve dependencies between them""" - - # First, flag all the enabled items (and thus their dependencies) - for name,feature in self.features.items(): - enabled = self.feature_is_included(name) - if enabled or (enabled is None and feature.include_by_default()): - feature.include_in(self) - self._set_feature(name,1) - - # Then disable the rest, so that off-by-default features don't - # get flagged as errors when they're required by an enabled feature - for name,feature in self.features.items(): - if not self.feature_is_included(name): - feature.exclude_from(self) - self._set_feature(name,0) - - - def get_command_class(self, command): - """Pluggable version of get_command_class()""" - if command in self.cmdclass: - return self.cmdclass[command] - - for ep in pkg_resources.iter_entry_points('distutils.commands',command): - ep.require(installer=self.fetch_build_egg) - self.cmdclass[command] = cmdclass = ep.load() - return cmdclass - else: - return _Distribution.get_command_class(self, command) - - def print_commands(self): - for ep in pkg_resources.iter_entry_points('distutils.commands'): - if ep.name not in self.cmdclass: - cmdclass = ep.load(False) # don't require extras, we're not running - self.cmdclass[ep.name] = cmdclass - return _Distribution.print_commands(self) - - - - - - def _set_feature(self,name,status): - """Set feature's inclusion status""" - setattr(self,self._feature_attrname(name),status) - - def feature_is_included(self,name): - """Return 1 if feature is included, 0 if excluded, 'None' if unknown""" - return getattr(self,self._feature_attrname(name)) - - def include_feature(self,name): - """Request inclusion of feature named 'name'""" - - if self.feature_is_included(name)==0: - descr = self.features[name].description - raise DistutilsOptionError( - descr + " is required, but was excluded or is not available" - ) - self.features[name].include_in(self) - self._set_feature(name,1) - - def include(self,**attrs): - """Add items to distribution that are named in keyword arguments - - For example, 'dist.exclude(py_modules=["x"])' would add 'x' to - the distribution's 'py_modules' attribute, if it was not already - there. - - Currently, this method only supports inclusion for attributes that are - lists or tuples. If you need to add support for adding to other - attributes in this or a subclass, you can add an '_include_X' method, - where 'X' is the name of the attribute. The method will be called with - the value passed to 'include()'. So, 'dist.include(foo={"bar":"baz"})' - will try to call 'dist._include_foo({"bar":"baz"})', which can then - handle whatever special inclusion logic is needed. - """ - for k,v in attrs.items(): - include = getattr(self, '_include_'+k, None) - if include: - include(v) - else: - self._include_misc(k,v) - - def exclude_package(self,package): - """Remove packages, modules, and extensions in named package""" - - pfx = package+'.' 
- if self.packages: - self.packages = [ - p for p in self.packages - if p!=package and not p.startswith(pfx) - ] - - if self.py_modules: - self.py_modules = [ - p for p in self.py_modules - if p!=package and not p.startswith(pfx) - ] - - if self.ext_modules: - self.ext_modules = [ - p for p in self.ext_modules - if p.name!=package and not p.name.startswith(pfx) - ] - - - def has_contents_for(self,package): - """Return true if 'exclude_package(package)' would do something""" - - pfx = package+'.' - - for p in self.iter_distribution_names(): - if p==package or p.startswith(pfx): - return True - - - - - - - - - - - def _exclude_misc(self,name,value): - """Handle 'exclude()' for list/tuple attrs without a special handler""" - if not isinstance(value,sequence): - raise DistutilsSetupError( - "%s: setting must be a list or tuple (%r)" % (name, value) - ) - try: - old = getattr(self,name) - except AttributeError: - raise DistutilsSetupError( - "%s: No such distribution setting" % name - ) - if old is not None and not isinstance(old,sequence): - raise DistutilsSetupError( - name+": this setting cannot be changed via include/exclude" - ) - elif old: - setattr(self,name,[item for item in old if item not in value]) - - def _include_misc(self,name,value): - """Handle 'include()' for list/tuple attrs without a special handler""" - - if not isinstance(value,sequence): - raise DistutilsSetupError( - "%s: setting must be a list (%r)" % (name, value) - ) - try: - old = getattr(self,name) - except AttributeError: - raise DistutilsSetupError( - "%s: No such distribution setting" % name - ) - if old is None: - setattr(self,name,value) - elif not isinstance(old,sequence): - raise DistutilsSetupError( - name+": this setting cannot be changed via include/exclude" - ) - else: - setattr(self,name,old+[item for item in value if item not in old]) - - def exclude(self,**attrs): - """Remove items from distribution that are named in keyword arguments - - For example, 'dist.exclude(py_modules=["x"])' would remove 'x' from - the distribution's 'py_modules' attribute. Excluding packages uses - the 'exclude_package()' method, so all of the package's contained - packages, modules, and extensions are also excluded. - - Currently, this method only supports exclusion from attributes that are - lists or tuples. If you need to add support for excluding from other - attributes in this or a subclass, you can add an '_exclude_X' method, - where 'X' is the name of the attribute. The method will be called with - the value passed to 'exclude()'. So, 'dist.exclude(foo={"bar":"baz"})' - will try to call 'dist._exclude_foo({"bar":"baz"})', which can then - handle whatever special exclusion logic is needed. - """ - for k,v in attrs.items(): - exclude = getattr(self, '_exclude_'+k, None) - if exclude: - exclude(v) - else: - self._exclude_misc(k,v) - - def _exclude_packages(self,packages): - if not isinstance(packages,sequence): - raise DistutilsSetupError( - "packages: setting must be a list or tuple (%r)" % (packages,) - ) - map(self.exclude_package, packages) - - - - - - - - - - - - - def _parse_command_opts(self, parser, args): - # Remove --with-X/--without-X options when processing command args - self.global_options = self.__class__.global_options - self.negative_opt = self.__class__.negative_opt - - # First, expand any aliases - command = args[0] - aliases = self.get_option_dict('aliases') - while command in aliases: - src,alias = aliases[command] - del aliases[command] # ensure each alias can expand only once! 
- import shlex - args[:1] = shlex.split(alias,True) - command = args[0] - - nargs = _Distribution._parse_command_opts(self, parser, args) - - # Handle commands that want to consume all remaining arguments - cmd_class = self.get_command_class(command) - if getattr(cmd_class,'command_consumes_arguments',None): - self.get_option_dict(command)['args'] = ("command line", nargs) - if nargs is not None: - return [] - - return nargs - - - - - - - - - - - - - - - - - def get_cmdline_options(self): - """Return a '{cmd: {opt:val}}' map of all command-line options - - Option names are all long, but do not include the leading '--', and - contain dashes rather than underscores. If the option doesn't take - an argument (e.g. '--quiet'), the 'val' is 'None'. - - Note that options provided by config files are intentionally excluded. - """ - - d = {} - - for cmd,opts in self.command_options.items(): - - for opt,(src,val) in opts.items(): - - if src != "command line": - continue - - opt = opt.replace('_','-') - - if val==0: - cmdobj = self.get_command_obj(cmd) - neg_opt = self.negative_opt.copy() - neg_opt.update(getattr(cmdobj,'negative_opt',{})) - for neg,pos in neg_opt.items(): - if pos==opt: - opt=neg - val=None - break - else: - raise AssertionError("Shouldn't be able to get here") - - elif val==1: - val = None - - d.setdefault(cmd,{})[opt] = val - - return d - - - def iter_distribution_names(self): - """Yield all packages, modules, and extension names in distribution""" - - for pkg in self.packages or (): - yield pkg - - for module in self.py_modules or (): - yield module - - for ext in self.ext_modules or (): - if isinstance(ext,tuple): - name, buildinfo = ext - else: - name = ext.name - if name.endswith('module'): - name = name[:-6] - yield name - -# Install it throughout the distutils -for module in distutils.dist, distutils.core, distutils.cmd: - module.Distribution = Distribution - - - - - - - - - - - - - - - - - - - - -class Feature: - """A subset of the distribution that can be excluded if unneeded/wanted - - Features are created using these keyword arguments: - - 'description' -- a short, human readable description of the feature, to - be used in error messages, and option help messages. - - 'standard' -- if true, the feature is included by default if it is - available on the current system. Otherwise, the feature is only - included if requested via a command line '--with-X' option, or if - another included feature requires it. The default setting is 'False'. - - 'available' -- if true, the feature is available for installation on the - current system. The default setting is 'True'. - - 'optional' -- if true, the feature's inclusion can be controlled from the - command line, using the '--with-X' or '--without-X' options. If - false, the feature's inclusion status is determined automatically, - based on 'availabile', 'standard', and whether any other feature - requires it. The default setting is 'True'. - - 'require_features' -- a string or sequence of strings naming features - that should also be included if this feature is included. Defaults to - empty list. May also contain 'Require' objects that should be - added/removed from the distribution. - - 'remove' -- a string or list of strings naming packages to be removed - from the distribution if this feature is *not* included. If the - feature *is* included, this argument is ignored. 
This argument exists - to support removing features that "crosscut" a distribution, such as - defining a 'tests' feature that removes all the 'tests' subpackages - provided by other features. The default for this argument is an empty - list. (Note: the named package(s) or modules must exist in the base - distribution when the 'setup()' function is initially called.) - - other keywords -- any other keyword arguments are saved, and passed to - the distribution's 'include()' and 'exclude()' methods when the - feature is included or excluded, respectively. So, for example, you - could pass 'packages=["a","b"]' to cause packages 'a' and 'b' to be - added or removed from the distribution as appropriate. - - A feature must include at least one 'requires', 'remove', or other - keyword argument. Otherwise, it can't affect the distribution in any way. - Note also that you can subclass 'Feature' to create your own specialized - feature types that modify the distribution in other ways when included or - excluded. See the docstrings for the various methods here for more detail. - Aside from the methods, the only feature attributes that distributions look - at are 'description' and 'optional'. - """ - def __init__(self, description, standard=False, available=True, - optional=True, require_features=(), remove=(), **extras - ): - - self.description = description - self.standard = standard - self.available = available - self.optional = optional - if isinstance(require_features,(str,Require)): - require_features = require_features, - - self.require_features = [ - r for r in require_features if isinstance(r,str) - ] - er = [r for r in require_features if not isinstance(r,str)] - if er: extras['require_features'] = er - - if isinstance(remove,str): - remove = remove, - self.remove = remove - self.extras = extras - - if not remove and not require_features and not extras: - raise DistutilsSetupError( - "Feature %s: must define 'require_features', 'remove', or at least one" - " of 'packages', 'py_modules', etc." - ) - - def include_by_default(self): - """Should this feature be included by default?""" - return self.available and self.standard - - def include_in(self,dist): - - """Ensure feature and its requirements are included in distribution - - You may override this in a subclass to perform additional operations on - the distribution. Note that this method may be called more than once - per feature, and so should be idempotent. - - """ - - if not self.available: - raise DistutilsPlatformError( - self.description+" is required," - "but is not available on this platform" - ) - - dist.include(**self.extras) - - for f in self.require_features: - dist.include_feature(f) - - - - def exclude_from(self,dist): - - """Ensure feature is excluded from distribution - - You may override this in a subclass to perform additional operations on - the distribution. This method will be called at most once per - feature, and only after all included features have been asked to - include themselves. - """ - - dist.exclude(**self.extras) - - if self.remove: - for item in self.remove: - dist.exclude_package(item) - - - - def validate(self,dist): - - """Verify that feature makes sense in context of distribution - - This method is called by the distribution just before it parses its - command line. It checks to ensure that the 'remove' attribute, if any, - contains only valid package/module names that are present in the base - distribution when 'setup()' is called. 
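A minimal sketch of how these Feature keywords combine, assuming a hypothetical distribution with a 'pkg.tests' subpackage (all names illustrative)::

    from setuptools import setup, Feature

    setup(
        name="example",
        version="1.0",
        packages=["pkg", "pkg.tests"],
        features={
            "tests": Feature(
                "the bundled test suite",   # shown in --with-tests/--without-tests help
                standard=True,              # included unless --without-tests is given
                remove=["pkg.tests"],       # stripped from the build when the feature is excluded
            ),
        },
    )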
You may override it in a - subclass to perform any other required validation of the feature - against a target distribution. - """ - - for item in self.remove: - if not dist.has_contents_for(item): - raise DistutilsSetupError( - "%s wants to be able to remove %s, but the distribution" - " doesn't contain any packages or modules under %s" - % (self.description, item, item) - ) - - - - - - - - - - - - - - - - - - - - - - diff --git a/zfec/setuptools-0.6c16dev3.egg/setuptools/extension.py b/zfec/setuptools-0.6c16dev3.egg/setuptools/extension.py deleted file mode 100644 index cfcf55b..0000000 --- a/zfec/setuptools-0.6c16dev3.egg/setuptools/extension.py +++ /dev/null @@ -1,35 +0,0 @@ -from distutils.core import Extension as _Extension -from dist import _get_unpatched -_Extension = _get_unpatched(_Extension) - -try: - from Pyrex.Distutils.build_ext import build_ext -except ImportError: - have_pyrex = False -else: - have_pyrex = True - - -class Extension(_Extension): - """Extension that uses '.c' files in place of '.pyx' files""" - - if not have_pyrex: - # convert .pyx extensions to .c - def __init__(self,*args,**kw): - _Extension.__init__(self,*args,**kw) - sources = [] - for s in self.sources: - if s.endswith('.pyx'): - sources.append(s[:-3]+'c') - else: - sources.append(s) - self.sources = sources - -class Library(Extension): - """Just like a regular Extension, but built as a library instead""" - -import sys, distutils.core, distutils.extension -distutils.core.Extension = Extension -distutils.extension.Extension = Extension -if 'distutils.command.build_ext' in sys.modules: - sys.modules['distutils.command.build_ext'].Extension = Extension diff --git a/zfec/setuptools-0.6c16dev3.egg/setuptools/package_index.py b/zfec/setuptools-0.6c16dev3.egg/setuptools/package_index.py deleted file mode 100644 index 3d82d93..0000000 --- a/zfec/setuptools-0.6c16dev3.egg/setuptools/package_index.py +++ /dev/null @@ -1,798 +0,0 @@ -"""PyPI and direct package downloading""" -import sys, os.path, re, urlparse, urllib2, shutil, random, socket, cStringIO -import httplib, urllib -from pkg_resources import * -from distutils import log -from distutils.errors import DistutilsError -try: - from hashlib import md5 -except ImportError: - from md5 import md5 -from fnmatch import translate -EGG_FRAGMENT = re.compile(r'^egg=([-A-Za-z0-9_.]+)$') -HREF = re.compile("""href\\s*=\\s*['"]?([^'"> ]+)""", re.I) -# this is here to fix emacs' cruddy broken syntax highlighting -PYPI_MD5 = re.compile( - '([^<]+)\n\s+\\(md5\\)' -) -URL_SCHEME = re.compile('([-+.a-z0-9]{2,}):',re.I).match -EXTENSIONS = ".tar.gz .tar.bz2 .tar .zip .tgz".split() - -def is_local(url_or_fname): - """ Return True if url_or_fname is a "file:" url or if it is a schemaless thing (which is presumably a filename). """ - mo = URL_SCHEME(url_or_fname) - return not (mo and mo.group(1).lower()!='file') - -def url_or_fname_to_fname(url_or_fname): - """ Assert that is_local(url_or_fname) then if it is a "file:" url, parse it and run url2pathname on it, else just return it. 
""" - assert is_local(url_or_fname) - - mo = URL_SCHEME(url_or_fname) - if mo: - return urllib2.url2pathname(urlparse.urlparse(url)[2]) - else: - return url_or_fname - -__all__ = [ - 'PackageIndex', 'distros_for_url', 'parse_bdist_wininst', - 'interpret_distro_name', -] - -def parse_bdist_wininst(name): - """Return (base,pyversion) or (None,None) for possible .exe name""" - - lower = name.lower() - base, py_ver = None, None - - if lower.endswith('.exe'): - if lower.endswith('.win32.exe'): - base = name[:-10] - elif lower.startswith('.win32-py',-16): - py_ver = name[-7:-4] - base = name[:-16] - - return base,py_ver - -def egg_info_for_url(url): - scheme, server, path, parameters, query, fragment = urlparse.urlparse(url) - base = urllib2.unquote(path.split('/')[-1]) - if server=='sourceforge.net' and base=='download': # XXX Yuck - base = urllib2.unquote(path.split('/')[-2]) - if '#' in base: base, fragment = base.split('#',1) - return base,fragment - -def distros_for_url(url, metadata=None): - """Yield egg or source distribution objects that might be found at a URL""" - base, fragment = egg_info_for_url(url) - for dist in distros_for_location(url, base, metadata): yield dist - if fragment: - match = EGG_FRAGMENT.match(fragment) - if match: - for dist in interpret_distro_name( - url, match.group(1), metadata, precedence = CHECKOUT_DIST - ): - yield dist - -def distros_for_location(location, basename, metadata=None): - """Yield egg or source distribution objects based on basename""" - if basename.endswith('.egg.zip'): - basename = basename[:-4] # strip the .zip - if basename.endswith('.egg') and '-' in basename: - # only one, unambiguous interpretation - return [Distribution.from_location(location, basename, metadata)] - if basename.endswith('.exe'): - win_base, py_ver = parse_bdist_wininst(basename) - if win_base is not None: - return interpret_distro_name( - location, win_base, metadata, py_ver, BINARY_DIST, "win32" - ) - # Try source distro extensions (.zip, .tgz, etc.) - # - for ext in EXTENSIONS: - if basename.endswith(ext): - basename = basename[:-len(ext)] - return interpret_distro_name(location, basename, metadata) - return [] # no extension matched - -def distros_for_filename(filename, metadata=None): - """Yield possible egg or source distribution objects based on a filename""" - return distros_for_location( - normalize_path(filename), os.path.basename(filename), metadata - ) - - -def interpret_distro_name(location, basename, metadata, - py_version=None, precedence=SOURCE_DIST, platform=None -): - """Generate alternative interpretations of a source distro name - - Note: if `location` is a filesystem filename, you should call - ``pkg_resources.normalize_path()`` on it before passing it to this - routine! - """ - # Generate alternative interpretations of a source distro name - # Because some packages are ambiguous as to name/versions split - # e.g. "adns-python-1.1.0", "egenix-mx-commercial", etc. - # So, we generate each possible interepretation (e.g. "adns, python-1.1.0" - # "adns-python, 1.1.0", and "adns-python-1.1.0, no version"). In practice, - # the spurious interpretations should be ignored, because in the event - # there's also an "adns" package, the spurious "python-1.1.0" version will - # compare lower than any numeric version number, and is therefore unlikely - # to match a request for it. It's still a potential problem, though, and - # in the long run PyPI and the distutils should go for "safe" names and - # versions in distribution archive names (sdist and bdist). 
- - parts = basename.split('-') - if not py_version: - for i,p in enumerate(parts[2:]): - if len(p)==5 and p.startswith('py2.'): - return # It's a bdist_dumb, not an sdist -- bail out - - for p in range(1,len(parts)+1): - yield Distribution( - location, metadata, '-'.join(parts[:p]), '-'.join(parts[p:]), - py_version=py_version, precedence = precedence, - platform = platform - ) - -REL = re.compile("""<([^>]*\srel\s*=\s*['"]?([^'">]+)[^>]*)>""", re.I) -# this line is here to fix emacs' cruddy broken syntax highlighting - -def find_external_links(url, page): - """Find rel="homepage" and rel="download" links in `page`, yielding URLs""" - - for match in REL.finditer(page): - tag, rel = match.groups() - rels = map(str.strip, rel.lower().split(',')) - if 'homepage' in rels or 'download' in rels: - for match in HREF.finditer(tag): - yield urlparse.urljoin(url, htmldecode(match.group(1))) - - for tag in ("Home Page", "Download URL"): - pos = page.find(tag) - if pos!=-1: - match = HREF.search(page,pos) - if match: - yield urlparse.urljoin(url, htmldecode(match.group(1))) - -user_agent = "Python-urllib/%s setuptools/%s" % ( - urllib2.__version__, require('setuptools')[0].version -) - - -class PackageIndex(Environment): - """A distribution index that scans web pages for download URLs""" - - def __init__(self, index_url="http://pypi.python.org/simple", hosts=('*',), - *args, **kw - ): - Environment.__init__(self,*args,**kw) - self.index_url = index_url + "/"[:not index_url.endswith('/')] - self.scanned_urls = {} - self.fetched_urls = {} - self.package_pages = {} - self.allows = re.compile('|'.join(map(translate,hosts))).match - self.to_scan = [] - - - - def process_url(self, url, retrieve=False): - """Evaluate a URL as a possible download, and maybe retrieve it""" - if url in self.scanned_urls and not retrieve: - return - self.scanned_urls[url] = True - if not URL_SCHEME(url): - self.process_filename(url) - return - else: - dists = list(distros_for_url(url)) - if dists: - if not self.url_ok(url): - return - self.debug("Found link: %s", url) - - if dists or not retrieve or url in self.fetched_urls: - map(self.add, dists) - return # don't need the actual page - - if not self.url_ok(url): - self.fetched_urls[url] = True - return - - self.info("Reading %s", url) - self.fetched_urls[url] = True # prevent multiple fetch attempts - f = self.open_url(url, "Download error: %s -- Some packages may not be found!") - if f is None: return - self.fetched_urls[f.url] = True - if 'html' not in f.headers.get('content-type', '').lower(): - f.close() # not html, we can't process it - return - - base = f.url # handle redirects - page = f.read() - f.close() - if url.startswith(self.index_url) and getattr(f,'code',None)!=404: - page = self.process_index(url, page) - for match in HREF.finditer(page): - link = urlparse.urljoin(base, htmldecode(match.group(1))) - self.process_url(link) - - def process_filename(self, fn, nested=False): - # process filenames or directories - if not os.path.exists(fn): - self.warn("Not found: %s", fn) - return - - if os.path.isdir(fn) and not nested: - path = os.path.realpath(fn) - for item in os.listdir(path): - self.process_filename(os.path.join(path,item), True) - - dists = distros_for_filename(fn) - if dists: - self.debug("Found: %s", fn) - map(self.add, dists) - - def url_ok(self, url, fatal=False): - s = URL_SCHEME(url) - if (s and s.group(1).lower()=='file') or self.allows(urlparse.urlparse(url)[1]): - return True - msg = "\nLink to % s ***BLOCKED*** by --allow-hosts\n" - if fatal: - 
raise DistutilsError(msg % url) - else: - self.warn(msg, url) - - def scan_egg_links(self, search_path): - for item in search_path: - if os.path.isdir(item): - for entry in os.listdir(item): - if entry.endswith('.egg-link'): - self.scan_egg_link(item, entry) - - def scan_egg_link(self, path, entry): - lines = filter(None, map(str.strip, file(os.path.join(path, entry)))) - if len(lines)==2: - for dist in find_distributions(os.path.join(path, lines[0])): - dist.location = os.path.join(path, *lines) - dist.precedence = SOURCE_DIST - self.add(dist) - - def process_index(self,url,page): - """Process the contents of a PyPI page""" - def scan(link): - # Process a URL to see if it's for a package page - if link.startswith(self.index_url): - parts = map( - urllib2.unquote, link[len(self.index_url):].split('/') - ) - if len(parts)==2 and '#' not in parts[1]: - # it's a package page, sanitize and index it - pkg = safe_name(parts[0]) - ver = safe_version(parts[1]) - self.package_pages.setdefault(pkg.lower(),{})[link] = True - return to_filename(pkg), to_filename(ver) - return None, None - - # process an index page into the package-page index - for match in HREF.finditer(page): - scan( urlparse.urljoin(url, htmldecode(match.group(1))) ) - - pkg, ver = scan(url) # ensure this page is in the page index - if pkg: - # process individual package page - for new_url in find_external_links(url, page): - # Process the found URL - base, frag = egg_info_for_url(new_url) - if base.endswith('.py') and not frag: - if ver: - new_url+='#egg=%s-%s' % (pkg,ver) - else: - self.need_version_info(url) - self.scan_url(new_url) - - return PYPI_MD5.sub( - lambda m: '%s' % m.group(1,3,2), page - ) - else: - return "" # no sense double-scanning non-package pages - - - - def need_version_info(self, url): - self.scan_all( - "Page at %s links to .py file(s) without version info; an index " - "scan is required.", url - ) - - def scan_all(self, msg=None, *args): - if self.index_url not in self.fetched_urls: - if msg: self.warn(msg,*args) - self.info( - "Scanning index of all packages (this may take a while)" - ) - self.scan_url(self.index_url) - - def find_packages(self, requirement): - self.scan_url(self.index_url + requirement.unsafe_name+'/') - - if not self.package_pages.get(requirement.key): - # Fall back to safe version of the name - self.scan_url(self.index_url + requirement.project_name+'/') - - if not self.package_pages.get(requirement.key): - # We couldn't find the target package, so search the index page too - self.not_found_in_index(requirement) - - for url in list(self.package_pages.get(requirement.key,())): - # scan each page that might be related to the desired package - self.scan_url(url) - - def obtain(self, requirement, installer=None): - self.prescan(); self.find_packages(requirement) - for dist in self[requirement.key]: - if dist in requirement: - return dist - self.debug("%s does not match %s", requirement, dist) - return super(PackageIndex, self).obtain(requirement,installer) - - - - - - def check_md5(self, cs, info, filename, tfp): - if re.match('md5=[0-9a-f]{32}$', info): - self.debug("Validating md5 checksum for %s", filename) - if cs.hexdigest()!=info[4:]: - tfp.close() - os.unlink(filename) - raise DistutilsError( - "MD5 validation failed for "+os.path.basename(filename)+ - "; possible download problem?" 
- ) - - def add_find_links(self, urls): - """Add `urls` to the list that will be prescanned for searches""" - for url in urls: - if ( - self.to_scan is None # if we have already "gone online" - or not URL_SCHEME(url) # or it's a local file/directory - or url.startswith('file:') - or list(distros_for_url(url)) # or a direct package link - ): - # then go ahead and process it now - self.scan_url(url) - else: - # otherwise, defer retrieval till later - self.to_scan.append(url) - - def prescan(self): - """Scan urls scheduled for prescanning (e.g. --find-links)""" - if self.to_scan: - map(self.scan_url, self.to_scan) - self.to_scan = None # from now on, go ahead and process immediately - - def not_found_in_index(self, requirement): - if self[requirement.key]: # we've seen at least one distro - meth, msg = self.info, "Couldn't retrieve index page for %r" - else: # no distros seen for this name, might be misspelled - meth, msg = (self.warn, - "Couldn't find index page for %r (maybe misspelled?)") - meth(msg, requirement.unsafe_name) - self.scan_all() - - def download(self, spec, tmpdir): - """Locate and/or download `spec` to `tmpdir`, returning a local path - - `spec` may be a ``Requirement`` object, or a string containing a URL, - an existing local filename, or a project/version requirement spec - (i.e. the string form of a ``Requirement`` object). If it is the URL - of a .py file with an unambiguous ``#egg=name-version`` tag (i.e., one - that escapes ``-`` as ``_`` throughout), a trivial ``setup.py`` is - automatically created alongside the downloaded file. - - If `spec` is a ``Requirement`` object or a string containing a - project/version requirement spec, this method returns the location of - a matching distribution (possibly after downloading it to `tmpdir`). - If `spec` is a locally existing file or directory name, it is simply - returned unchanged. If `spec` is a URL, it is downloaded to a subpath - of `tmpdir`, and the local filename is returned. Various errors may be - raised if a problem occurs during downloading. - """ - if not isinstance(spec,Requirement): - scheme = URL_SCHEME(spec) - if scheme: - # It's a url, download it to tmpdir - found = self._download_url(scheme.group(1), spec, tmpdir) - base, fragment = egg_info_for_url(spec) - if base.endswith('.py'): - found = self.gen_setup(found,fragment,tmpdir) - return found - elif os.path.exists(spec): - # Existing file or directory, just return it - return spec - else: - try: - spec = Requirement.parse(spec) - except ValueError: - raise DistutilsError( - "Not a URL, existing file, or requirement spec: %r" % - (spec,) - ) - return getattr(self.fetch_distribution(spec, tmpdir),'location',None) - - - def fetch_distribution(self, - requirement, tmpdir, force_scan=False, source=False, develop_ok=False, - local_index=None, - ): - """Obtain a distribution suitable for fulfilling `requirement` - - `requirement` must be a ``pkg_resources.Requirement`` instance. - If necessary, or if the `force_scan` flag is set, the requirement is - searched for in the (online) package index as well as the locally - installed packages. If a distribution matching `requirement` is found, - the returned distribution's ``location`` is the value you would have - gotten from calling the ``download()`` method with the matching - distribution's URL or filename. If no matching distribution is found, - ``None`` is returned. - - If the `source` flag is set, only source distributions and source - checkout links will be considered. 
Unless the `develop_ok` flag is - set, development and system eggs (i.e., those using the ``.egg-info`` - format) will be ignored. - """ - # process a Requirement - self.info("Searching for %s", requirement) - skipped = {} - dist = None - - def find(env, req): - # Find a matching distribution; may be called more than once - - # first try to find a local dist - for allow_remote in (False, True): - # then try to find a platform-dependent dist - for allow_platform_independent in (False, True): - for dist in env[req.key]: - if dist.precedence==DEVELOP_DIST and not develop_ok: - if dist not in skipped: - self.warn("Skipping development or system egg: %s",dist) - skipped[dist] = 1 - continue - - if ((is_local(dist.location) or allow_remote) and - (dist in req) and - ((allow_platform_independent or dist.platform is not None) and - (dist.precedence<=SOURCE_DIST or not source))): - return dist - - if force_scan: - self.prescan() - self.find_packages(requirement) - dist = find(self, requirement) - - if local_index is not None: - dist = dist or find(local_index, requirement) - - if dist is None and self.to_scan is not None: - self.prescan() - dist = find(self, requirement) - - if dist is None and not force_scan: - self.find_packages(requirement) - dist = find(self, requirement) - - if dist is None: - self.warn( - "No local packages or download links found for %s%s", - (source and "a source distribution of " or ""), - requirement, - ) - else: - self.info("Best match: %s", dist) - return dist.clone(location=self.download(dist.location, tmpdir)) - - - def fetch(self, requirement, tmpdir, force_scan=False, source=False): - """Obtain a file suitable for fulfilling `requirement` - - DEPRECATED; use the ``fetch_distribution()`` method now instead. For - backward compatibility, this routine is identical but returns the - ``location`` of the downloaded distribution instead of a distribution - object. - """ - dist = self.fetch_distribution(requirement,tmpdir,force_scan,source) - if dist is not None: - return dist.location - return None - - - def gen_setup(self, filename, fragment, tmpdir): - match = EGG_FRAGMENT.match(fragment) - dists = match and [d for d in - interpret_distro_name(filename, match.group(1), None) if d.version - ] or [] - - if len(dists)==1: # unambiguous ``#egg`` fragment - basename = os.path.basename(filename) - - # Make sure the file has been downloaded to the temp dir. - if os.path.dirname(filename) != tmpdir: - dst = os.path.join(tmpdir, basename) - from setuptools.command.easy_install import samefile - if not samefile(filename, dst): - shutil.copy2(filename, dst) - filename=dst - - file = open(os.path.join(tmpdir, 'setup.py'), 'w') - file.write( - "from setuptools import setup\n" - "setup(name=%r, version=%r, py_modules=[%r])\n" - % ( - dists[0].project_name, dists[0].version, - os.path.splitext(basename)[0] - ) - ) - file.close() - return filename - - elif match: - raise DistutilsError( - "Can't unambiguously interpret project/version identifier %r; " - "any dashes in the name or version should be escaped using " - "underscores. %r" % (fragment,dists) - ) - else: - raise DistutilsError( - "Can't process plain .py files without an '#egg=name-version'" - " suffix to enable automatic setup script generation." 
- ) - - dl_blocksize = 8192 - def _download_to(self, url, filename): - self.info("Downloading %s", url) - # Download the file - fp, tfp, info = None, None, None - try: - if '#' in url: - url, info = url.split('#', 1) - fp = self.open_url(url) - if isinstance(fp, urllib2.HTTPError): - raise DistutilsError( - "Can't download %s: %s %s" % (url, fp.code,fp.msg) - ) - cs = md5() - headers = fp.info() - blocknum = 0 - bs = self.dl_blocksize - size = -1 - if "content-length" in headers: - size = int(headers["Content-Length"]) - self.reporthook(url, filename, blocknum, bs, size) - tfp = open(filename,'wb') - while True: - block = fp.read(bs) - if block: - cs.update(block) - tfp.write(block) - blocknum += 1 - self.reporthook(url, filename, blocknum, bs, size) - else: - break - if info: self.check_md5(cs, info, filename, tfp) - return headers - finally: - if fp: fp.close() - if tfp: tfp.close() - - def reporthook(self, url, filename, blocknum, blksize, size): - pass # no-op - - - def open_url(self, url, warning=None): - if url.startswith('file:'): return local_open(url) - try: - return open_with_auth(url) - except urllib2.HTTPError, v: - return v - except urllib2.URLError, v: - reason = v.reason - except httplib.HTTPException, v: - reason = "%s: %s" % (v.__doc__ or v.__class__.__name__, v) - if warning: - self.warn(warning, reason) - else: - raise DistutilsError("Download error for %s: %s" % (url, reason)) - - def _download_url(self, scheme, url, tmpdir): - # Determine download filename - # - name, fragment = egg_info_for_url(url) - if name: - while '..' in name: - name = name.replace('..','.').replace('\\','_') - else: - name = "__downloaded__" # default if URL has no path contents - - if name.endswith('.egg.zip'): - name = name[:-4] # strip the extra .zip before download - - filename = os.path.join(tmpdir,name) - - # Download the file - # - if scheme=='svn' or scheme.startswith('svn+'): - return self._download_svn(url, filename) - elif scheme=='file': - return urllib2.url2pathname(urlparse.urlparse(url)[2]) - else: - self.url_ok(url, True) # raises error if not allowed - return self._attempt_download(url, filename) - - - def scan_url(self, url): - self.process_url(url, True) - - - def _attempt_download(self, url, filename): - headers = self._download_to(url, filename) - if 'html' in headers.get('content-type','').lower(): - return self._download_html(url, headers, filename) - else: - return filename - - def _download_html(self, url, headers, filename): - file = open(filename) - for line in file: - if line.strip(): - # Check for a subversion index page - if re.search(r'([^- ]+ - )?Revision \d+:', line): - # it's a subversion index page: - file.close() - os.unlink(filename) - return self._download_svn(url, filename) - break # not an index page - file.close() - os.unlink(filename) - raise DistutilsError("Unexpected HTML page found at "+url) - - def _download_svn(self, url, filename): - url = url.split('#',1)[0] # remove any fragment for svn's sake - self.info("Doing subversion checkout from %s to %s", url, filename) - os.system("svn checkout -q %s %s" % (url, filename)) - return filename - - def debug(self, msg, *args): - log.debug(msg, *args) - - def info(self, msg, *args): - log.info(msg, *args) - - def warn(self, msg, *args): - log.warn(msg, *args) - -# This pattern matches a character entity reference (a decimal numeric -# references, a hexadecimal numeric reference, or a named reference). 
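The pattern below feeds the htmldecode() helper defined a few lines further down; a quick illustration with a made-up input that exercises both a named and a hexadecimal character reference::

    htmldecode("a &amp; b &#x3C; c")    # returns 'a & b < c'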
-entity_sub = re.compile(r'&(#(\d+|x[\da-fA-F]+)|[\w.:-]+);?').sub - -def uchr(c): - if not isinstance(c, int): - return c - if c>255: return unichr(c) - return chr(c) - -def decode_entity(match): - what = match.group(1) - if what.startswith('#x'): - what = int(what[2:], 16) - elif what.startswith('#'): - what = int(what[1:]) - else: - from htmlentitydefs import name2codepoint - what = name2codepoint.get(what, match.group(0)) - return uchr(what) - -def htmldecode(text): - """Decode HTML entities in the given text.""" - return entity_sub(decode_entity, text) - - - - - - - - - - - - - - - - - -def open_with_auth(url): - """Open a urllib2 request, handling HTTP authentication""" - - scheme, netloc, path, params, query, frag = urlparse.urlparse(url) - - if scheme in ('http', 'https'): - auth, host = urllib.splituser(netloc) - else: - auth = None - - if auth: - auth = "Basic " + urllib2.unquote(auth).encode('base64').strip() - new_url = urlparse.urlunparse((scheme,host,path,params,query,frag)) - request = urllib2.Request(new_url) - request.add_header("Authorization", auth) - else: - request = urllib2.Request(url) - - request.add_header('User-Agent', user_agent) - fp = urllib2.urlopen(request) - - if auth: - # Put authentication info back into request URL if same host, - # so that links found on the page will work - s2, h2, path2, param2, query2, frag2 = urlparse.urlparse(fp.url) - if s2==scheme and h2==host: - fp.url = urlparse.urlunparse((s2,netloc,path2,param2,query2,frag2)) - - return fp - - - - - - - - - - - - -def fix_sf_url(url): - return url # backward compatibility - -def local_open(url): - """Read a local path, with special support for directories""" - scheme, server, path, param, query, frag = urlparse.urlparse(url) - filename = urllib2.url2pathname(path) - if os.path.isfile(filename): - return urllib2.urlopen(url) - elif path.endswith('/') and os.path.isdir(filename): - files = [] - for f in os.listdir(filename): - if f=='index.html': - body = open(os.path.join(filename,f),'rb').read() - break - elif os.path.isdir(os.path.join(filename,f)): - f+='/' - files.append("<a href=%r>%s</a>" % (f,f)) - else: - body = ("<html><head><title>%s" % url) + \ - "%s" % '\n'.join(files) - status, message = 200, "OK" - else: - status, message, body = 404, "Path not found", "Not found" - - return urllib2.HTTPError(url, status, message, - {'content-type':'text/html'}, cStringIO.StringIO(body)) - - - - - - - - - - - - - -# this line is a kludge to keep the trailing blank lines for pje's editor diff --git a/zfec/setuptools-0.6c16dev3.egg/setuptools/sandbox.py b/zfec/setuptools-0.6c16dev3.egg/setuptools/sandbox.py deleted file mode 100644 index 4c5e712..0000000 --- a/zfec/setuptools-0.6c16dev3.egg/setuptools/sandbox.py +++ /dev/null @@ -1,287 +0,0 @@ -import os, sys, __builtin__, tempfile, operator, pkg_resources -_os = sys.modules[os.name] -_open = open -_file = file - -from distutils.errors import DistutilsError -from pkg_resources import working_set - -__all__ = [ - "AbstractSandbox", "DirectorySandbox", "SandboxViolation", "run_setup", -] - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -def run_setup(setup_script, args): - """Run a distutils setup script, sandboxed in its directory""" - old_dir = os.getcwd() - save_argv = sys.argv[:] - save_path = sys.path[:] - setup_dir = os.path.abspath(os.path.dirname(setup_script)) - temp_dir = os.path.join(setup_dir,'temp') - if not os.path.isdir(temp_dir): os.makedirs(temp_dir) - save_tmp = tempfile.tempdir - save_modules = sys.modules.copy() - 
pr_state = pkg_resources.__getstate__() - try: - tempfile.tempdir = temp_dir; os.chdir(setup_dir) - try: - sys.argv[:] = [setup_script]+list(args) - sys.path.insert(0, setup_dir) - # reset to include setup dir, w/clean callback list - working_set.__init__() - working_set.callbacks.append(lambda dist:dist.activate()) - DirectorySandbox(setup_dir).run( - lambda: execfile( - "setup.py", - {'__file__':setup_script, '__name__':'__main__'} - ) - ) - except SystemExit, v: - if v.args and v.args[0]: - raise - # Normal exit, just return - finally: - pkg_resources.__setstate__(pr_state) - sys.modules.update(save_modules) - for key in list(sys.modules): - if key not in save_modules: del sys.modules[key] - os.chdir(old_dir) - sys.path[:] = save_path - sys.argv[:] = save_argv - tempfile.tempdir = save_tmp - - - -class AbstractSandbox: - """Wrap 'os' module and 'open()' builtin for virtualizing setup scripts""" - - _active = False - - def __init__(self): - self._attrs = [ - name for name in dir(_os) - if not name.startswith('_') and hasattr(self,name) - ] - - def _copy(self, source): - for name in self._attrs: - setattr(os, name, getattr(source,name)) - - def run(self, func): - """Run 'func' under os sandboxing""" - try: - self._copy(self) - __builtin__.file = self._file - __builtin__.open = self._open - self._active = True - return func() - finally: - self._active = False - __builtin__.open = _open - __builtin__.file = _file - self._copy(_os) - - def _mk_dual_path_wrapper(name): - original = getattr(_os,name) - def wrap(self,src,dst,*args,**kw): - if self._active: - src,dst = self._remap_pair(name,src,dst,*args,**kw) - return original(src,dst,*args,**kw) - return wrap - - for name in ["rename", "link", "symlink"]: - if hasattr(_os,name): locals()[name] = _mk_dual_path_wrapper(name) - - - def _mk_single_path_wrapper(name, original=None): - original = original or getattr(_os,name) - def wrap(self,path,*args,**kw): - if self._active: - path = self._remap_input(name,path,*args,**kw) - return original(path,*args,**kw) - return wrap - - _open = _mk_single_path_wrapper('open', _open) - _file = _mk_single_path_wrapper('file', _file) - for name in [ - "stat", "listdir", "chdir", "open", "chmod", "chown", "mkdir", - "remove", "unlink", "rmdir", "utime", "lchown", "chroot", "lstat", - "startfile", "mkfifo", "mknod", "pathconf", "access" - ]: - if hasattr(_os,name): locals()[name] = _mk_single_path_wrapper(name) - - def _mk_single_with_return(name): - original = getattr(_os,name) - def wrap(self,path,*args,**kw): - if self._active: - path = self._remap_input(name,path,*args,**kw) - return self._remap_output(name, original(path,*args,**kw)) - return original(path,*args,**kw) - return wrap - - for name in ['readlink', 'tempnam']: - if hasattr(_os,name): locals()[name] = _mk_single_with_return(name) - - def _mk_query(name): - original = getattr(_os,name) - def wrap(self,*args,**kw): - retval = original(*args,**kw) - if self._active: - return self._remap_output(name, retval) - return retval - return wrap - - for name in ['getcwd', 'tmpnam']: - if hasattr(_os,name): locals()[name] = _mk_query(name) - - def _validate_path(self,path): - """Called to remap or validate any path, whether input or output""" - return path - - def _remap_input(self,operation,path,*args,**kw): - """Called for path inputs""" - return self._validate_path(path) - - def _remap_output(self,operation,path): - """Called for path outputs""" - return self._validate_path(path) - - def _remap_pair(self,operation,src,dst,*args,**kw): - """Called for path 
pairs like rename, link, and symlink operations""" - return ( - self._remap_input(operation+'-from',src,*args,**kw), - self._remap_input(operation+'-to',dst,*args,**kw) - ) - - -class DirectorySandbox(AbstractSandbox): - """Restrict operations to a single subdirectory - pseudo-chroot""" - - write_ops = dict.fromkeys([ - "open", "chmod", "chown", "mkdir", "remove", "unlink", "rmdir", - "utime", "lchown", "chroot", "mkfifo", "mknod", "tempnam", - ]) - - def __init__(self,sandbox): - self._sandbox = os.path.normcase(os.path.realpath(sandbox)) - self._prefix = os.path.join(self._sandbox,'') - AbstractSandbox.__init__(self) - - def _violation(self, operation, *args, **kw): - raise SandboxViolation(operation, args, kw) - - def _open(self, path, mode='r', *args, **kw): - if mode not in ('r', 'rt', 'rb', 'rU', 'U') and not self._ok(path): - self._violation("open", path, mode, *args, **kw) - return _open(path,mode,*args,**kw) - - def tmpnam(self): self._violation("tmpnam") - - def _ok(self,path): - if hasattr(_os,'devnull') and path==_os.devnull: return True - active = self._active - try: - self._active = False - realpath = os.path.normcase(os.path.realpath(path)) - if realpath==self._sandbox or realpath.startswith(self._prefix): - return True - finally: - self._active = active - - def _remap_input(self,operation,path,*args,**kw): - """Called for path inputs""" - if operation in self.write_ops and not self._ok(path): - self._violation(operation, os.path.realpath(path), *args, **kw) - return path - - def _remap_pair(self,operation,src,dst,*args,**kw): - """Called for path pairs like rename, link, and symlink operations""" - if not self._ok(src) or not self._ok(dst): - self._violation(operation, src, dst, *args, **kw) - return (src,dst) - - def _file(self, path, mode='r', *args, **kw): - if mode not in ('r', 'rt', 'rb', 'rU', 'U') and not self._ok(path): - self._violation("file", path, mode, *args, **kw) - return _file(path,mode,*args,**kw) - - def open(self, file, flags, mode=0777): - """Called for low-level os.open()""" - if flags & WRITE_FLAGS and not self._ok(file): - self._violation("os.open", file, flags, mode) - return _os.open(file,flags,mode) - -WRITE_FLAGS = reduce( - operator.or_, [getattr(_os, a, 0) for a in - "O_WRONLY O_RDWR O_APPEND O_CREAT O_TRUNC O_TEMPORARY".split()] -) - -class SandboxViolation(DistutilsError): - """A setup script attempted to modify the filesystem outside the sandbox""" - - def __str__(self): - return """SandboxViolation: %s%r %s - -The package setup script has attempted to modify files on your system -that are not within the EasyInstall build area, and has been aborted. - -This package cannot be safely installed by EasyInstall, and may not -support alternate installation locations even if you run its setup -script by hand. 
Please inform the package's author and the EasyInstall -maintainers to find out if a fix or workaround is available.""" % self.args - - - - - - - - - - - - - - - - - - - - - - - - - - - -# diff --git a/zfec/setuptools-0.6c16dev3.egg/setuptools/site-patch.py b/zfec/setuptools-0.6c16dev3.egg/setuptools/site-patch.py deleted file mode 100644 index b1b27b9..0000000 --- a/zfec/setuptools-0.6c16dev3.egg/setuptools/site-patch.py +++ /dev/null @@ -1,74 +0,0 @@ -def __boot(): - import sys, imp, os, os.path - PYTHONPATH = os.environ.get('PYTHONPATH') - if PYTHONPATH is None or (sys.platform=='win32' and not PYTHONPATH): - PYTHONPATH = [] - else: - PYTHONPATH = PYTHONPATH.split(os.pathsep) - - pic = getattr(sys,'path_importer_cache',{}) - stdpath = sys.path[len(PYTHONPATH):] - mydir = os.path.dirname(__file__) - #print "searching",stdpath,sys.path - - for item in stdpath: - if item==mydir or not item: - continue # skip if current dir. on Windows, or my own directory - importer = pic.get(item) - if importer is not None: - loader = importer.find_module('site') - if loader is not None: - # This should actually reload the current module - loader.load_module('site') - break - else: - try: - stream, path, descr = imp.find_module('site',[item]) - except ImportError: - continue - if stream is None: - continue - try: - # This should actually reload the current module - imp.load_module('site',stream,path,descr) - finally: - stream.close() - break - else: - raise ImportError("Couldn't find the real 'site' module") - - #print "loaded", __file__ - - known_paths = dict([(makepath(item)[1],1) for item in sys.path]) # 2.2 comp - - oldpos = getattr(sys,'__egginsert',0) # save old insertion position - sys.__egginsert = 0 # and reset the current one - - for item in PYTHONPATH: - addsitedir(item) - - sys.__egginsert += oldpos # restore effective old position - - d,nd = makepath(stdpath[0]) - insert_at = None - new_path = [] - - for item in sys.path: - p,np = makepath(item) - - if np==nd and insert_at is None: - # We've hit the first 'system' path entry, so added entries go here - insert_at = len(new_path) - - if np in known_paths or insert_at is None: - new_path.append(item) - else: - # new path after the insert point, back-insert it - new_path.insert(insert_at, item) - insert_at += 1 - - sys.path[:] = new_path - -if __name__=='site': - __boot() - del __boot diff --git a/zfec/setuptools-0.6c16dev3.egg/setuptools/tests/__init__.py b/zfec/setuptools-0.6c16dev3.egg/setuptools/tests/__init__.py deleted file mode 100644 index 3c4f3cb..0000000 --- a/zfec/setuptools-0.6c16dev3.egg/setuptools/tests/__init__.py +++ /dev/null @@ -1,369 +0,0 @@ -"""Tests for the 'setuptools' package""" -from unittest import TestSuite, TestCase, makeSuite, defaultTestLoader -import distutils.core, distutils.cmd -from distutils.errors import DistutilsOptionError, DistutilsPlatformError -from distutils.errors import DistutilsSetupError -import setuptools, setuptools.dist -from setuptools import Feature -from distutils.core import Extension -extract_constant, get_module_constant = None, None -from setuptools.depends import * -from distutils.version import StrictVersion, LooseVersion -from distutils.util import convert_path -import sys, os.path - -def additional_tests(): - import doctest, unittest - suite = unittest.TestSuite(( - doctest.DocFileSuite('api_tests.txt', - optionflags=doctest.ELLIPSIS, package=__name__, - ), - )) - if sys.platform == 'win32': - suite.addTest(doctest.DocFileSuite('win_script_wrapper.txt')) - return suite - -def 
makeSetup(**args): - """Return distribution from 'setup(**args)', without executing commands""" - - distutils.core._setup_stop_after = "commandline" - - # Don't let system command line leak into tests! - args.setdefault('script_args',['install']) - - try: - return setuptools.setup(**args) - finally: - distutils.core_setup_stop_after = None - - - - -class DependsTests(TestCase): - - def testExtractConst(self): - if not extract_constant: return # skip on non-bytecode platforms - - def f1(): - global x,y,z - x = "test" - y = z - - # unrecognized name - self.assertEqual(extract_constant(f1.func_code,'q', -1), None) - - # constant assigned - self.assertEqual(extract_constant(f1.func_code,'x', -1), "test") - - # expression assigned - self.assertEqual(extract_constant(f1.func_code,'y', -1), -1) - - # recognized name, not assigned - self.assertEqual(extract_constant(f1.func_code,'z', -1), None) - - - def testFindModule(self): - self.assertRaises(ImportError, find_module, 'no-such.-thing') - self.assertRaises(ImportError, find_module, 'setuptools.non-existent') - f,p,i = find_module('setuptools.tests'); f.close() - - def testModuleExtract(self): - if not get_module_constant: return # skip on non-bytecode platforms - from distutils import __version__ - self.assertEqual( - get_module_constant('distutils','__version__'), __version__ - ) - self.assertEqual( - get_module_constant('sys','version'), sys.version - ) - self.assertEqual( - get_module_constant('setuptools.tests','__doc__'),__doc__ - ) - - def testRequire(self): - if not extract_constant: return # skip on non-bytecode platforms - - req = Require('Distutils','1.0.3','distutils') - - self.assertEqual(req.name, 'Distutils') - self.assertEqual(req.module, 'distutils') - self.assertEqual(req.requested_version, '1.0.3') - self.assertEqual(req.attribute, '__version__') - self.assertEqual(req.full_name(), 'Distutils-1.0.3') - - from distutils import __version__ - self.assertEqual(req.get_version(), __version__) - self.failUnless(req.version_ok('1.0.9')) - self.failIf(req.version_ok('0.9.1')) - self.failIf(req.version_ok('unknown')) - - self.failUnless(req.is_present()) - self.failUnless(req.is_current()) - - req = Require('Distutils 3000','03000','distutils',format=LooseVersion) - self.failUnless(req.is_present()) - self.failIf(req.is_current()) - self.failIf(req.version_ok('unknown')) - - req = Require('Do-what-I-mean','1.0','d-w-i-m') - self.failIf(req.is_present()) - self.failIf(req.is_current()) - - req = Require('Tests', None, 'tests', homepage="http://example.com") - self.assertEqual(req.format, None) - self.assertEqual(req.attribute, None) - self.assertEqual(req.requested_version, None) - self.assertEqual(req.full_name(), 'Tests') - self.assertEqual(req.homepage, 'http://example.com') - - paths = [os.path.dirname(p) for p in __path__] - self.failUnless(req.is_present(paths)) - self.failUnless(req.is_current(paths)) - - -class DistroTests(TestCase): - - def setUp(self): - self.e1 = Extension('bar.ext',['bar.c']) - self.e2 = Extension('c.y', ['y.c']) - - self.dist = makeSetup( - packages=['a', 'a.b', 'a.b.c', 'b', 'c'], - py_modules=['b.d','x'], - ext_modules = (self.e1, self.e2), - package_dir = {}, - ) - - - def testDistroType(self): - self.failUnless(isinstance(self.dist,setuptools.dist.Distribution)) - - - def testExcludePackage(self): - self.dist.exclude_package('a') - self.assertEqual(self.dist.packages, ['b','c']) - - self.dist.exclude_package('b') - self.assertEqual(self.dist.packages, ['c']) - self.assertEqual(self.dist.py_modules, 
['x']) - self.assertEqual(self.dist.ext_modules, [self.e1, self.e2]) - - self.dist.exclude_package('c') - self.assertEqual(self.dist.packages, []) - self.assertEqual(self.dist.py_modules, ['x']) - self.assertEqual(self.dist.ext_modules, [self.e1]) - - # test removals from unspecified options - makeSetup().exclude_package('x') - - - - - - - - def testIncludeExclude(self): - # remove an extension - self.dist.exclude(ext_modules=[self.e1]) - self.assertEqual(self.dist.ext_modules, [self.e2]) - - # add it back in - self.dist.include(ext_modules=[self.e1]) - self.assertEqual(self.dist.ext_modules, [self.e2, self.e1]) - - # should not add duplicate - self.dist.include(ext_modules=[self.e1]) - self.assertEqual(self.dist.ext_modules, [self.e2, self.e1]) - - def testExcludePackages(self): - self.dist.exclude(packages=['c','b','a']) - self.assertEqual(self.dist.packages, []) - self.assertEqual(self.dist.py_modules, ['x']) - self.assertEqual(self.dist.ext_modules, [self.e1]) - - def testEmpty(self): - dist = makeSetup() - dist.include(packages=['a'], py_modules=['b'], ext_modules=[self.e2]) - dist = makeSetup() - dist.exclude(packages=['a'], py_modules=['b'], ext_modules=[self.e2]) - - def testContents(self): - self.failUnless(self.dist.has_contents_for('a')) - self.dist.exclude_package('a') - self.failIf(self.dist.has_contents_for('a')) - - self.failUnless(self.dist.has_contents_for('b')) - self.dist.exclude_package('b') - self.failIf(self.dist.has_contents_for('b')) - - self.failUnless(self.dist.has_contents_for('c')) - self.dist.exclude_package('c') - self.failIf(self.dist.has_contents_for('c')) - - - - - def testInvalidIncludeExclude(self): - self.assertRaises(DistutilsSetupError, - self.dist.include, nonexistent_option='x' - ) - self.assertRaises(DistutilsSetupError, - self.dist.exclude, nonexistent_option='x' - ) - self.assertRaises(DistutilsSetupError, - self.dist.include, packages={'x':'y'} - ) - self.assertRaises(DistutilsSetupError, - self.dist.exclude, packages={'x':'y'} - ) - self.assertRaises(DistutilsSetupError, - self.dist.include, ext_modules={'x':'y'} - ) - self.assertRaises(DistutilsSetupError, - self.dist.exclude, ext_modules={'x':'y'} - ) - - self.assertRaises(DistutilsSetupError, - self.dist.include, package_dir=['q'] - ) - self.assertRaises(DistutilsSetupError, - self.dist.exclude, package_dir=['q'] - ) - - - - - - - - - - - - - - - -class FeatureTests(TestCase): - - def setUp(self): - self.req = Require('Distutils','1.0.3','distutils') - self.dist = makeSetup( - features={ - 'foo': Feature("foo",standard=True,require_features=['baz',self.req]), - 'bar': Feature("bar", standard=True, packages=['pkg.bar'], - py_modules=['bar_et'], remove=['bar.ext'], - ), - 'baz': Feature( - "baz", optional=False, packages=['pkg.baz'], - scripts = ['scripts/baz_it'], - libraries=[('libfoo','foo/foofoo.c')] - ), - 'dwim': Feature("DWIM", available=False, remove='bazish'), - }, - script_args=['--without-bar', 'install'], - packages = ['pkg.bar', 'pkg.foo'], - py_modules = ['bar_et', 'bazish'], - ext_modules = [Extension('bar.ext',['bar.c'])] - ) - - def testDefaults(self): - self.failIf( - Feature( - "test",standard=True,remove='x',available=False - ).include_by_default() - ) - self.failUnless( - Feature("test",standard=True,remove='x').include_by_default() - ) - # Feature must have either kwargs, removes, or require_features - self.assertRaises(DistutilsSetupError, Feature, "test") - - def testAvailability(self): - self.assertRaises( - DistutilsPlatformError, - 
self.dist.features['dwim'].include_in, self.dist - ) - - def testFeatureOptions(self): - dist = self.dist - self.failUnless( - ('with-dwim',None,'include DWIM') in dist.feature_options - ) - self.failUnless( - ('without-dwim',None,'exclude DWIM (default)') in dist.feature_options - ) - self.failUnless( - ('with-bar',None,'include bar (default)') in dist.feature_options - ) - self.failUnless( - ('without-bar',None,'exclude bar') in dist.feature_options - ) - self.assertEqual(dist.feature_negopt['without-foo'],'with-foo') - self.assertEqual(dist.feature_negopt['without-bar'],'with-bar') - self.assertEqual(dist.feature_negopt['without-dwim'],'with-dwim') - self.failIf('without-baz' in dist.feature_negopt) - - def testUseFeatures(self): - dist = self.dist - self.assertEqual(dist.with_foo,1) - self.assertEqual(dist.with_bar,0) - self.assertEqual(dist.with_baz,1) - self.failIf('bar_et' in dist.py_modules) - self.failIf('pkg.bar' in dist.packages) - self.failUnless('pkg.baz' in dist.packages) - self.failUnless('scripts/baz_it' in dist.scripts) - self.failUnless(('libfoo','foo/foofoo.c') in dist.libraries) - self.assertEqual(dist.ext_modules,[]) - self.assertEqual(dist.require_features, [self.req]) - - # If we ask for bar, it should fail because we explicitly disabled - # it on the command line - self.assertRaises(DistutilsOptionError, dist.include_feature, 'bar') - - def testFeatureWithInvalidRemove(self): - self.assertRaises( - SystemExit, makeSetup, features = {'x':Feature('x', remove='y')} - ) - -class TestCommandTests(TestCase): - - def testTestIsCommand(self): - test_cmd = makeSetup().get_command_obj('test') - self.failUnless(isinstance(test_cmd, distutils.cmd.Command)) - - def testLongOptSuiteWNoDefault(self): - ts1 = makeSetup(script_args=['test','--test-suite=foo.tests.suite']) - ts1 = ts1.get_command_obj('test') - ts1.ensure_finalized() - self.assertEqual(ts1.test_suite, 'foo.tests.suite') - - def testDefaultSuite(self): - ts2 = makeSetup(test_suite='bar.tests.suite').get_command_obj('test') - ts2.ensure_finalized() - self.assertEqual(ts2.test_suite, 'bar.tests.suite') - - def testDefaultWModuleOnCmdLine(self): - ts3 = makeSetup( - test_suite='bar.tests', - script_args=['test','-m','foo.tests'] - ).get_command_obj('test') - ts3.ensure_finalized() - self.assertEqual(ts3.test_module, 'foo.tests') - self.assertEqual(ts3.test_suite, 'foo.tests.test_suite') - - def testConflictingOptions(self): - ts4 = makeSetup( - script_args=['test','-m','bar.tests', '-s','foo.tests.suite'] - ).get_command_obj('test') - self.assertRaises(DistutilsOptionError, ts4.ensure_finalized) - - def testNoSuite(self): - ts5 = makeSetup().get_command_obj('test') - ts5.ensure_finalized() - self.assertEqual(ts5.test_suite, None) - - - - - diff --git a/zfec/setuptools-0.6c16dev3.egg/setuptools/tests/test_packageindex.py b/zfec/setuptools-0.6c16dev3.egg/setuptools/tests/test_packageindex.py deleted file mode 100644 index 0231eda..0000000 --- a/zfec/setuptools-0.6c16dev3.egg/setuptools/tests/test_packageindex.py +++ /dev/null @@ -1,27 +0,0 @@ -"""Package Index Tests -""" -# More would be better! 
- -import os, shutil, tempfile, unittest, urllib2 -import pkg_resources -import setuptools.package_index - -class TestPackageIndex(unittest.TestCase): - - def test_bad_urls(self): - index = setuptools.package_index.PackageIndex() - url = 'http://127.0.0.1/nonesuch/test_package_index' - try: - v = index.open_url(url) - except Exception, v: - self.assert_(url in str(v)) - else: - self.assert_(isinstance(v,urllib2.HTTPError)) - - def test_url_ok(self): - index = setuptools.package_index.PackageIndex( - hosts=('www.example.com',) - ) - url = 'file:///tmp/test_package_index' - self.assert_(index.url_ok(url, True)) - diff --git a/zfec/setuptools-0.6c16dev3.egg/setuptools/tests/test_resources.py b/zfec/setuptools-0.6c16dev3.egg/setuptools/tests/test_resources.py deleted file mode 100755 index 1c010e7..0000000 --- a/zfec/setuptools-0.6c16dev3.egg/setuptools/tests/test_resources.py +++ /dev/null @@ -1,533 +0,0 @@ -#!/usr/bin/python -# -*- coding: utf-8 -*- -# NOTE: the shebang and encoding lines are for ScriptHeaderTests; do not remove -from unittest import TestCase, makeSuite; from pkg_resources import * -from setuptools.command.easy_install import get_script_header, is_sh -import os, pkg_resources, sys, StringIO -try: frozenset -except NameError: - from sets import ImmutableSet as frozenset - -class Metadata(EmptyProvider): - """Mock object to return metadata as if from an on-disk distribution""" - - def __init__(self,*pairs): - self.metadata = dict(pairs) - - def has_metadata(self,name): - return name in self.metadata - - def get_metadata(self,name): - return self.metadata[name] - - def get_metadata_lines(self,name): - return yield_lines(self.get_metadata(name)) - -class DistroTests(TestCase): - - def testCollection(self): - # empty path should produce no distributions - ad = Environment([], platform=None, python=None) - self.assertEqual(list(ad), []) - self.assertEqual(ad['FooPkg'],[]) - ad.add(Distribution.from_filename("FooPkg-1.3_1.egg")) - ad.add(Distribution.from_filename("FooPkg-1.4-py2.4-win32.egg")) - ad.add(Distribution.from_filename("FooPkg-1.2-py2.4.egg")) - - # Name is in there now - self.failUnless(ad['FooPkg']) - # But only 1 package - self.assertEqual(list(ad), ['foopkg']) - - # Distributions sort by version - self.assertEqual( - [dist.version for dist in ad['FooPkg']], ['1.4','1.3-1','1.2'] - ) - # Removing a distribution leaves sequence alone - ad.remove(ad['FooPkg'][1]) - self.assertEqual( - [dist.version for dist in ad['FooPkg']], ['1.4','1.2'] - ) - # And inserting adds them in order - ad.add(Distribution.from_filename("FooPkg-1.9.egg")) - self.assertEqual( - [dist.version for dist in ad['FooPkg']], ['1.9','1.4','1.2'] - ) - - ws = WorkingSet([]) - foo12 = Distribution.from_filename("FooPkg-1.2-py2.4.egg") - foo14 = Distribution.from_filename("FooPkg-1.4-py2.4-win32.egg") - req, = parse_requirements("FooPkg>=1.3") - - # Nominal case: no distros on path, should yield all applicable - self.assertEqual(ad.best_match(req,ws).version, '1.9') - # If a matching distro is already installed, should return only that - ws.add(foo14); self.assertEqual(ad.best_match(req,ws).version, '1.4') - - # If the first matching distro is unsuitable, it's a version conflict - ws = WorkingSet([]); ws.add(foo12); ws.add(foo14) - self.assertRaises(VersionConflict, ad.best_match, req, ws) - - # If more than one match on the path, the first one takes precedence - ws = WorkingSet([]); ws.add(foo14); ws.add(foo12); ws.add(foo14); - self.assertEqual(ad.best_match(req,ws).version, '1.4') - - def 
checkFooPkg(self,d): - self.assertEqual(d.project_name, "FooPkg") - self.assertEqual(d.key, "foopkg") - self.assertEqual(d.version, "1.3-1") - self.assertEqual(d.py_version, "2.4") - self.assertEqual(d.platform, "win32") - self.assertEqual(d.parsed_version, parse_version("1.3-1")) - - def testDistroBasics(self): - d = Distribution( - "/some/path", - project_name="FooPkg",version="1.3-1",py_version="2.4",platform="win32" - ) - self.checkFooPkg(d) - - d = Distribution("/some/path") - self.assertEqual(d.py_version, sys.version[:3]) - self.assertEqual(d.platform, None) - - def testDistroParse(self): - d = Distribution.from_filename("FooPkg-1.3_1-py2.4-win32.egg") - self.checkFooPkg(d) - d = Distribution.from_filename("FooPkg-1.3_1-py2.4-win32.egg-info") - self.checkFooPkg(d) - - def testDistroMetadata(self): - d = Distribution( - "/some/path", project_name="FooPkg", py_version="2.4", platform="win32", - metadata = Metadata( - ('PKG-INFO',"Metadata-Version: 1.0\nVersion: 1.3-1\n") - ) - ) - self.checkFooPkg(d) - - - def distRequires(self, txt): - return Distribution("/foo", metadata=Metadata(('depends.txt', txt))) - - def checkRequires(self, dist, txt, extras=()): - self.assertEqual( - list(dist.requires(extras)), - list(parse_requirements(txt)) - ) - - def testDistroDependsSimple(self): - for v in "Twisted>=1.5", "Twisted>=1.5\nZConfig>=2.0": - self.checkRequires(self.distRequires(v), v) - - - def testResolve(self): - ad = Environment([]); ws = WorkingSet([]) - # Resolving no requirements -> nothing to install - self.assertEqual( list(ws.resolve([],ad)), [] ) - # Request something not in the collection -> DistributionNotFound - self.assertRaises( - DistributionNotFound, ws.resolve, parse_requirements("Foo"), ad - ) - Foo = Distribution.from_filename( - "/foo_dir/Foo-1.2.egg", - metadata=Metadata(('depends.txt', "[bar]\nBaz>=2.0")) - ) - ad.add(Foo); ad.add(Distribution.from_filename("Foo-0.9.egg")) - - # Request thing(s) that are available -> list to activate - for i in range(3): - targets = list(ws.resolve(parse_requirements("Foo"), ad)) - self.assertEqual(targets, [Foo]) - map(ws.add,targets) - self.assertRaises(VersionConflict, ws.resolve, - parse_requirements("Foo==0.9"), ad) - ws = WorkingSet([]) # reset - - # Request an extra that causes an unresolved dependency for "Baz" - self.assertRaises( - DistributionNotFound, ws.resolve,parse_requirements("Foo[bar]"), ad - ) - Baz = Distribution.from_filename( - "/foo_dir/Baz-2.1.egg", metadata=Metadata(('depends.txt', "Foo")) - ) - ad.add(Baz) - - # Activation list now includes resolved dependency - self.assertEqual( - list(ws.resolve(parse_requirements("Foo[bar]"), ad)), [Foo,Baz] - ) - # Requests for conflicting versions produce VersionConflict - self.assertRaises( VersionConflict, - ws.resolve, parse_requirements("Foo==1.2\nFoo!=1.2"), ad - ) - - def testDistroDependsOptions(self): - d = self.distRequires(""" - Twisted>=1.5 - [docgen] - ZConfig>=2.0 - docutils>=0.3 - [fastcgi] - fcgiapp>=0.1""") - self.checkRequires(d,"Twisted>=1.5") - self.checkRequires( - d,"Twisted>=1.5 ZConfig>=2.0 docutils>=0.3".split(), ["docgen"] - ) - self.checkRequires( - d,"Twisted>=1.5 fcgiapp>=0.1".split(), ["fastcgi"] - ) - self.checkRequires( - d,"Twisted>=1.5 ZConfig>=2.0 docutils>=0.3 fcgiapp>=0.1".split(), - ["docgen","fastcgi"] - ) - self.checkRequires( - d,"Twisted>=1.5 fcgiapp>=0.1 ZConfig>=2.0 docutils>=0.3".split(), - ["fastcgi", "docgen"] - ) - self.assertRaises(UnknownExtra, d.requires, ["foo"]) - - - - - - - - - - - - - - - - - -class 
EntryPointTests(TestCase): - - def assertfields(self, ep): - self.assertEqual(ep.name,"foo") - self.assertEqual(ep.module_name,"setuptools.tests.test_resources") - self.assertEqual(ep.attrs, ("EntryPointTests",)) - self.assertEqual(ep.extras, ("x",)) - self.failUnless(ep.load() is EntryPointTests) - self.assertEqual( - str(ep), - "foo = setuptools.tests.test_resources:EntryPointTests [x]" - ) - - def setUp(self): - self.dist = Distribution.from_filename( - "FooPkg-1.2-py2.4.egg", metadata=Metadata(('requires.txt','[x]'))) - - def testBasics(self): - ep = EntryPoint( - "foo", "setuptools.tests.test_resources", ["EntryPointTests"], - ["x"], self.dist - ) - self.assertfields(ep) - - def testParse(self): - s = "foo = setuptools.tests.test_resources:EntryPointTests [x]" - ep = EntryPoint.parse(s, self.dist) - self.assertfields(ep) - - ep = EntryPoint.parse("bar baz= spammity[PING]") - self.assertEqual(ep.name,"bar baz") - self.assertEqual(ep.module_name,"spammity") - self.assertEqual(ep.attrs, ()) - self.assertEqual(ep.extras, ("ping",)) - - ep = EntryPoint.parse(" fizzly = wocka:foo") - self.assertEqual(ep.name,"fizzly") - self.assertEqual(ep.module_name,"wocka") - self.assertEqual(ep.attrs, ("foo",)) - self.assertEqual(ep.extras, ()) - - def testRejects(self): - for ep in [ - "foo", "x=1=2", "x=a:b:c", "q=x/na", "fez=pish:tush-z", "x=f[a]>2", - ]: - try: EntryPoint.parse(ep) - except ValueError: pass - else: raise AssertionError("Should've been bad", ep) - - def checkSubMap(self, m): - self.assertEqual(len(m), len(self.submap_expect)) - for key, ep in self.submap_expect.iteritems(): - self.assertEqual(repr(m.get(key)), repr(ep)) - - submap_expect = dict( - feature1=EntryPoint('feature1', 'somemodule', ['somefunction']), - feature2=EntryPoint('feature2', 'another.module', ['SomeClass'], ['extra1','extra2']), - feature3=EntryPoint('feature3', 'this.module', extras=['something']) - ) - submap_str = """ - # define features for blah blah - feature1 = somemodule:somefunction - feature2 = another.module:SomeClass [extra1,extra2] - feature3 = this.module [something] - """ - - def testParseList(self): - self.checkSubMap(EntryPoint.parse_group("xyz", self.submap_str)) - self.assertRaises(ValueError, EntryPoint.parse_group, "x a", "foo=bar") - self.assertRaises(ValueError, EntryPoint.parse_group, "x", - ["foo=baz", "foo=bar"]) - - def testParseMap(self): - m = EntryPoint.parse_map({'xyz':self.submap_str}) - self.checkSubMap(m['xyz']) - self.assertEqual(m.keys(),['xyz']) - m = EntryPoint.parse_map("[xyz]\n"+self.submap_str) - self.checkSubMap(m['xyz']) - self.assertEqual(m.keys(),['xyz']) - self.assertRaises(ValueError, EntryPoint.parse_map, ["[xyz]", "[xyz]"]) - self.assertRaises(ValueError, EntryPoint.parse_map, self.submap_str) - -class RequirementsTests(TestCase): - - def testBasics(self): - r = Requirement.parse("Twisted>=1.2") - self.assertEqual(str(r),"Twisted>=1.2") - self.assertEqual(repr(r),"Requirement.parse('Twisted>=1.2')") - self.assertEqual(r, Requirement("Twisted", [('>=','1.2')], ())) - self.assertEqual(r, Requirement("twisTed", [('>=','1.2')], ())) - self.assertNotEqual(r, Requirement("Twisted", [('>=','2.0')], ())) - self.assertNotEqual(r, Requirement("Zope", [('>=','1.2')], ())) - self.assertNotEqual(r, Requirement("Zope", [('>=','3.0')], ())) - self.assertNotEqual(r, Requirement.parse("Twisted[extras]>=1.2")) - - def testOrdering(self): - r1 = Requirement("Twisted", [('==','1.2c1'),('>=','1.2')], ()) - r2 = Requirement("Twisted", [('>=','1.2'),('==','1.2c1')], ()) - 
self.assertEqual(r1,r2) - self.assertEqual(str(r1),str(r2)) - self.assertEqual(str(r2),"Twisted==1.2c1,>=1.2") - - def testBasicContains(self): - r = Requirement("Twisted", [('>=','1.2')], ()) - foo_dist = Distribution.from_filename("FooPkg-1.3_1.egg") - twist11 = Distribution.from_filename("Twisted-1.1.egg") - twist12 = Distribution.from_filename("Twisted-1.2.egg") - self.failUnless(parse_version('1.2') in r) - self.failUnless(parse_version('1.1') not in r) - self.failUnless('1.2' in r) - self.failUnless('1.1' not in r) - self.failUnless(foo_dist not in r) - self.failUnless(twist11 not in r) - self.failUnless(twist12 in r) - - def testAdvancedContains(self): - r, = parse_requirements("Foo>=1.2,<=1.3,==1.9,>2.0,!=2.5,<3.0,==4.5") - for v in ('1.2','1.2.2','1.3','1.9','2.0.1','2.3','2.6','3.0c1','4.5'): - self.failUnless(v in r, (v,r)) - for v in ('1.2c1','1.3.1','1.5','1.9.1','2.0','2.5','3.0','4.0'): - self.failUnless(v not in r, (v,r)) - - - def testOptionsAndHashing(self): - r1 = Requirement.parse("Twisted[foo,bar]>=1.2") - r2 = Requirement.parse("Twisted[bar,FOO]>=1.2") - r3 = Requirement.parse("Twisted[BAR,FOO]>=1.2.0") - self.assertEqual(r1,r2) - self.assertEqual(r1,r3) - self.assertEqual(r1.extras, ("foo","bar")) - self.assertEqual(r2.extras, ("bar","foo")) # extras are normalized - self.assertEqual(hash(r1), hash(r2)) - self.assertEqual( - hash(r1), hash(("twisted", ((">=",parse_version("1.2")),), - frozenset(["foo","bar"]))) - ) - - def testVersionEquality(self): - r1 = Requirement.parse("setuptools==0.3a2") - r2 = Requirement.parse("setuptools!=0.3a4") - d = Distribution.from_filename - - self.failIf(d("setuptools-0.3a4.egg") in r1) - self.failIf(d("setuptools-0.3a1.egg") in r1) - self.failIf(d("setuptools-0.3a4.egg") in r2) - - self.failUnless(d("setuptools-0.3a2.egg") in r1) - self.failUnless(d("setuptools-0.3a2.egg") in r2) - self.failUnless(d("setuptools-0.3a3.egg") in r2) - self.failUnless(d("setuptools-0.3a5.egg") in r2) - - - - - - - - - - - - - - -class ParseTests(TestCase): - - def testEmptyParse(self): - self.assertEqual(list(parse_requirements('')), []) - - def testYielding(self): - for inp,out in [ - ([], []), ('x',['x']), ([[]],[]), (' x\n y', ['x','y']), - (['x\n\n','y'], ['x','y']), - ]: - self.assertEqual(list(pkg_resources.yield_lines(inp)),out) - - def testSplitting(self): - self.assertEqual( - list( - pkg_resources.split_sections(""" - x - [Y] - z - - a - [b ] - # foo - c - [ d] - [q] - v - """ - ) - ), - [(None,["x"]), ("Y",["z","a"]), ("b",["c"]), ("d",[]), ("q",["v"])] - ) - self.assertRaises(ValueError,list,pkg_resources.split_sections("[foo")) - - def testSafeName(self): - self.assertEqual(safe_name("adns-python"), "adns-python") - self.assertEqual(safe_name("WSGI Utils"), "WSGI-Utils") - self.assertEqual(safe_name("WSGI Utils"), "WSGI-Utils") - self.assertEqual(safe_name("Money$$$Maker"), "Money-Maker") - self.assertNotEqual(safe_name("peak.web"), "peak-web") - - def testSafeVersion(self): - self.assertEqual(safe_version("1.2-1"), "1.2-1") - self.assertEqual(safe_version("1.2 alpha"), "1.2.alpha") - self.assertEqual(safe_version("2.3.4 20050521"), "2.3.4.20050521") - self.assertEqual(safe_version("Money$$$Maker"), "Money-Maker") - self.assertEqual(safe_version("peak.web"), "peak.web") - - def testSimpleRequirements(self): - self.assertEqual( - list(parse_requirements('Twis-Ted>=1.2-1')), - [Requirement('Twis-Ted',[('>=','1.2-1')], ())] - ) - self.assertEqual( - list(parse_requirements('Twisted >=1.2, \ # more\n<2.0')), - 
[Requirement('Twisted',[('>=','1.2'),('<','2.0')], ())] - ) - self.assertEqual( - Requirement.parse("FooBar==1.99a3"), - Requirement("FooBar", [('==','1.99a3')], ()) - ) - self.assertRaises(ValueError,Requirement.parse,">=2.3") - self.assertRaises(ValueError,Requirement.parse,"x\\") - self.assertRaises(ValueError,Requirement.parse,"x==2 q") - self.assertRaises(ValueError,Requirement.parse,"X==1\nY==2") - self.assertRaises(ValueError,Requirement.parse,"#") - - def testVersionEquality(self): - def c(s1,s2): - p1, p2 = parse_version(s1),parse_version(s2) - self.assertEqual(p1,p2, (s1,s2,p1,p2)) - - c('1.2-rc1', '1.2rc1') - c('0.4', '0.4.0') - c('0.4.0.0', '0.4.0') - c('0.4.0-0', '0.4-0') - c('0pl1', '0.0pl1') - c('0pre1', '0.0c1') - c('0.0.0preview1', '0c1') - c('0.0c1', '0-rc1') - c('1.2a1', '1.2.a.1'); c('1.2...a', '1.2a') - - def testVersionOrdering(self): - def c(s1,s2): - p1, p2 = parse_version(s1),parse_version(s2) - self.failUnless(p1 - "easy_install will install a package that is already there" - - - "be more like distutils with regard to --prefix=" - - - "respect the PYTHONPATH" - (Note: this patch does not work as intended when site.py has been modified. - This will be fixed in a future version.) - - - * The following patch to setuptools introduced bugs, and has been reverted - in zetuptoolz: - - $ svn log -r 45514 - ------------------------------------------------------------------------ - r45514 | phillip.eby | 2006-04-18 04:03:16 +0100 (Tue, 18 Apr 2006) | 9 lines - - Backport pkgutil, pydoc, and doctest from the 2.5 trunk to setuptools - 0.7 trunk. (Sideport?) Setuptools 0.7 will install these in place of - the 2.3/2.4 versions (at least of pydoc and doctest) to let them work - properly with eggs. pkg_resources now depends on the 2.5 pkgutil, which - is included here as _pkgutil, to work around the fact that some system - packagers will install setuptools without overriding the stdlib modules. - But users who install their own setuptools will get them, and the system - packaged people probably don't need them. - ------------------------------------------------------------------------ - - - * If unpatched setuptools decides that it needs to change an existing site.py - file that appears not to have been written by it (because the file does not - start with "def __boot():"), it aborts the installation. - zetuptoolz leaves the file alone and outputs a warning, but continues with - the installation. - - - * The scripts written by zetuptoolz have the following extra line: - - # generated by zetuptoolz - - after the header. - - - * Windows-specific changes (native Python): - - Python distributions may have command-line or GUI scripts. - On Windows, setuptools creates an executable wrapper to run each - script. zetuptools uses a different approach that does not require - an .exe wrapper. It writes approximately the same script file that - is used on other platforms, but with a .pyscript extension. - It also writes a shell-script wrapper (without any extension) that - is only used when the command is run from a Cygwin shell. - - Some of the advantages of this approach are: - - * Unicode arguments are preserved (although the program will - need to use some Windows-specific code to get at them in - current versions of Python); - * it works correctly on 64-bit Windows; - * the zetuptoolz distribution need not contain either any - binary executables, or any C code that needs to be compiled. - - See setuptools\tests\win_script_wrapper.txt for further details. 
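
  The association and PATHEXT changes described in the next paragraphs
  amount to a few per-user registry writes. A rough Python sketch of the
  idea (illustrative only -- this is not the zetuptoolz implementation,
  and the "Python.Script" ProgID name is made up):

      # Rough sketch of the per-user registry work described below; names
      # and details are illustrative, not the actual zetuptoolz code.
      import sys, _winreg as reg

      def associate_pyscript():
          # Map .pyscript to a ProgID under HKEY_CURRENT_USER\Software\Classes,
          # then give that ProgID an open command running this interpreter.
          key = reg.CreateKey(reg.HKEY_CURRENT_USER,
                              r"Software\Classes\.pyscript")
          reg.SetValue(key, "", reg.REG_SZ, "Python.Script")
          cmd = reg.CreateKey(reg.HKEY_CURRENT_USER,
                  r"Software\Classes\Python.Script\shell\open\command")
          reg.SetValue(cmd, "", reg.REG_SZ, '"%s" "%%1" %%*' % sys.executable)

          # Add .PYSCRIPT and .PYW to the user's PATHEXT so scripts can be
          # run without typing any extension.
          env = reg.CreateKey(reg.HKEY_CURRENT_USER, "Environment")
          try:
              pathext = reg.QueryValueEx(env, "PATHEXT")[0]
          except WindowsError:
              pathext = "%PATHEXT%"
          if ".PYSCRIPT" not in pathext.upper():
              reg.SetValueEx(env, "PATHEXT", 0, reg.REG_EXPAND_SZ,
                             pathext + ";.PYSCRIPT;.PYW")
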
- - Installing or building any distribution on Windows will automatically - associate .pyscript with the native Python interpreter for the current - user. It will also add .pyscript and .pyw to the PATHEXT variable for - the current user, which is needed to allow scripts to be run without - typing any extension. - - There is an additional setup.py command that can be used to perform - these steps separately (which isn't normally needed, but might be - useful for debugging): - - python setup.py scriptsetup - - Adding the --allusers option, i.e. - - python setup.py scriptsetup --allusers - - will make the .pyscript association and changes to the PATHEXT variable - for all users of this Windows installation, except those that have it - overridden in their per-user environment. In this case setup.py must be - run with Administrator privileges, e.g. from a Command Prompt whose - shortcut has been set to run as Administrator. diff --git a/zfec/stridetune-bench.ba.sh b/zfec/stridetune-bench.ba.sh deleted file mode 100755 index 01555ca..0000000 --- a/zfec/stridetune-bench.ba.sh +++ /dev/null @@ -1,16 +0,0 @@ -#!/bin/bash - -/bin/rm -rf ./benchresults -mkdir benchresults -STRIDE=32 -while [ $(( $STRIDE < 32769 )) ] ; do - /bin/rm -rf build - rm zfec/_fec.so - /bin/rm -rf instdir - mkdir instdir - PYTHONPATH=instdir ./setup.py develop --install-dir=instdir --stride=${STRIDE} >/dev/null - echo $STRIDE - PYTHONPATH=instdir python -OO ./bench/bench_zfec.py >> benchresults/comp_0-stride_$STRIDE - tail -1 benchresults/comp_0-stride_$STRIDE - STRIDE=$(( $STRIDE + 32 )) -done diff --git a/zfec/stridetune-bench.py b/zfec/stridetune-bench.py deleted file mode 100755 index 02c13e1..0000000 --- a/zfec/stridetune-bench.py +++ /dev/null @@ -1,43 +0,0 @@ -#!/usr/bin/env python - -import bisect, random, os, re - -from pyutil import fileutil - -assert not os.path.exists("benchresults") - -os.mkdir("benchresults") - -MIN=512 -MAX=1024 - -results = {} - -R=re.compile("ave rate: ([1-9][0-9]*)") - -def measure(stride): - fileutil.rm_dir("build") - fileutil.rm_dir("instdir") - fileutil.remove_if_possible(os.path.join("zfec", "_fec.so")) - fileutil.make_dirs("instdir") - fname = os.path.join("benchresults", "comp_0-stride_%d"%stride) - os.system("PYTHONPATH=instdir ./setup.py develop --install-dir=instdir --stride=%d >/dev/null" % stride) - os.system("PYTHONPATH=instdir python -OO ./bench/bench_zfec.py >> %s" % fname) - inf = open(fname, "rU") - for l in inf: - m = R.search(l) - if m: - result = int(m.group(1)) - if results.has_key(stride): - print "stride: %d, results: %d (dup %d)" % (stride, result, results[stride]) - else: - print "stride: %d, results: %d" % (stride, result) - results[stride] = result - break - -measure(MIN) -measure(MAX) - -while True: - stride = random.randrange(MIN, MAX+1) - measure(stride) diff --git a/zfec/stridetune-dat.bash b/zfec/stridetune-dat.bash deleted file mode 100755 index b250bff..0000000 --- a/zfec/stridetune-dat.bash +++ /dev/null @@ -1,10 +0,0 @@ -#!/bin/bash - -DAT=stridetune.dat -rm -f $DAT -for F in benchresults/comp_0-stride_*; do - NUM=${F:27} - for M in `grep "^N: " benchresults/comp_0-stride_$NUM | cut -d':' -f10-` ; do - echo "$NUM $M" >> $DAT - done -done diff --git a/zfec/stridetune-graph.py b/zfec/stridetune-graph.py deleted file mode 100755 index 7025989..0000000 --- a/zfec/stridetune-graph.py +++ /dev/null @@ -1,10 +0,0 @@ -#!/usr/bin/env python - -from pyx import * -def g(f): - g=graph.graphxy(width=16, x=graph.axis.linear(), y=graph.axis.linear()) - - 
g.plot([graph.data.file(f, x=1, y=2)]) - g.writeEPSfile(f+'.eps') - -g('stridetune.dat') diff --git a/zfec/test/__init__.py b/zfec/test/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/zfec/test/test_zfec.py b/zfec/test/test_zfec.py new file mode 100755 index 0000000..46bbb1c --- /dev/null +++ b/zfec/test/test_zfec.py @@ -0,0 +1,360 @@ +#!/usr/bin/env python + +import cStringIO, os, random, re + +import unittest + +global VERBOSE +VERBOSE=False + +import zfec + +from pyutil import fileutil + +from base64 import b32encode +def ab(x): # debuggery + if len(x) >= 3: + return "%s:%s" % (len(x), b32encode(x[-3:]),) + elif len(x) == 2: + return "%s:%s" % (len(x), b32encode(x[-2:]),) + elif len(x) == 1: + return "%s:%s" % (len(x), b32encode(x[-1:]),) + elif len(x) == 0: + return "%s:%s" % (len(x), "--empty--",) + +def randstr(n): + return ''.join(map(chr, map(random.randrange, [0]*n, [256]*n))) + +def _h(k, m, ss): + encer = zfec.Encoder(k, m) + nums_and_blocks = list(enumerate(encer.encode(ss))) + assert isinstance(nums_and_blocks, list), nums_and_blocks + assert len(nums_and_blocks) == m, (len(nums_and_blocks), m,) + nums_and_blocks = random.sample(nums_and_blocks, k) + blocks = [ x[1] for x in nums_and_blocks ] + nums = [ x[0] for x in nums_and_blocks ] + decer = zfec.Decoder(k, m) + decoded = decer.decode(blocks, nums) + assert len(decoded) == len(ss), (len(decoded), len(ss),) + assert tuple([str(s) for s in decoded]) == tuple([str(s) for s in ss]), (tuple([ab(str(s)) for s in decoded]), tuple([ab(str(s)) for s in ss]),) + +def _help_test_random(): + m = random.randrange(1, 257) + k = random.randrange(1, m+1) + l = random.randrange(0, 2**9) + ss = [ randstr(l/k) for x in range(k) ] + _h(k, m, ss) + +def _help_test_random_with_l(l): + m = random.randrange(1, 257) + k = random.randrange(1, m+1) + ss = [ randstr(l/k) for x in range(k) ] + _h(k, m, ss) + +def _h_easy(k, m, s): + encer = zfec.easyfec.Encoder(k, m) + nums_and_blocks = list(enumerate(encer.encode(s))) + assert isinstance(nums_and_blocks, list), nums_and_blocks + assert len(nums_and_blocks) == m, (len(nums_and_blocks), m,) + nums_and_blocks = random.sample(nums_and_blocks, k) + blocks = [ x[1] for x in nums_and_blocks ] + nums = [ x[0] for x in nums_and_blocks ] + decer = zfec.easyfec.Decoder(k, m) + + decodeds = decer.decode(blocks, nums, padlen=k*len(blocks[0]) - len(s)) + assert len(decodeds) == len(s), (ab(decodeds), ab(s), k, m) + assert decodeds == s, (ab(decodeds), ab(s),) + +def _help_test_random_easy(): + m = random.randrange(1, 257) + k = random.randrange(1, m+1) + l = random.randrange(0, 2**9) + s = randstr(l) + _h_easy(k, m, s) + +def _help_test_random_with_l_easy(l): + m = random.randrange(1, 257) + k = random.randrange(1, m+1) + s = randstr(l) + _h_easy(k, m, s) + +class ZFecTest(unittest.TestCase): + def test_instantiate_encoder_no_args(self): + try: + e = zfec.Encoder() + except TypeError: + # Okay, so that's because we're required to pass constructor args. + pass + else: + # Oops, it should have raised an exception. + self.fail("Should have raised exception from incorrect arguments to constructor.") + + def test_instantiate_decoder_no_args(self): + try: + e = zfec.Decoder() + except TypeError: + # Okay, so that's because we're required to pass constructor args. + pass + else: + # Oops, it should have raised an exception. 
+ self.fail("Should have raised exception from incorrect arguments to constructor.") + + def test_from_agl_c(self): + self.failUnless(zfec._fec.test_from_agl()) + + def test_from_agl_py(self): + e = zfec.Encoder(3, 5) + b0 = '\x01'*8 ; b1 = '\x02'*8 ; b2 = '\x03'*8 + # print "_from_py before encoding:" + # print "b0: %s, b1: %s, b2: %s" % tuple(base64.b16encode(x) for x in [b0, b1, b2]) + + b3, b4 = e.encode([b0, b1, b2], (3, 4)) + # print "after encoding:" + # print "b3: %s, b4: %s" % tuple(base64.b16encode(x) for x in [b3, b4]) + + d = zfec.Decoder(3, 5) + r0, r1, r2 = d.decode((b2, b3, b4), (1, 2, 3)) + + # print "after decoding:" + # print "b0: %s, b1: %s" % tuple(base64.b16encode(x) for x in [b0, b1]) + + def test_small(self): + for i in range(16): + _help_test_random_with_l(i) + if VERBOSE: + print "%d randomized tests pass." % (i+1) + + def test_random(self): + for i in range(3): + _help_test_random() + if VERBOSE: + print "%d randomized tests pass." % (i+1) + + def test_bad_args_construct_decoder(self): + try: + zfec.Decoder(-1, -1) + except zfec.Error, e: + assert "argument is required to be greater than or equal to 1" in str(e), e + else: + self.fail("Should have gotten an exception from out-of-range arguments.") + + try: + zfec.Decoder(1, 257) + except zfec.Error, e: + assert "argument is required to be less than or equal to 256" in str(e), e + else: + self.fail("Should have gotten an exception from out-of-range arguments.") + + try: + zfec.Decoder(3, 2) + except zfec.Error, e: + assert "first argument is required to be less than or equal to the second argument" in str(e), e + else: + self.fail("Should have gotten an exception from out-of-range arguments.") + + def test_bad_args_construct_encoder(self): + try: + zfec.Encoder(-1, -1) + except zfec.Error, e: + assert "argument is required to be greater than or equal to 1" in str(e), e + else: + self.fail("Should have gotten an exception from out-of-range arguments.") + + try: + zfec.Encoder(1, 257) + except zfec.Error, e: + assert "argument is required to be less than or equal to 256" in str(e), e + else: + self.fail("Should have gotten an exception from out-of-range arguments.") + + def test_bad_args_dec(self): + decer = zfec.Decoder(2, 4) + + try: + decer.decode(98, []) # first argument is not a sequence + except TypeError, e: + assert "First argument was not a sequence" in str(e), e + else: + self.fail("Should have gotten TypeError for wrong type of second argument.") + + try: + decer.decode(["a", "b", ], ["c", "d",]) + except zfec.Error, e: + assert "Precondition violation: second argument is required to contain int" in str(e), e + else: + self.fail("Should have gotten zfec.Error for wrong type of second argument.") + + try: + decer.decode(["a", "b", ], 98) # not a sequence at all + except TypeError, e: + assert "Second argument was not a sequence" in str(e), e + else: + self.fail("Should have gotten TypeError for wrong type of second argument.") + +class EasyFecTest(unittest.TestCase): + def test_small(self): + for i in range(16): + _help_test_random_with_l_easy(i) + if VERBOSE: + print "%d randomized tests pass." % (i+1) + + def test_random(self): + for i in range(3): + _help_test_random_easy() + if VERBOSE: + print "%d randomized tests pass." 
% (i+1) + + def test_bad_args_dec(self): + decer = zfec.easyfec.Decoder(2, 4) + + try: + decer.decode(98, [0, 1], 0) # first argument is not a sequence + except TypeError, e: + assert "First argument was not a sequence" in str(e), e + else: + self.fail("Should have gotten TypeError for wrong type of second argument.") + + try: + decer.decode("ab", ["c", "d",], 0) + except zfec.Error, e: + assert "Precondition violation: second argument is required to contain int" in str(e), e + else: + self.fail("Should have gotten zfec.Error for wrong type of second argument.") + + try: + decer.decode("ab", 98, 0) # not a sequence at all + except TypeError, e: + assert "Second argument was not a sequence" in str(e), e + else: + self.fail("Should have gotten TypeError for wrong type of second argument.") + +class FileFec(unittest.TestCase): + def test_filefec_header(self): + for m in [1, 2, 3, 5, 7, 9, 11, 17, 19, 33, 35, 65, 66, 67, 129, 130, 131, 254, 255, 256,]: + for k in [1, 2, 3, 5, 9, 17, 33, 65, 129, 255, 256,]: + if k >= m: + continue + for pad in [0, 1, k-1,]: + if pad >= k: + continue + for sh in [0, 1, m-1,]: + if sh >= m: + continue + h = zfec.filefec._build_header(m, k, pad, sh) + hio = cStringIO.StringIO(h) + (rm, rk, rpad, rsh,) = zfec.filefec._parse_header(hio) + assert (rm, rk, rpad, rsh,) == (m, k, pad, sh,), h + + def _help_test_filefec(self, teststr, k, m, numshs=None): + if numshs == None: + numshs = m + + TESTFNAME = "testfile.txt" + PREFIX = "test" + SUFFIX = ".fec" + + fsize = len(teststr) + + tempdir = fileutil.NamedTemporaryDirectory(cleanup=True) + try: + tempf = tempdir.file(TESTFNAME, 'w+b') + tempf.write(teststr) + tempf.flush() + tempf.seek(0) + + # encode the file + zfec.filefec.encode_to_files(tempf, fsize, tempdir.name, PREFIX, k, m, SUFFIX, verbose=VERBOSE) + + # select some share files + RE=re.compile(zfec.filefec.RE_FORMAT % (PREFIX, SUFFIX,)) + fns = os.listdir(tempdir.name) + assert len(fns) >= m, (fns, tempdir, tempdir.name,) + sharefs = [ open(os.path.join(tempdir.name, fn), "rb") for fn in fns if RE.match(fn) ] + for sharef in sharefs: + tempdir.register_file(sharef) + random.shuffle(sharefs) + del sharefs[numshs:] + + # decode from the share files + outf = tempdir.file('recovered-testfile.txt', 'w+b') + zfec.filefec.decode_from_files(outf, sharefs, verbose=VERBOSE) + outf.flush() + outf.seek(0) + recovereddata = outf.read() + assert recovereddata == teststr, (ab(recovereddata), ab(teststr),) + finally: + tempdir.shutdown() + + def test_filefec_all_shares(self): + return self._help_test_filefec("Yellow Whirled!", 3, 8) + + def test_filefec_all_shares_1_b(self): + return self._help_test_filefec("Yellow Whirled!", 4, 16) + + def test_filefec_all_shares_2(self): + return self._help_test_filefec("Yellow Whirled", 3, 8) + + def test_filefec_all_shares_2_b(self): + return self._help_test_filefec("Yellow Whirled", 4, 16) + + def test_filefec_all_shares_3(self): + return self._help_test_filefec("Yellow Whirle", 3, 8) + + def test_filefec_all_shares_3_b(self): + return self._help_test_filefec("Yellow Whirle", 4, 16) + + def test_filefec_all_shares_with_padding(self, noisy=VERBOSE): + return self._help_test_filefec("Yellow Whirled!A", 3, 8) + + def test_filefec_min_shares_with_padding(self, noisy=VERBOSE): + return self._help_test_filefec("Yellow Whirled!A", 3, 8, numshs=3) + + def test_filefec_min_shares_with_crlf(self, noisy=VERBOSE): + return self._help_test_filefec("llow Whirled!A\r\n", 3, 8, numshs=3) + + def test_filefec_min_shares_with_lf(self, noisy=VERBOSE): + 
return self._help_test_filefec("Yellow Whirled!A\n", 3, 8, numshs=3) + + def test_filefec_min_shares_with_lflf(self, noisy=VERBOSE): + return self._help_test_filefec("Yellow Whirled!A\n\n", 3, 8, numshs=3) + + def test_filefec_min_shares_with_crcrlflf(self, noisy=VERBOSE): + return self._help_test_filefec("Yellow Whirled!A\r\r\n\n", 3, 8, numshs=3) + + +class Cmdline(unittest.TestCase): + def test_basic(self, noisy=VERBOSE): + tempdir = fileutil.NamedTemporaryDirectory(cleanup=True) + fo = tempdir.file("test.data", "w+b") + fo.write("WHEHWHJEKWAHDLJAWDHWALKDHA") + + import sys + realargv = sys.argv + try: + DEFAULT_M=8 + DEFAULT_K=3 + sys.argv = ["zfec", os.path.join(tempdir.name, "test.data"),] + + retcode = zfec.cmdline_zfec.main() + assert retcode == 0, retcode + + RE=re.compile(zfec.filefec.RE_FORMAT % ('test.data', ".fec",)) + fns = os.listdir(tempdir.name) + assert len(fns) >= DEFAULT_M, (fns, DEFAULT_M, tempdir, tempdir.name,) + sharefns = [ os.path.join(tempdir.name, fn) for fn in fns if RE.match(fn) ] + random.shuffle(sharefns) + del sharefns[DEFAULT_K:] + + sys.argv = ["zunfec",] + sys.argv.extend(sharefns) + sys.argv.extend(['-o', os.path.join(tempdir.name, 'test.data-recovered'),]) + + retcode = zfec.cmdline_zunfec.main() + assert retcode == 0, retcode + import filecmp + assert filecmp.cmp(os.path.join(tempdir.name, 'test.data'), os.path.join(tempdir.name, 'test.data-recovered')) + finally: + sys.argv = realargv + +if __name__ == "__main__": + unittest.main() diff --git a/zfec/zfec.egg-info/stdeb.cfg b/zfec/zfec.egg-info/stdeb.cfg deleted file mode 100644 index 69707c1..0000000 --- a/zfec/zfec.egg-info/stdeb.cfg +++ /dev/null @@ -1,2 +0,0 @@ -[zfec] -Copyright-File: copyright diff --git a/zfec/zfec/__init__.py b/zfec/zfec/__init__.py deleted file mode 100644 index 475acf9..0000000 --- a/zfec/zfec/__init__.py +++ /dev/null @@ -1,31 +0,0 @@ -""" -zfec -- fast forward error correction library with Python interface - -maintainer web site: U{http://tahoe-lafs.org/source/zfec} - -zfec web site: U{http://tahoe-lafs.org/source/zfec} -""" - -__version__ = "unknown" -try: - from _version import __version__ -except ImportError: - # We're running in a tree that hasn't run darcsver, and didn't come with a - # _version.py, so we don't know what our version is. This should not happen - # very often. - pass - -from _fec import Encoder, Decoder, Error -import easyfec, filefec, cmdline_zfec, cmdline_zunfec - -quiet_pyflakes=[__version__, Error, Encoder, Decoder, cmdline_zunfec, filefec, cmdline_zfec, easyfec] - -# zfec -- fast forward error correction library with Python interface -# -# Copyright (C) 2007-2010 Allmydata, Inc. -# Author: Zooko Wilcox-O'Hearn -# mailto:zooko@zooko.com -# -# This file is part of zfec. -# -# See README.rst for licensing information. 
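
For orientation before the C implementation below: a minimal sketch of the
k-of-m round trip through the Python API that the tests above exercise
(assuming the input length divides evenly by k; all blocks passed to the
codec must be the same length):

    # Minimal k-of-m round trip with the zfec API used in the tests above.
    # Sketch only; assumes len(data) divides evenly by k.
    import zfec

    k, m = 3, 5
    data = "hello world!"
    size = len(data) // k
    primary = [data[i*size:(i+1)*size] for i in range(k)]  # k primary blocks

    blocks = zfec.Encoder(k, m).encode(primary)  # all m blocks, in order
    # Any k of the m blocks, with their block numbers, recover the input;
    # here we use primary block 2 plus the two check blocks 3 and 4.
    recovered = zfec.Decoder(k, m).decode(blocks[2:], [2, 3, 4])
    assert "".join(recovered) == data
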
diff --git a/zfec/zfec/_fecmodule.c b/zfec/zfec/_fecmodule.c deleted file mode 100644 index a742051..0000000 --- a/zfec/zfec/_fecmodule.c +++ /dev/null @@ -1,648 +0,0 @@ -/** - * zfec -- fast forward error correction library with Python interface - */ - -#include -#include -#include - - -#if (PY_VERSION_HEX < 0x02050000) -typedef int Py_ssize_t; -#endif - -#include "fec.h" - -#include "stdarg.h" - -static PyObject *py_fec_error; - -static char fec__doc__[] = "\ -FEC - Forward Error Correction \n\ -"; - -static char Encoder__doc__[] = "\ -Hold static encoder state (an in-memory table for matrix multiplication), and k and m parameters, and provide {encode()} method.\n\n\ -@param k: the number of packets required for reconstruction \n\ -@param m: the number of packets generated \n\ -"; - -typedef struct { - PyObject_HEAD - - /* expose these */ - unsigned short kk; - unsigned short mm; - - /* internal */ - fec_t* fec_matrix; -} Encoder; - -static PyObject * -Encoder_new(PyTypeObject *type, PyObject *args, PyObject *kwdict) { - Encoder *self; - - self = (Encoder*)type->tp_alloc(type, 0); - if (self != NULL) { - self->kk = 0; - self->mm = 0; - self->fec_matrix = NULL; - } - - return (PyObject *)self; -} - -static int -Encoder_init(Encoder *self, PyObject *args, PyObject *kwdict) { - static char *kwlist[] = { - "k", - "m", - NULL - }; - int ink, inm; - if (!PyArg_ParseTupleAndKeywords(args, kwdict, "ii:Encoder.__init__", kwlist, &ink, &inm)) - return -1; - - if (ink < 1) { - PyErr_Format(py_fec_error, "Precondition violation: first argument is required to be greater than or equal to 1, but it was %d", ink); - return -1; - } - if (inm < 1) { - PyErr_Format(py_fec_error, "Precondition violation: second argument is required to be greater than or equal to 1, but it was %d", inm); - return -1; - } - if (inm > 256) { - PyErr_Format(py_fec_error, "Precondition violation: second argument is required to be less than or equal to 256, but it was %d", inm); - return -1; - } - if (ink > inm) { - PyErr_Format(py_fec_error, "Precondition violation: first argument is required to be less than or equal to the second argument, but they were %d and %d respectively", ink, inm); - return -1; - } - self->kk = (unsigned short)ink; - self->mm = (unsigned short)inm; - self->fec_matrix = fec_new(self->kk, self->mm); - - return 0; -} - -static char Encoder_encode__doc__[] = "\ -Encode data into m packets.\n\ -\n\ -@param inblocks: a sequence of k buffers of data to encode -- these are the k primary blocks, i.e. the input data split into k pieces (for best performance, make it a tuple instead of a list); All blocks are required to be the same length.\n\ -@param desired_blocks_nums optional sequence of blocknums indicating which blocks to produce and return; If None, all m blocks will be returned (in order). (For best performance, make it a tuple instead of a list.)\n\ -@returns: a list of buffers containing the requested blocks; Note that if any of the input blocks were 'primary blocks', i.e. their blocknum was < k, then the result sequence will contain a Python reference to the same Python object as was passed in. As long as the Python object in question is immutable (i.e. a string) then you don't have to think about this detail, but if it is mutable (i.e. 
an array), then you have to be aware that if you subsequently mutate the contents of that object then that will also change the contents of the sequence that was returned from this call to encode().\n\ -"; - -static PyObject * -Encoder_encode(Encoder *self, PyObject *args) { - PyObject* inblocks; - PyObject* desired_blocks_nums = NULL; /* The blocknums of the blocks that should be returned. */ - PyObject* result = NULL; - - gf** check_blocks_produced = (gf**)alloca((self->mm - self->kk) * sizeof(PyObject*)); /* This is an upper bound -- we will actually use only num_check_blocks_produced of these elements (see below). */ - PyObject** pystrs_produced = (PyObject**)alloca((self->mm - self->kk) * sizeof(PyObject*)); /* This is an upper bound -- we will actually use only num_check_blocks_produced of these elements (see below). */ - unsigned num_check_blocks_produced = 0; /* The first num_check_blocks_produced elements of the check_blocks_produced array and of the pystrs_produced array will be used. */ - const gf** incblocks = (const gf**)alloca(self->kk * sizeof(const gf*)); - unsigned num_desired_blocks; - PyObject* fast_desired_blocks_nums = NULL; - PyObject** fast_desired_blocks_nums_items; - unsigned* c_desired_blocks_nums = (unsigned*)alloca(self->mm * sizeof(unsigned)); - unsigned* c_desired_checkblocks_ids = (unsigned*)alloca((self->mm - self->kk) * sizeof(unsigned)); - unsigned i; - PyObject* fastinblocks = NULL; - PyObject** fastinblocksitems; - Py_ssize_t sz, oldsz = 0; - unsigned char check_block_index = 0; /* index into the check_blocks_produced and (parallel) pystrs_produced arrays */ - - if (!PyArg_ParseTuple(args, "O|O:Encoder.encode", &inblocks, &desired_blocks_nums)) - return NULL; - - for (i=0; imm - self->kk; i++) - pystrs_produced[i] = NULL; - if (desired_blocks_nums) { - fast_desired_blocks_nums = PySequence_Fast(desired_blocks_nums, "Second argument (optional) was not a sequence."); - if (!fast_desired_blocks_nums) - goto err; - num_desired_blocks = PySequence_Fast_GET_SIZE(fast_desired_blocks_nums); - fast_desired_blocks_nums_items = PySequence_Fast_ITEMS(fast_desired_blocks_nums); - for (i=0; i= self->kk) - num_check_blocks_produced++; - } - } else { - num_desired_blocks = self->mm; - for (i=0; imm - self->kk; - } - - fastinblocks = PySequence_Fast(inblocks, "First argument was not a sequence."); - if (!fastinblocks) - goto err; - - if (PySequence_Fast_GET_SIZE(fastinblocks) != self->kk) { - PyErr_Format(py_fec_error, "Precondition violation: Wrong length -- first argument (the sequence of input blocks) is required to contain exactly k blocks. len(first): %Zu, k: %d", PySequence_Fast_GET_SIZE(fastinblocks), self->kk); - goto err; - } - - /* Construct a C array of gf*'s of the input data. */ - fastinblocksitems = PySequence_Fast_ITEMS(fastinblocks); - if (!fastinblocksitems) - goto err; - - for (i=0; ikk; i++) { - if (!PyObject_CheckReadBuffer(fastinblocksitems[i])) { - PyErr_Format(py_fec_error, "Precondition violation: %u'th item is required to offer the single-segment read character buffer protocol, but it does not.", i); - goto err; - } - if (PyObject_AsReadBuffer(fastinblocksitems[i], (const void**)&(incblocks[i]), &sz)) - goto err; - if (oldsz != 0 && oldsz != sz) { - PyErr_Format(py_fec_error, "Precondition violation: Input blocks are required to be all the same length. length of one block was: %Zu, length of another block was: %Zu", oldsz, sz); - goto err; - } - oldsz = sz; - } - - /* Allocate space for all of the check blocks. 
*/ - - for (i=0; i= self->kk) { - c_desired_checkblocks_ids[check_block_index] = c_desired_blocks_nums[i]; - pystrs_produced[check_block_index] = PyString_FromStringAndSize(NULL, sz); - if (pystrs_produced[check_block_index] == NULL) - goto err; - check_blocks_produced[check_block_index] = (gf*)PyString_AsString(pystrs_produced[check_block_index]); - if (check_blocks_produced[check_block_index] == NULL) - goto err; - check_block_index++; - } - } - assert (check_block_index == num_check_blocks_produced); - - /* Encode any check blocks that are needed. */ - fec_encode(self->fec_matrix, incblocks, check_blocks_produced, c_desired_checkblocks_ids, num_check_blocks_produced, sz); - - /* Wrap all requested blocks up into a Python list of Python strings. */ - result = PyList_New(num_desired_blocks); - if (result == NULL) - goto err; - check_block_index = 0; - for (i=0; ikk) { - Py_INCREF(fastinblocksitems[c_desired_blocks_nums[i]]); - if (PyList_SetItem(result, i, fastinblocksitems[c_desired_blocks_nums[i]]) == -1) { - Py_DECREF(fastinblocksitems[c_desired_blocks_nums[i]]); - goto err; - } - } else { - if (PyList_SetItem(result, i, pystrs_produced[check_block_index]) == -1) - goto err; - pystrs_produced[check_block_index] = NULL; - check_block_index++; - } - } - - goto cleanup; - err: - for (i=0; ifec_matrix) - fec_free(self->fec_matrix); - self->ob_type->tp_free((PyObject*)self); -} - -static PyMethodDef Encoder_methods[] = { - {"encode", (PyCFunction)Encoder_encode, METH_VARARGS, Encoder_encode__doc__}, - {NULL}, -}; - -static PyMemberDef Encoder_members[] = { - {"k", T_SHORT, offsetof(Encoder, kk), READONLY, "k"}, - {"m", T_SHORT, offsetof(Encoder, mm), READONLY, "m"}, - {NULL} /* Sentinel */ -}; - -static PyTypeObject Encoder_type = { - PyObject_HEAD_INIT(NULL) - 0, /*ob_size*/ - "_fec.Encoder", /*tp_name*/ - sizeof(Encoder), /*tp_basicsize*/ - 0, /*tp_itemsize*/ - (destructor)Encoder_dealloc, /*tp_dealloc*/ - 0, /*tp_print*/ - 0, /*tp_getattr*/ - 0, /*tp_setattr*/ - 0, /*tp_compare*/ - 0, /*tp_repr*/ - 0, /*tp_as_number*/ - 0, /*tp_as_sequence*/ - 0, /*tp_as_mapping*/ - 0, /*tp_hash */ - 0, /*tp_call*/ - 0, /*tp_str*/ - 0, /*tp_getattro*/ - 0, /*tp_setattro*/ - 0, /*tp_as_buffer*/ - Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /*tp_flags*/ - Encoder__doc__, /* tp_doc */ - 0, /* tp_traverse */ - 0, /* tp_clear */ - 0, /* tp_richcompare */ - 0, /* tp_weaklistoffset */ - 0, /* tp_iter */ - 0, /* tp_iternext */ - Encoder_methods, /* tp_methods */ - Encoder_members, /* tp_members */ - 0, /* tp_getset */ - 0, /* tp_base */ - 0, /* tp_dict */ - 0, /* tp_descr_get */ - 0, /* tp_descr_set */ - 0, /* tp_dictoffset */ - (initproc)Encoder_init, /* tp_init */ - 0, /* tp_alloc */ - Encoder_new, /* tp_new */ -}; - -static char Decoder__doc__[] = "\ -Hold static decoder state (an in-memory table for matrix multiplication), and k and m parameters, and provide {decode()} method.\n\n\ -@param k: the number of packets required for reconstruction \n\ -@param m: the number of packets generated \n\ -"; - -typedef struct { - PyObject_HEAD - - /* expose these */ - unsigned short kk; - unsigned short mm; - - /* internal */ - fec_t* fec_matrix; -} Decoder; - -static PyObject * -Decoder_new(PyTypeObject *type, PyObject *args, PyObject *kwdict) { - Decoder *self; - - self = (Decoder*)type->tp_alloc(type, 0); - if (self != NULL) { - self->kk = 0; - self->mm = 0; - self->fec_matrix = NULL; - } - - return (PyObject *)self; -} - -static int -Decoder_init(Encoder *self, PyObject *args, PyObject *kwdict) { - static char 
*kwlist[] = { - "k", - "m", - NULL - }; - - int ink, inm; - if (!PyArg_ParseTupleAndKeywords(args, kwdict, "ii:Decoder.__init__", kwlist, &ink, &inm)) - return -1; - - if (ink < 1) { - PyErr_Format(py_fec_error, "Precondition violation: first argument is required to be greater than or equal to 1, but it was %d", ink); - return -1; - } - if (inm < 1) { - PyErr_Format(py_fec_error, "Precondition violation: second argument is required to be greater than or equal to 1, but it was %d", inm); - return -1; - } - if (inm > 256) { - PyErr_Format(py_fec_error, "Precondition violation: second argument is required to be less than or equal to 256, but it was %d", inm); - return -1; - } - if (ink > inm) { - PyErr_Format(py_fec_error, "Precondition violation: first argument is required to be less than or equal to the second argument, but they were %d and %d respectively", ink, inm); - return -1; - } - self->kk = (unsigned short)ink; - self->mm = (unsigned short)inm; - self->fec_matrix = fec_new(self->kk, self->mm); - - return 0; -} - -#define SWAP(a,b,t) {t tmp; tmp=a; a=b; b=tmp;} - -static char Decoder_decode__doc__[] = "\ -Decode a list blocks into a list of segments.\n\ -@param blocks a sequence of buffers containing block data (for best performance, make it a tuple instead of a list)\n\ -@param blocknums a sequence of integers of the blocknum for each block in blocks (for best performance, make it a tuple instead of a list)\n\ -\n\ -@return a list of strings containing the segment data (i.e. ''.join(retval) yields a string containing the decoded data)\n\ -"; - -static PyObject * -Decoder_decode(Decoder *self, PyObject *args) { - PyObject*restrict blocks; - PyObject*restrict blocknums; - PyObject* result = NULL; - - const gf**restrict cblocks = (const gf**restrict)alloca(self->kk * sizeof(const gf*)); - unsigned* cblocknums = (unsigned*)alloca(self->kk * sizeof(unsigned)); - gf**restrict recoveredcstrs = (gf**)alloca(self->kk * sizeof(gf*)); /* self->kk is actually an upper bound -- we probably won't need all of this space. */ - PyObject**restrict recoveredpystrs = (PyObject**restrict)alloca(self->kk * sizeof(PyObject*)); /* self->kk is actually an upper bound -- we probably won't need all of this space. */ - unsigned i; - PyObject*restrict fastblocknums = NULL; - PyObject*restrict fastblocks; - unsigned needtorecover=0; - PyObject** fastblocksitems; - PyObject** fastblocknumsitems; - Py_ssize_t sz, oldsz = 0; - long tmpl; - unsigned nextrecoveredix=0; - - if (!PyArg_ParseTuple(args, "OO:Decoder.decode", &blocks, &blocknums)) - return NULL; - - for (i=0; ikk; i++) - recoveredpystrs[i] = NULL; - fastblocks = PySequence_Fast(blocks, "First argument was not a sequence."); - if (!fastblocks) - goto err; - fastblocknums = PySequence_Fast(blocknums, "Second argument was not a sequence."); - if (!fastblocknums) - goto err; - - if (PySequence_Fast_GET_SIZE(fastblocks) != self->kk) { - PyErr_Format(py_fec_error, "Precondition violation: Wrong length -- first argument is required to contain exactly k blocks. len(first): %Zu, k: %d", PySequence_Fast_GET_SIZE(fastblocks), self->kk); - goto err; - } - if (PySequence_Fast_GET_SIZE(fastblocknums) != self->kk) { - PyErr_Format(py_fec_error, "Precondition violation: Wrong length -- blocknums is required to contain exactly k blocks. len(blocknums): %Zu, k: %d", PySequence_Fast_GET_SIZE(fastblocknums), self->kk); - goto err; - } - - /* Construct a C array of gf*'s of the data and another of C ints of the blocknums. 
*/ - fastblocknumsitems = PySequence_Fast_ITEMS(fastblocknums); - if (!fastblocknumsitems) - goto err; - fastblocksitems = PySequence_Fast_ITEMS(fastblocks); - if (!fastblocksitems) - goto err; - - for (i=0; ikk; i++) { - if (!PyInt_Check(fastblocknumsitems[i])) { - PyErr_Format(py_fec_error, "Precondition violation: second argument is required to contain int."); - goto err; - } - tmpl = PyInt_AsLong(fastblocknumsitems[i]); - if (tmpl < 0 || tmpl > 255) { - PyErr_Format(py_fec_error, "Precondition violation: block nums can't be less than zero or greater than 255. %ld\n", tmpl); - goto err; - } - cblocknums[i] = (unsigned)tmpl; - if (cblocknums[i] >= self->kk) - needtorecover+=1; - - if (!PyObject_CheckReadBuffer(fastblocksitems[i])) { - PyErr_Format(py_fec_error, "Precondition violation: %u'th item is required to offer the single-segment read character buffer protocol, but it does not.\n", i); - goto err; - } - if (PyObject_AsReadBuffer(fastblocksitems[i], (const void**)&(cblocks[i]), &sz)) - goto err; - if (oldsz != 0 && oldsz != sz) { - PyErr_Format(py_fec_error, "Precondition violation: Input blocks are required to be all the same length. length of one block was: %Zu, length of another block was: %Zu\n", oldsz, sz); - goto err; - } - oldsz = sz; - } - - /* Move src packets into position. At the end of this loop we want the i'th - element of the arrays to be the block with block number i, if that block - is among our inputs. */ - for (i=0; ikk;) { - if (cblocknums[i] >= self->kk || cblocknums[i] == i) - i++; - else { - /* put pkt in the right position. */ - unsigned c = cblocknums[i]; - - SWAP (cblocknums[i], cblocknums[c], int); - SWAP (cblocks[i], cblocks[c], const gf*); - SWAP (fastblocksitems[i], fastblocksitems[c], PyObject*); - } - } - - /* Allocate space for all of the recovered blocks. */ - for (i=0; ifec_matrix, cblocks, recoveredcstrs, cblocknums, sz); - - /* Wrap up both original primary blocks and decoded blocks into a Python list of Python strings. */ - result = PyList_New(self->kk); - if (result == NULL) - goto err; - for (i=0; ikk; i++) { - if (cblocknums[i] == i) { - /* Original primary block. */ - Py_INCREF(fastblocksitems[i]); - if (PyList_SetItem(result, i, fastblocksitems[i]) == -1) { - Py_DECREF(fastblocksitems[i]); - goto err; - } - } else { - /* Recovered block. 
*/ - if (PyList_SetItem(result, i, recoveredpystrs[nextrecoveredix]) == -1) - goto err; - recoveredpystrs[nextrecoveredix] = NULL; - nextrecoveredix++; - } - } - - goto cleanup; - err: - for (i=0; i<self->kk; i++) - Py_XDECREF(recoveredpystrs[i]); - Py_XDECREF(result); result = NULL; - cleanup: - Py_XDECREF(fastblocks); fastblocks=NULL; - Py_XDECREF(fastblocknums); fastblocknums=NULL; - return result; -} - -static void -Decoder_dealloc(Decoder * self) { - if (self->fec_matrix) - fec_free(self->fec_matrix); - self->ob_type->tp_free((PyObject*)self); -} - -static PyMethodDef Decoder_methods[] = { - {"decode", (PyCFunction)Decoder_decode, METH_VARARGS, Decoder_decode__doc__}, - {NULL}, -}; - -static PyMemberDef Decoder_members[] = { - {"k", T_SHORT, offsetof(Decoder, kk), READONLY, "k"}, - {"m", T_SHORT, offsetof(Decoder, mm), READONLY, "m"}, - {NULL} /* Sentinel */ -}; - -static PyTypeObject Decoder_type = { - PyObject_HEAD_INIT(NULL) - 0, /*ob_size*/ - "_fec.Decoder", /*tp_name*/ - sizeof(Decoder), /*tp_basicsize*/ - 0, /*tp_itemsize*/ - (destructor)Decoder_dealloc, /*tp_dealloc*/ - 0, /*tp_print*/ - 0, /*tp_getattr*/ - 0, /*tp_setattr*/ - 0, /*tp_compare*/ - 0, /*tp_repr*/ - 0, /*tp_as_number*/ - 0, /*tp_as_sequence*/ - 0, /*tp_as_mapping*/ - 0, /*tp_hash */ - 0, /*tp_call*/ - 0, /*tp_str*/ - 0, /*tp_getattro*/ - 0, /*tp_setattro*/ - 0, /*tp_as_buffer*/ - Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /*tp_flags*/ - Decoder__doc__, /* tp_doc */ - 0, /* tp_traverse */ - 0, /* tp_clear */ - 0, /* tp_richcompare */ - 0, /* tp_weaklistoffset */ - 0, /* tp_iter */ - 0, /* tp_iternext */ - Decoder_methods, /* tp_methods */ - Decoder_members, /* tp_members */ - 0, /* tp_getset */ - 0, /* tp_base */ - 0, /* tp_dict */ - 0, /* tp_descr_get */ - 0, /* tp_descr_set */ - 0, /* tp_dictoffset */ - (initproc)Decoder_init, /* tp_init */ - 0, /* tp_alloc */ - Decoder_new, /* tp_new */ -}; - - -void -_hexwrite(unsigned char*s, size_t l) { - size_t i; - for (i = 0; i < l; i++) - printf("%.2x", s[i]); -} - - -PyObject* -test_from_agl(PyObject* self, PyObject* args) { - unsigned char b0c[8], b1c[8]; - unsigned char b0[8], b1[8], b2[8], b3[8], b4[8]; - - const unsigned char *blocks[3] = {b0, b1, b2}; - unsigned char *outblocks[2] = {b3, b4}; - unsigned block_nums[] = {3, 4}; - - fec_t *const fec = fec_new(3, 5); - - const unsigned char *inpkts[] = {b3, b4, b2}; - unsigned char *outpkts[] = {b0, b1}; - unsigned indexes[] = {3, 4, 2}; - - memset(b0, 1, 8); - memset(b1, 2, 8); - memset(b2, 3, 8); - - /*printf("_from_c before encoding:\n"); - printf("b0: "); _hexwrite(b0, 8); printf(", "); - printf("b1: "); _hexwrite(b1, 8); printf(", "); - printf("b2: "); _hexwrite(b2, 8); printf(", "); - printf("\n");*/ - - fec_encode(fec, blocks, outblocks, block_nums, 2, 8); - - /*printf("after encoding:\n"); - printf("b3: "); _hexwrite(b3, 8); printf(", "); - printf("b4: "); _hexwrite(b4, 8); printf(", "); - printf("\n");*/ - - memcpy(b0c, b0, 8); memcpy(b1c, b1, 8); - - fec_decode(fec, inpkts, outpkts, indexes, 8); - - /*printf("after decoding:\n"); - printf("b0: "); _hexwrite(b0, 8); printf(", "); - printf("b1: "); _hexwrite(b1, 8); - printf("\n");*/ - - if ((memcmp(b0, b0c,8) == 0) && (memcmp(b1, b1c,8) == 0)) - Py_RETURN_TRUE; - else - Py_RETURN_FALSE; -} - -static PyMethodDef fec_functions[] = { - {"test_from_agl", test_from_agl, METH_NOARGS, NULL}, - {NULL} -}; - -#ifndef PyMODINIT_FUNC /* declarations for DLL import/export */ -#define PyMODINIT_FUNC void -#endif -PyMODINIT_FUNC -init_fec(void) { - PyObject *module; - PyObject
*module_dict; - - if (PyType_Ready(&Encoder_type) < 0) - return; - if (PyType_Ready(&Decoder_type) < 0) - return; - - module = Py_InitModule3("_fec", fec_functions, fec__doc__); - if (module == NULL) - return; - - Py_INCREF(&Encoder_type); - Py_INCREF(&Decoder_type); - - PyModule_AddObject(module, "Encoder", (PyObject *)&Encoder_type); - PyModule_AddObject(module, "Decoder", (PyObject *)&Decoder_type); - - module_dict = PyModule_GetDict(module); - py_fec_error = PyErr_NewException("_fec.Error", NULL, NULL); - PyDict_SetItemString(module_dict, "Error", py_fec_error); -} - -/** - * originally inspired by fecmodule.c by the Mnet Project, especially Myers - * Carpenter and Hauke Johannknecht - */ diff --git a/zfec/zfec/cmdline_zfec.py b/zfec/zfec/cmdline_zfec.py deleted file mode 100755 index 2eee2b0..0000000 --- a/zfec/zfec/cmdline_zfec.py +++ /dev/null @@ -1,72 +0,0 @@ -#!/usr/bin/env python - -# zfec -- a fast C implementation of Reed-Solomon erasure coding with -# command-line, C, and Python interfaces - -import sys - -import argparse -import filefec - -from zfec import __version__ as libversion -__version__ = libversion - -DEFAULT_K=3 -DEFAULT_M=8 - -def main(): - - if '-V' in sys.argv or '--version' in sys.argv: - print "zfec library version: ", libversion - print "zfec command-line tool version: ", __version__ - sys.exit(0) - - parser = argparse.ArgumentParser(description="Encode a file into a set of share files, a subset of which can later be used to recover the original file.") - - parser.add_argument('inputfile', help='file to encode or "-" for stdin', type=argparse.FileType('rb'), metavar='INF') - parser.add_argument('-d', '--output-dir', help='directory in which share file names will be created (default ".")', default='.', metavar='D') - parser.add_argument('-p', '--prefix', help='prefix for share file names; if omitted, the name of the input file will be used', metavar='P') - parser.add_argument('-s', '--suffix', help='suffix for share file names (default ".fec")', default='.fec', metavar='S') - parser.add_argument('-m', '--totalshares', help='the total number of share files created (default %d)' % DEFAULT_M, default=DEFAULT_M, type=int, metavar='M') - parser.add_argument('-k', '--requiredshares', help='the number of share files required to reconstruct (default %d)' % DEFAULT_K, default=DEFAULT_K, type=int, metavar='K') - parser.add_argument('-f', '--force', help='overwrite any file already in place of an output file (share file)', action='store_true') - parser.add_argument('-v', '--verbose', help='print out messages about progress', action='store_true') - parser.add_argument('-q', '--quiet', help='quiet progress indications and warnings about silly choices of K and M', action='store_true') - parser.add_argument('-V', '--version', help='print out version number and exit', action='store_true') - args = parser.parse_args() - - if args.prefix is None: - args.prefix = args.inputfile.name - if args.prefix == "<stdin>": - args.prefix = "stdin" - - if args.verbose and args.quiet: - print "Please choose only one of --verbose and --quiet." - sys.exit(1) - - if args.totalshares > 256 or args.totalshares < 1: - print "Invalid parameters, totalshares is required to be <= 256 and >= 1\nPlease see the accompanying documentation." - sys.exit(1) - if args.requiredshares > args.totalshares or args.requiredshares < 1: - print "Invalid parameters, requiredshares is required to be <= totalshares and >= 1\nPlease see the accompanying documentation."
- sys.exit(1) - - if not args.quiet: - if args.requiredshares == 1: - print "warning: silly parameters: requiredshares == 1, which means that every share will be a complete copy of the file. You could use \"cp\" for the same effect. But proceeding to do it anyway..." - if args.requiredshares == args.totalshares: - print "warning: silly parameters: requiredshares == totalshares, which means that all shares will be required in order to reconstruct the file. You could use \"split\" for the same effect. But proceeding to do it anyway..." - - args.inputfile.seek(0, 2) - fsize = args.inputfile.tell() - args.inputfile.seek(0, 0) - return filefec.encode_to_files(args.inputfile, fsize, args.output_dir, args.prefix, args.requiredshares, args.totalshares, args.suffix, args.force, args.verbose) - -# zfec -- fast forward error correction library with Python interface -# -# Copyright (C) 2007 Allmydata, Inc. -# Author: Zooko Wilcox-O'Hearn -# -# This file is part of zfec. -# -# See README.rst for licensing information. diff --git a/zfec/zfec/cmdline_zunfec.py b/zfec/zfec/cmdline_zunfec.py deleted file mode 100755 index 8fc0c04..0000000 --- a/zfec/zfec/cmdline_zunfec.py +++ /dev/null @@ -1,66 +0,0 @@ -#!/usr/bin/env python - -# zfec -- a fast C implementation of Reed-Solomon erasure coding with -# command-line, C, and Python interfaces - -import os, sys - -import argparse -import filefec - -from zfec import __version__ as libversion -__version__ = libversion - -def main(): - if '-V' in sys.argv or '--version' in sys.argv: - print "zfec library version: ", libversion - print "zunfec command-line tool version: ", __version__ - return 0 - - parser = argparse.ArgumentParser(description="Decode data from share files.") - - parser.add_argument('-o', '--outputfile', required=True, help='file to write the resulting data to, or "-" for stdout', type=str, metavar='OUTF') - parser.add_argument('sharefiles', nargs='*', help='shares file to read the encoded data from', type=unicode, metavar='SHAREFILE') - parser.add_argument('-v', '--verbose', help='print out messages about progress', action='store_true') - parser.add_argument('-f', '--force', help='overwrite any file which already in place of the output file', action='store_true') - parser.add_argument('-V', '--version', help='print out version number and exit', action='store_true') - args = parser.parse_args() - - if len(args.sharefiles) < 2: - print "At least two sharefiles are required." - return 1 - - if args.force: - outf = open(args.outputfile, 'wb') - else: - try: - flags = os.O_WRONLY|os.O_CREAT|os.O_EXCL | (hasattr(os, 'O_BINARY') and os.O_BINARY) - outfd = os.open(args.outputfile, flags) - except OSError: - print "There is already a file named %r -- aborting. Use --force to overwrite." % (args.outputfile,) - return 2 - outf = os.fdopen(outfd, "wb") - - sharefs = [] - # This sort() actually matters for performance (shares with numbers < k - # are much faster to use than the others), as well as being important for - # reproducibility. - args.sharefiles.sort() - for fn in args.sharefiles: - sharefs.append(open(fn, 'rb')) - try: - ret = filefec.decode_from_files(outf, sharefs, args.verbose) - except filefec.InsufficientShareFilesError, e: - print str(e) - return 3 - - return 0 - -# zfec -- fast forward error correction library with Python interface -# -# Copyright (C) 2007 Allmydata, Inc. -# Author: Zooko Wilcox-O'Hearn -# -# This file is part of zfec. -# -# See README.rst for licensing information. 
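
For orientation while reading the rest of this patch: the zfec and zunfec tools above are thin wrappers around the filefec module whose move is recorded further down. Below is a minimal Python 2 sketch of the same encode/decode round trip driven directly through that module; the file names and the k and m values are illustrative, not taken from the patch:

    import os
    from zfec import filefec

    k, m = 3, 8
    inf = open("testfile", "rb")
    inf.seek(0, 2); fsize = inf.tell(); inf.seek(0)  # same size probe as cmdline_zfec
    # Writes m share files named testfile.NN_MM.fec into the current directory.
    filefec.encode_to_files(inf, fsize, ".", "testfile", k, m, ".fec",
                            overwrite=True, verbose=False)
    inf.close()

    # Any k of the share files suffice; sort them first, as cmdline_zunfec does.
    fns = sorted(fn for fn in os.listdir(".") if fn.endswith(".fec"))
    shares = [open(fn, "rb") for fn in fns[:k]]
    outf = open("testfile.recovered", "wb")
    filefec.decode_from_files(outf, shares, verbose=False)
    outf.close()

The two calls match the signatures of encode_to_files() and decode_from_files() as they appear in filefec.py later in this patch.
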
diff --git a/zfec/zfec/easyfec.py b/zfec/zfec/easyfec.py deleted file mode 100644 index dd209e7..0000000 --- a/zfec/zfec/easyfec.py +++ /dev/null @@ -1,64 +0,0 @@ -# zfec -- a fast C implementation of Reed-Solomon erasure coding with -# command-line, C, and Python interfaces - -import zfec - -# div_ceil() was copied from the pyutil library. -def div_ceil(n, d): - """ - The smallest integer k such that k*d >= n. - """ - return (n/d) + (n%d != 0) - -from base64 import b32encode -def ab(x): # debuggery - if len(x) >= 3: - return "%s:%s" % (len(x), b32encode(x[-3:]),) - elif len(x) == 2: - return "%s:%s" % (len(x), b32encode(x[-2:]),) - elif len(x) == 1: - return "%s:%s" % (len(x), b32encode(x[-1:]),) - elif len(x) == 0: - return "%s:%s" % (len(x), "--empty--",) - -class Encoder(object): - def __init__(self, k, m): - self.fec = zfec.Encoder(k, m) - - def encode(self, data): - """ - @param data: string - - @return: a sequence of m blocks -- any k of which suffice to - reconstruct the input data - """ - chunksize = div_ceil(len(data), self.fec.k) - l = [ data[i*chunksize:(i+1)*chunksize] + "\x00" * min(chunksize, (((i+1)*chunksize)-len(data))) for i in range(self.fec.k) ] - assert len(l) == self.fec.k, (len(l), self.fec.k,) - assert (not l) or (not [ x for x in l if len(x) != len(l[0]) ]), (len(l), [ ab(x) for x in l ], chunksize, self.fec.k, len(data),) - return self.fec.encode(l) - -class Decoder(object): - def __init__(self, k, m): - self.fec = zfec.Decoder(k, m) - - def decode(self, blocks, sharenums, padlen): - """ - @param padlen: the number of bytes of padding to strip off; Note that - the padlen is always equal to (blocksize times k) minus the length - of data. (Therefore, padlen can be 0.) - """ - data = ''.join(self.fec.decode(blocks, sharenums)) - if padlen: - return data[:-padlen] - else: - return data - -# zfec -- fast forward error correction library with Python interface - -# -# Copyright (C) 2007 Allmydata, Inc. -# Author: Zooko Wilcox-O'Hearn - -# -# This file is part of zfec. - -# -# See README.rst for licensing information. diff --git a/zfec/zfec/fec.c b/zfec/zfec/fec.c deleted file mode 100644 index 751c00a..0000000 --- a/zfec/zfec/fec.c +++ /dev/null @@ -1,572 +0,0 @@ -/** - * zfec -- fast forward error correction library with Python interface - */ - -#include "fec.h" - -#include <stdio.h> -#include <stdlib.h> -#include <string.h> -#include <assert.h> - -/* - * Primitive polynomials - see Lin & Costello, Appendix A, - * and Lee & Messerschmitt, p. 453. - */ -static const char*const Pp="101110001"; - - -/* - * To speed up computations, we have tables for logarithm, exponent and - * inverse of a number. We use a table for multiplication as well (it takes - * 64K, no big deal even on a PDA, especially because it can be - * pre-initialized and put into a ROM!), otherwise we use a table of - * logarithms. In any case the macro gf_mul(x,y) takes care of - * multiplications. - */ - -static gf gf_exp[510]; /* index->poly form conversion table */ -static int gf_log[256]; /* Poly->index form conversion table */ -static gf inverse[256]; /* inverse of field elem. */ - /* inv[\alpha**i]=\alpha**(GF_SIZE-i-1) */ - -/* - * modnn(x) computes x % GF_SIZE, where GF_SIZE is 2**GF_BITS - 1, - * without a slow divide. - */ -static gf -modnn(int x) { - while (x >= 255) { - x -= 255; - x = (x >> 8) + (x & 255); - } - return x; -} - -#define SWAP(a,b,t) {t tmp; tmp=a; a=b; b=tmp;} - -/* - * gf_mul(x,y) multiplies two numbers. It is much faster to use a - * multiplication table.
- * - * USE_GF_MULC, GF_MULC0(c) and GF_ADDMULC(x) can be used when multiplying - * many numbers by the same constant. In this case the first call sets the - * constant, and others perform the multiplications. A value related to the - * multiplication is held in a local variable declared with USE_GF_MULC . See - * usage in _addmul1(). - */ -static gf gf_mul_table[256][256]; - -#define gf_mul(x,y) gf_mul_table[x][y] - -#define USE_GF_MULC register gf * __gf_mulc_ - -#define GF_MULC0(c) __gf_mulc_ = gf_mul_table[c] -#define GF_ADDMULC(dst, x) dst ^= __gf_mulc_[x] - -/* - * Generate GF(2**m) from the irreducible polynomial p(X) in p[0]..p[m] - * Lookup tables: - * index->polynomial form gf_exp[] contains j= \alpha^i; - * polynomial form -> index form gf_log[ j = \alpha^i ] = i - * \alpha=x is the primitive element of GF(2^m) - * - * For efficiency, gf_exp[] has size 2*GF_SIZE, so that a simple - * multiplication of two numbers can be resolved without calling modnn - */ -static void -_init_mul_table(void) { - int i, j; - for (i = 0; i < 256; i++) - for (j = 0; j < 256; j++) - gf_mul_table[i][j] = gf_exp[modnn (gf_log[i] + gf_log[j])]; - - for (j = 0; j < 256; j++) - gf_mul_table[0][j] = gf_mul_table[j][0] = 0; -} - -#define NEW_GF_MATRIX(rows, cols) \ - (gf*)malloc(rows * cols) - -/* - * initialize the data structures used for computations in GF. - */ -static void -generate_gf (void) { - int i; - gf mask; - - mask = 1; /* x ** 0 = 1 */ - gf_exp[8] = 0; /* will be updated at the end of the 1st loop */ - /* - * first, generate the (polynomial representation of) powers of \alpha, - * which are stored in gf_exp[i] = \alpha ** i . - * At the same time build gf_log[gf_exp[i]] = i . - * The first 8 powers are simply bits shifted to the left. - */ - for (i = 0; i < 8; i++, mask <<= 1) { - gf_exp[i] = mask; - gf_log[gf_exp[i]] = i; - /* - * If Pp[i] == 1 then \alpha ** i occurs in poly-repr - * gf_exp[8] = \alpha ** 8 - */ - if (Pp[i] == '1') - gf_exp[8] ^= mask; - } - /* - * now gf_exp[8] = \alpha ** 8 is complete, so can also - * compute its inverse. - */ - gf_log[gf_exp[8]] = 8; - /* - * Poly-repr of \alpha ** (i+1) is given by poly-repr of - * \alpha ** i shifted left one-bit and accounting for any - * \alpha ** 8 term that may occur when poly-repr of - * \alpha ** i is shifted. - */ - mask = 1 << 7; - for (i = 9; i < 255; i++) { - if (gf_exp[i - 1] >= mask) - gf_exp[i] = gf_exp[8] ^ ((gf_exp[i - 1] ^ mask) << 1); - else - gf_exp[i] = gf_exp[i - 1] << 1; - gf_log[gf_exp[i]] = i; - } - /* - * log(0) is not defined, so use a special value - */ - gf_log[0] = 255; - /* set the extended gf_exp values for fast multiply */ - for (i = 0; i < 255; i++) - gf_exp[i + 255] = gf_exp[i]; - - /* - * again special cases. 0 has no inverse. This used to - * be initialized to 255, but it should make no difference - * since noone is supposed to read from here. - */ - inverse[0] = 0; - inverse[1] = 1; - for (i = 2; i <= 255; i++) - inverse[i] = gf_exp[255 - gf_log[i]]; -} - -/* - * Various linear algebra operations that i use often. - */ - -/* - * addmul() computes dst[] = dst[] + c * src[] - * This is used often, so better optimize it! Currently the loop is - * unrolled 16 times, a good value for 486 and pentium-class machines. - * The case c=0 is also optimized, whereas c=1 is not. These - * calls are unfrequent in my typical apps so I did not bother. 
- */ -#define addmul(dst, src, c, sz) \ - if (c != 0) _addmul1(dst, src, c, sz) - -#define UNROLL 16 /* 1, 4, 8, 16 */ -static void -_addmul1(register gf*restrict dst, const register gf*restrict src, gf c, size_t sz) { - USE_GF_MULC; - const gf* lim = &dst[sz - UNROLL + 1]; - - GF_MULC0 (c); - -#if (UNROLL > 1) /* unrolling by 8/16 is quite effective on the pentium */ - for (; dst < lim; dst += UNROLL, src += UNROLL) { - GF_ADDMULC (dst[0], src[0]); - GF_ADDMULC (dst[1], src[1]); - GF_ADDMULC (dst[2], src[2]); - GF_ADDMULC (dst[3], src[3]); -#if (UNROLL > 4) - GF_ADDMULC (dst[4], src[4]); - GF_ADDMULC (dst[5], src[5]); - GF_ADDMULC (dst[6], src[6]); - GF_ADDMULC (dst[7], src[7]); -#endif -#if (UNROLL > 8) - GF_ADDMULC (dst[8], src[8]); - GF_ADDMULC (dst[9], src[9]); - GF_ADDMULC (dst[10], src[10]); - GF_ADDMULC (dst[11], src[11]); - GF_ADDMULC (dst[12], src[12]); - GF_ADDMULC (dst[13], src[13]); - GF_ADDMULC (dst[14], src[14]); - GF_ADDMULC (dst[15], src[15]); -#endif - } -#endif - lim += UNROLL - 1; - for (; dst < lim; dst++, src++) /* final components */ - GF_ADDMULC (*dst, *src); -} - -/* - * computes C = AB where A is n*k, B is k*m, C is n*m - */ -static void -_matmul(gf * a, gf * b, gf * c, unsigned n, unsigned k, unsigned m) { - unsigned row, col, i; - - for (row = 0; row < n; row++) { - for (col = 0; col < m; col++) { - gf *pa = &a[row * k]; - gf *pb = &b[col]; - gf acc = 0; - for (i = 0; i < k; i++, pa++, pb += m) - acc ^= gf_mul (*pa, *pb); - c[row * m + col] = acc; - } - } -} - -/* - * _invert_mat() takes a matrix and produces its inverse - * k is the size of the matrix. - * (Gauss-Jordan, adapted from Numerical Recipes in C) - * Return non-zero if singular. - */ -static void -_invert_mat(gf* src, unsigned k) { - gf c, *p; - unsigned irow = 0; - unsigned icol = 0; - unsigned row, col, i, ix; - - unsigned* indxc = (unsigned*) malloc (k * sizeof(unsigned)); - unsigned* indxr = (unsigned*) malloc (k * sizeof(unsigned)); - unsigned* ipiv = (unsigned*) malloc (k * sizeof(unsigned)); - gf *id_row = NEW_GF_MATRIX (1, k); - - memset (id_row, '\0', k * sizeof (gf)); - /* - * ipiv marks elements already used as pivots. - */ - for (i = 0; i < k; i++) - ipiv[i] = 0; - - for (col = 0; col < k; col++) { - gf *pivot_row; - /* - * Zeroing column 'col', look for a non-zero element. - * First try on the diagonal, if it fails, look elsewhere. - */ - if (ipiv[col] != 1 && src[col * k + col] != 0) { - irow = col; - icol = col; - goto found_piv; - } - for (row = 0; row < k; row++) { - if (ipiv[row] != 1) { - for (ix = 0; ix < k; ix++) { - if (ipiv[ix] == 0) { - if (src[row * k + ix] != 0) { - irow = row; - icol = ix; - goto found_piv; - } - } else - assert (ipiv[ix] <= 1); - } - } - } - found_piv: - ++(ipiv[icol]); - /* - * swap rows irow and icol, so afterwards the diagonal - * element will be correct. Rarely done, not worth - * optimizing. - */ - if (irow != icol) - for (ix = 0; ix < k; ix++) - SWAP (src[irow * k + ix], src[icol * k + ix], gf); - indxr[col] = irow; - indxc[col] = icol; - pivot_row = &src[icol * k]; - c = pivot_row[icol]; - assert (c != 0); - if (c != 1) { /* otherwhise this is a NOP */ - /* - * this is done often , but optimizing is not so - * fruitful, at least in the obvious ways (unrolling) - */ - c = inverse[c]; - pivot_row[icol] = 1; - for (ix = 0; ix < k; ix++) - pivot_row[ix] = gf_mul (c, pivot_row[ix]); - } - /* - * from all rows, remove multiples of the selected row - * to zero the relevant entry (in fact, the entry is not zero - * because we know it must be zero). 
- * (Here, if we know that the pivot_row is the identity, - * we can optimize the addmul). - */ - id_row[icol] = 1; - if (memcmp (pivot_row, id_row, k * sizeof (gf)) != 0) { - for (p = src, ix = 0; ix < k; ix++, p += k) { - if (ix != icol) { - c = p[icol]; - p[icol] = 0; - addmul (p, pivot_row, c, k); - } - } - } - id_row[icol] = 0; - } /* done all columns */ - for (col = k; col > 0; col--) - if (indxr[col-1] != indxc[col-1]) - for (row = 0; row < k; row++) - SWAP (src[row * k + indxr[col-1]], src[row * k + indxc[col-1]], gf); -} - -/* - * fast code for inverting a vandermonde matrix. - * - * NOTE: It assumes that the matrix is not singular and _IS_ a vandermonde - * matrix. Only uses the second column of the matrix, containing the p_i's. - * - * Algorithm borrowed from "Numerical recipes in C" -- sec.2.8, but largely - * revised for my purposes. - * p = coefficients of the matrix (p_i) - * q = values of the polynomial (known) - */ -void -_invert_vdm (gf* src, unsigned k) { - unsigned i, j, row, col; - gf *b, *c, *p; - gf t, xx; - - if (k == 1) /* degenerate case, matrix must be p^0 = 1 */ - return; - /* - * c holds the coefficient of P(x) = Prod (x - p_i), i=0..k-1 - * b holds the coefficient for the matrix inversion - */ - c = NEW_GF_MATRIX (1, k); - b = NEW_GF_MATRIX (1, k); - - p = NEW_GF_MATRIX (1, k); - - for (j = 1, i = 0; i < k; i++, j += k) { - c[i] = 0; - p[i] = src[j]; /* p[i] */ - } - /* - * construct coeffs. recursively. We know c[k] = 1 (implicit) - * and start P_0 = x - p_0, then at each stage multiply by - * x - p_i generating P_i = x P_{i-1} - p_i P_{i-1} - * After k steps we are done. - */ - c[k - 1] = p[0]; /* really -p(0), but x = -x in GF(2^m) */ - for (i = 1; i < k; i++) { - gf p_i = p[i]; /* see above comment */ - for (j = k - 1 - (i - 1); j < k - 1; j++) - c[j] ^= gf_mul (p_i, c[j + 1]); - c[k - 1] ^= p_i; - } - - for (row = 0; row < k; row++) { - /* - * synthetic division etc. - */ - xx = p[row]; - t = 1; - b[k - 1] = 1; /* this is in fact c[k] */ - for (i = k - 1; i > 0; i--) { - b[i-1] = c[i] ^ gf_mul (xx, b[i]); - t = gf_mul (xx, t) ^ b[i-1]; - } - for (col = 0; col < k; col++) - src[col * k + row] = gf_mul (inverse[t], b[col]); - } - free (c); - free (b); - free (p); - return; -} - -static int fec_initialized = 0; -static void -init_fec (void) { - generate_gf(); - _init_mul_table(); - fec_initialized = 1; -} - -/* - * This section contains the proper FEC encoding/decoding routines. - * The encoding matrix is computed starting with a Vandermonde matrix, - * and then transforming it into a systematic matrix. - */ - -#define FEC_MAGIC 0xFECC0DEC - -void -fec_free (fec_t *p) { - assert (p != NULL && p->magic == (((FEC_MAGIC ^ p->k) ^ p->n) ^ (unsigned long) (p->enc_matrix))); - free (p->enc_matrix); - free (p); -} - -fec_t * -fec_new(unsigned short k, unsigned short n) { - unsigned row, col; - gf *p, *tmp_m; - - fec_t *retval; - - if (fec_initialized == 0) - init_fec (); - - retval = (fec_t *) malloc (sizeof (fec_t)); - retval->k = k; - retval->n = n; - retval->enc_matrix = NEW_GF_MATRIX (n, k); - retval->magic = ((FEC_MAGIC ^ k) ^ n) ^ (unsigned long) (retval->enc_matrix); - tmp_m = NEW_GF_MATRIX (n, k); - /* - * fill the matrix with powers of field elements, starting from 0. - * The first row is special, cannot be computed with exp. table. 
- */ - tmp_m[0] = 1; - for (col = 1; col < k; col++) - tmp_m[col] = 0; - for (p = tmp_m + k, row = 0; row < n - 1; row++, p += k) - for (col = 0; col < k; col++) - p[col] = gf_exp[modnn (row * col)]; - - /* - * quick code to build systematic matrix: invert the top - * k*k vandermonde matrix, multiply right the bottom n-k rows - * by the inverse, and construct the identity matrix at the top. - */ - _invert_vdm (tmp_m, k); /* much faster than _invert_mat */ - _matmul(tmp_m + k * k, tmp_m, retval->enc_matrix + k * k, n - k, k, k); - /* - * the upper matrix is I so do not bother with a slow multiply - */ - memset (retval->enc_matrix, '\0', k * k * sizeof (gf)); - for (p = retval->enc_matrix, col = 0; col < k; col++, p += k + 1) - *p = 1; - free (tmp_m); - - return retval; -} - -/* To make sure that we stay within cache in the inner loops of fec_encode(). (It would - probably help to also do this for fec_decode().) */ -#ifndef STRIDE -#define STRIDE 8192 -#endif - -void -fec_encode(const fec_t* code, const gf*restrict const*restrict const src, gf*restrict const*restrict const fecs, const unsigned*restrict const block_nums, size_t num_block_nums, size_t sz) { - unsigned char i, j; - size_t k; - unsigned fecnum; - const gf* p; - - for (k = 0; k < sz; k += STRIDE) { - size_t stride = ((sz-k) < STRIDE)?(sz-k):STRIDE; - for (i=0; i<num_block_nums; i++) { - fecnum=block_nums[i]; - assert (fecnum >= code->k); - memset(fecs[i]+k, 0, stride); - p = &(code->enc_matrix[fecnum * code->k]); - for (j = 0; j < code->k; j++) - addmul(fecs[i]+k, src[j]+k, p[j], stride); - } - } -} - -/** - * Build decode matrix into some memory space. - * - * @param matrix a space allocated for a k by k matrix - */ -void -build_decode_matrix_into_space(const fec_t*restrict const code, const unsigned*const restrict index, const unsigned k, gf*restrict const matrix) { - unsigned char i; - gf* p; - for (i=0, p=matrix; i < k; i++, p += k) { - if (index[i] < k) { - memset(p, 0, k); - p[i] = 1; - } else { - memcpy(p, &(code->enc_matrix[index[i] * code->k]), k); - } - } - _invert_mat (matrix, k); -} - -void -fec_decode(const fec_t* code, const gf*restrict const*restrict const inpkts, gf*restrict const*restrict const outpkts, const unsigned*restrict const index, size_t sz) { - gf* m_dec = (gf*)alloca(code->k * code->k); - unsigned char outix=0; - unsigned char row=0; - unsigned char col=0; - build_decode_matrix_into_space(code, index, code->k, m_dec); - - for (row=0; row<code->k; row++) { - assert ((index[row] >= code->k) || (index[row] == row)); /* If the block whose number is i is present, then it is required to be in the i'th element. */ - if (index[row] >= code->k) { - memset(outpkts[outix], 0, sz); - for (col=0; col < code->k; col++) - addmul(outpkts[outix], inpkts[col], m_dec[row * code->k + col], sz); - outix++; - } - } -} - -/** - * zfec -- fast forward error correction library with Python interface - * - * Copyright (C) 2007-2010 Zooko Wilcox-O'Hearn - * Author: Zooko Wilcox-O'Hearn - * - * This file is part of zfec. - * - * See README.rst for licensing information. - */ - -/* - * This work is derived from the "fec" software by Luigi Rizzo, et al., the - * copyright notice and licence terms of which are included below for reference.
* fec.c -- forward error correction based on Vandermonde matrices 980624 (C) - * 1997-98 Luigi Rizzo (luigi@iet.unipi.it) - * - * Portions derived from code by Phil Karn (karn@ka9q.ampr.org), - * Robert Morelos-Zaragoza (robert@spectra.eng.hawaii.edu) and Hari - * Thirumoorthy (harit@spectra.eng.hawaii.edu), Aug 1995 - * - * Modifications by Dan Rubenstein (see Modifications.txt for - * their description). - * Modifications (C) 1998 Dan Rubenstein (drubenst@cs.umass.edu) - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions - * are met: - * - * 1. Redistributions of source code must retain the above copyright - * notice, this list of conditions and the following disclaimer. - * 2. Redistributions in binary form must reproduce the above - * copyright notice, this list of conditions and the following - * disclaimer in the documentation and/or other materials - * provided with the distribution. - * - * THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND - * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, - * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A - * PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS - * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, - * OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, - * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, - * OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY - * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR - * TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT - * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY - * OF SUCH DAMAGE. - */ diff --git a/zfec/zfec/fec.h b/zfec/zfec/fec.h deleted file mode 100644 index 249fe66..0000000 --- a/zfec/zfec/fec.h +++ /dev/null @@ -1,111 +0,0 @@ -/** - * zfec -- fast forward error correction library with Python interface - * - * See README.rst for documentation. - */ - -#include <stddef.h> - -typedef unsigned char gf; - -typedef struct { - unsigned long magic; - unsigned short k, n; /* parameters of the code */ - gf* enc_matrix; -} fec_t; - -#if defined(_MSC_VER) -// actually, some of the flavors (i.e. Enterprise) do support restrict -//#define restrict __restrict -#define restrict -#endif - -/** - * @param k the number of blocks required to reconstruct - * @param m the total number of blocks created - */ -fec_t* fec_new(unsigned short k, unsigned short m); -void fec_free(fec_t* p); - -/** - * @param inpkts the "primary blocks" i.e. the chunks of the input data - * @param fecs buffers into which the secondary blocks will be written - * @param block_nums the numbers of the desired check blocks (the id >= k) which fec_encode() will produce and store into the buffers of the fecs parameter - * @param num_block_nums the length of the block_nums array - * @param sz size of a packet in bytes - */ -void fec_encode(const fec_t* code, const gf*restrict const*restrict const src, gf*restrict const*restrict const fecs, const unsigned*restrict const block_nums, size_t num_block_nums, size_t sz); - -/** - * @param inpkts an array of packets (size k); If a primary block, i, is present then it must be at index i. Secondary blocks can appear anywhere.
- * @param outpkts an array of buffers into which the reconstructed output packets will be written (only packets which are not present in the inpkts input will be reconstructed and written to outpkts) - * @param index an array of the blocknums of the packets in inpkts - * @param sz size of a packet in bytes - */ -void fec_decode(const fec_t* code, const gf*restrict const*restrict const inpkts, gf*restrict const*restrict const outpkts, const unsigned*restrict const index, size_t sz); - -#if defined(_MSC_VER) -#define alloca _alloca -#else -#ifdef __GNUC__ -#ifndef alloca -#define alloca(x) __builtin_alloca(x) -#endif -#else -#include <alloca.h> -#endif -#endif - -/** - * zfec -- fast forward error correction library with Python interface - * - * Copyright (C) 2007-2008 Allmydata, Inc. - * Author: Zooko Wilcox-O'Hearn - * - * This file is part of zfec. - * - * See README.rst for licensing information. - */ - -/* - * Much of this work is derived from the "fec" software by Luigi Rizzo, et - * al., the copyright notice and licence terms of which are included below - * for reference. - * - * fec.h -- forward error correction based on Vandermonde matrices - * 980614 - * (C) 1997-98 Luigi Rizzo (luigi@iet.unipi.it) - * - * Portions derived from code by Phil Karn (karn@ka9q.ampr.org), - * Robert Morelos-Zaragoza (robert@spectra.eng.hawaii.edu) and Hari - * Thirumoorthy (harit@spectra.eng.hawaii.edu), Aug 1995 - * - * Modifications by Dan Rubenstein (see Modifications.txt for - * their description). - * Modifications (C) 1998 Dan Rubenstein (drubenst@cs.umass.edu) - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions - * are met: - - * 1. Redistributions of source code must retain the above copyright - * notice, this list of conditions and the following disclaimer. - * 2. Redistributions in binary form must reproduce the above - * copyright notice, this list of conditions and the following - * disclaimer in the documentation and/or other materials - * provided with the distribution. - * - * THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND - * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, - * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A - * PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS - * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, - * OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, - * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, - * OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY - * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR - * TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT - * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY - * OF SUCH DAMAGE. - */ -
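
The placement contract spelled out above for fec_decode() (a primary block numbered i must sit at index i; secondary blocks may sit anywhere) is the same contract the Python Decoder in _fecmodule.c enforces. A minimal sketch of it through the Python API follows; it assumes that Encoder.encode() called without an explicit block-number list returns all m blocks, and the data and parameters are toy values:

    import zfec

    k, m = 3, 5
    segs = ["aa", "bb", "cc"]                  # the k equal-length primary blocks
    blocks = zfec.Encoder(k, m).encode(segs)   # blocks 0..2 are segs; 3 and 4 are check blocks

    # Lose primary block 1 and recover it from check block 3. Primary
    # blocks 0 and 2 stay at indexes 0 and 2, as the contract requires.
    out = zfec.Decoder(k, m).decode([blocks[0], blocks[3], blocks[2]], (0, 3, 2))
    assert "".join(out) == "aabbcc"
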
diff --git a/zfec/zfec/filefec.py b/zfec/zfec/filefec.py deleted file mode 100644 index 48f3d9d..0000000 --- a/zfec/zfec/filefec.py +++ /dev/null @@ -1,504 +0,0 @@ -import easyfec, zfec -from pyutil import fileutil -from pyutil.mathutil import pad_size, log_ceil - -import array, os, struct - -CHUNKSIZE = 4096 - -from base64 import b32encode -def ab(x): # debuggery - if len(x) >= 3: - return "%s:%s" % (len(x), b32encode(x[-3:]),) - elif len(x) == 2: - return "%s:%s" % (len(x), b32encode(x[-2:]),) - elif len(x) == 1: - return "%s:%s" % (len(x), b32encode(x[-1:]),) - elif len(x) == 0: - return "%s:%s" % (len(x), "--empty--",) - -class InsufficientShareFilesError(zfec.Error): - def __init__(self, k, kb, *args, **kwargs): - zfec.Error.__init__(self, *args, **kwargs) - self.k = k - self.kb = kb - - def __repr__(self): - return "Insufficient share files -- %d share files are required to recover this file, but only %d were given" % (self.k, self.kb,) - - def __str__(self): - return self.__repr__() - -class CorruptedShareFilesError(zfec.Error): - pass - -def _build_header(m, k, pad, sh): - """ - @param m: the total number of shares; 1 <= m <= 256 - @param k: the number of shares required to reconstruct; 1 <= k <= m - @param pad: the number of bytes of padding added to the file before encoding; 0 <= pad < k - @param sh: the shnum of this share; 0 <= sh < m - - @return: a compressed string encoding m, k, pad, and sh - """ - assert m >= 1 - assert m <= 2**8 - assert k >= 1 - assert k <= m - assert pad >= 0 - assert pad < k - - assert sh >= 0 - assert sh < m - - bitsused = 0 - val = 0 - - val |= (m - 1) - bitsused += 8 # the first 8 bits always encode m - - kbits = log_ceil(m, 2) # num bits needed to store all possible values of k - val <<= kbits - bitsused += kbits - - val |= (k - 1) - - padbits = log_ceil(k, 2) # num bits needed to store all possible values of pad - val <<= padbits - bitsused += padbits - - val |= pad - - shnumbits = log_ceil(m, 2) # num bits needed to store all possible values of shnum - val <<= shnumbits - bitsused += shnumbits - - val |= sh - - assert bitsused >= 8, bitsused - assert bitsused <= 32, bitsused - - if bitsused <= 16: - val <<= (16-bitsused) - cs = struct.pack('>H', val) - assert cs[:-2] == '\x00' * (len(cs)-2) - return cs[-2:] - if bitsused <= 24: - val <<= (24-bitsused) - cs = struct.pack('>I', val) - assert cs[:-3] == '\x00' * (len(cs)-3) - return cs[-3:] - else: - val <<= (32-bitsused) - cs = struct.pack('>I', val) - assert cs[:-4] == '\x00' * (len(cs)-4) - return cs[-4:] - -def MASK(bits): - return (1<<bits)-1 - -def _parse_header(inf): - """ - @param inf: an object which I can call read(1) on to get another byte - - @return: tuple of (m, k, pad, sh,); side-effect: the first one to four bytes of inf will be read - """ - # The first 8 bits always encode m. - ch = inf.read(1) - if not ch: - raise CorruptedShareFilesError("Share files were corrupted -- share file %r didn't have a complete metadata header at the front. Perhaps the file was truncated." % (inf.name,)) - byte = struct.unpack(">B", ch)[0] - m = byte + 1 - - # The next few bits encode k. - kbits = log_ceil(m, 2) # num bits needed to store all possible values of k - b2_bits_left = 8-kbits - kbitmask = MASK(kbits) << b2_bits_left - ch = inf.read(1) - if not ch: - raise CorruptedShareFilesError("Share files were corrupted -- share file %r didn't have a complete metadata header at the front. Perhaps the file was truncated." % (inf.name,)) - byte = struct.unpack(">B", ch)[0] - k = ((byte & kbitmask) >> b2_bits_left) + 1 - - shbits = log_ceil(m, 2) # num bits needed to store all possible values of shnum - padbits = log_ceil(k, 2) # num bits needed to store all possible values of pad - - val = byte & (~kbitmask) - - needed_padbits = padbits - b2_bits_left - if needed_padbits > 0: - ch = inf.read(1) - if not ch: - raise CorruptedShareFilesError("Share files were corrupted -- share file %r didn't have a complete metadata header at the front. Perhaps the file was truncated." % (inf.name,)) - byte = struct.unpack(">B", ch)[0] - val <<= 8 - val |= byte - needed_padbits -= 8 - assert needed_padbits <= 0 - extrabits = -needed_padbits - pad = val >> extrabits - val &= MASK(extrabits) - - needed_shbits = shbits - extrabits - if needed_shbits > 0: - ch = inf.read(1) - if not ch: - raise CorruptedShareFilesError("Share files were corrupted -- share file %r didn't have a complete metadata header at the front. Perhaps the file was truncated."
% (inf.name,)) - byte = struct.unpack(">B", ch)[0] - val <<= 8 - val |= byte - needed_shbits -= 8 - assert needed_shbits <= 0 - - gotshbits = -needed_shbits - - sh = val >> gotshbits - - return (m, k, pad, sh,) - -FORMAT_FORMAT = "%%s.%%0%dd_%%0%dd%%s" -RE_FORMAT = "%s.[0-9]+_[0-9]+%s" -def encode_to_files(inf, fsize, dirname, prefix, k, m, suffix=".fec", overwrite=False, verbose=False): - """ - Encode inf, writing the shares to specially named, newly created files. - - @param fsize: calling read() on inf must yield fsize bytes of data and - then raise an EOFError - @param dirname: the name of the directory into which the sharefiles will - be written - """ - mlen = len(str(m)) - format = FORMAT_FORMAT % (mlen, mlen,) - - padbytes = pad_size(fsize, k) - - fns = [] - fs = [] - try: - for shnum in range(m): - hdr = _build_header(m, k, padbytes, shnum) - - fn = os.path.join(dirname, format % (prefix, shnum, m, suffix,)) - if verbose: - print "Creating share file %r..." % (fn,) - if overwrite: - f = open(fn, "wb") - else: - flags = os.O_WRONLY|os.O_CREAT|os.O_EXCL | (hasattr(os, 'O_BINARY') and os.O_BINARY) - fd = os.open(fn, flags) - f = os.fdopen(fd, "wb") - f.write(hdr) - fs.append(f) - fns.append(fn) - sumlen = [0] - def cb(blocks, length): - assert len(blocks) == len(fs) - oldsumlen = sumlen[0] - sumlen[0] += length - if verbose: - if int((float(oldsumlen) / fsize) * 10) != int((float(sumlen[0]) / fsize) * 10): - print str(int((float(sumlen[0]) / fsize) * 10) * 10) + "% ...", - - if sumlen[0] > fsize: - raise IOError("Wrong file size -- possibly the size of the file changed during encoding. Original size: %d, observed size at least: %s" % (fsize, sumlen[0],)) - for i in range(len(blocks)): - data = blocks[i] - fs[i].write(data) - length -= len(data) - - encode_file_stringy_easyfec(inf, cb, k, m, chunksize=4096) - except EnvironmentError, le: - print "Cannot complete because of exception: " - print le - print "Cleaning up..." - # clean up - while fs: - f = fs.pop() - f.close() ; del f - fn = fns.pop() - if verbose: - print "Cleaning up: trying to remove %r..." % (fn,) - fileutil.remove_if_possible(fn) - return 1 - if verbose: - print - print "Done!" - return 0 - -# Note: if you really prefer base-2 and you change this code, then please -# denote 2^20 as "MiB" instead of "MB" in order to avoid ambiguity. See: -# http://en.wikipedia.org/wiki/Megabyte -# Thanks. -MILLION_BYTES=10**6 - -def decode_from_files(outf, infiles, verbose=False): - """ - Decode from the first k files in infiles, writing the results to outf. 
- """ - assert len(infiles) >= 2 - infs = [] - shnums = [] - m = None - k = None - padlen = None - - byteswritten = 0 - for f in infiles: - (nm, nk, npadlen, shnum,) = _parse_header(f) - if not (m is None or m == nm): - raise CorruptedShareFilesError("Share files were corrupted -- share file %r said that m was %s but another share file previously said that m was %s" % (f.name, nm, m,)) - m = nm - if not (k is None or k == nk): - raise CorruptedShareFilesError("Share files were corrupted -- share file %r said that k was %s but another share file previously said that k was %s" % (f.name, nk, k,)) - if k > len(infiles): - raise InsufficientShareFilesError(k, len(infiles)) - k = nk - if not (padlen is None or padlen == npadlen): - raise CorruptedShareFilesError("Share files were corrupted -- share file %r said that pad length was %s but another share file previously said that pad length was %s" % (f.name, npadlen, padlen,)) - padlen = npadlen - - infs.append(f) - shnums.append(shnum) - - if len(infs) == k: - break - - dec = easyfec.Decoder(k, m) - - while True: - chunks = [ inf.read(CHUNKSIZE) for inf in infs ] - if [ch for ch in chunks if len(ch) != len(chunks[-1])]: - raise CorruptedShareFilesError("Share files were corrupted -- all share files are required to be the same length, but they weren't.") - - if len(chunks[-1]) == CHUNKSIZE: - # Then this was a full read, so we're still in the sharefiles. - resultdata = dec.decode(chunks, shnums, padlen=0) - outf.write(resultdata) - byteswritten += len(resultdata) - if verbose: - if ((byteswritten - len(resultdata)) / (10*MILLION_BYTES)) != (byteswritten / (10*MILLION_BYTES)): - print str(byteswritten / MILLION_BYTES) + " MB ...", - else: - # Then this was a short read, so we've reached the end of the sharefiles. - resultdata = dec.decode(chunks, shnums, padlen) - outf.write(resultdata) - return # Done. - if verbose: - print - print "Done!" - -def encode_file(inf, cb, k, m, chunksize=4096): - """ - Read in the contents of inf, encode, and call cb with the results. - - First, k "input blocks" will be read from inf, each input block being of - size chunksize. Then these k blocks will be encoded into m "result - blocks". Then cb will be invoked, passing a list of the m result blocks - as its first argument, and the length of the encoded data as its second - argument. (The length of the encoded data is always equal to k*chunksize, - until the last iteration, when the end of the file has been reached and - less than k*chunksize bytes could be read from the file.) This procedure - is iterated until the end of the file is reached, in which case the space - of the input blocks that is unused is filled with zeroes before encoding. - - Note that the sequence passed in calls to cb() contains mutable array - objects in its first k elements whose contents will be overwritten when - the next segment is read from the input file. Therefore the - implementation of cb() has to either be finished with those first k arrays - before returning, or if it wants to keep the contents of those arrays for - subsequent use after it has returned then it must make a copy of them to - keep. 
- - @param inf the file object from which to read the data - @param cb the callback to be invoked with the results - @param k the number of shares required to reconstruct the file - @param m the total number of shares created - @param chunksize how much data to read from inf for each of the k input - blocks - """ - enc = zfec.Encoder(k, m) - l = tuple([ array.array('c') for i in range(k) ]) - indatasize = k*chunksize # will be reset to shorter upon EOF - eof = False - ZEROES=array.array('c', ['\x00'])*chunksize - while not eof: - # This loop body executes once per segment. - i = 0 - while (i= 3: - return "%s:%s" % (len(x), b32encode(x[-3:]),) - elif len(x) == 2: - return "%s:%s" % (len(x), b32encode(x[-2:]),) - elif len(x) == 1: - return "%s:%s" % (len(x), b32encode(x[-1:]),) - elif len(x) == 0: - return "%s:%s" % (len(x), "--empty--",) - -def randstr(n): - return ''.join(map(chr, map(random.randrange, [0]*n, [256]*n))) - -def _h(k, m, ss): - encer = zfec.Encoder(k, m) - nums_and_blocks = list(enumerate(encer.encode(ss))) - assert isinstance(nums_and_blocks, list), nums_and_blocks - assert len(nums_and_blocks) == m, (len(nums_and_blocks), m,) - nums_and_blocks = random.sample(nums_and_blocks, k) - blocks = [ x[1] for x in nums_and_blocks ] - nums = [ x[0] for x in nums_and_blocks ] - decer = zfec.Decoder(k, m) - decoded = decer.decode(blocks, nums) - assert len(decoded) == len(ss), (len(decoded), len(ss),) - assert tuple([str(s) for s in decoded]) == tuple([str(s) for s in ss]), (tuple([ab(str(s)) for s in decoded]), tuple([ab(str(s)) for s in ss]),) - -def _help_test_random(): - m = random.randrange(1, 257) - k = random.randrange(1, m+1) - l = random.randrange(0, 2**9) - ss = [ randstr(l/k) for x in range(k) ] - _h(k, m, ss) - -def _help_test_random_with_l(l): - m = random.randrange(1, 257) - k = random.randrange(1, m+1) - ss = [ randstr(l/k) for x in range(k) ] - _h(k, m, ss) - -def _h_easy(k, m, s): - encer = zfec.easyfec.Encoder(k, m) - nums_and_blocks = list(enumerate(encer.encode(s))) - assert isinstance(nums_and_blocks, list), nums_and_blocks - assert len(nums_and_blocks) == m, (len(nums_and_blocks), m,) - nums_and_blocks = random.sample(nums_and_blocks, k) - blocks = [ x[1] for x in nums_and_blocks ] - nums = [ x[0] for x in nums_and_blocks ] - decer = zfec.easyfec.Decoder(k, m) - - decodeds = decer.decode(blocks, nums, padlen=k*len(blocks[0]) - len(s)) - assert len(decodeds) == len(s), (ab(decodeds), ab(s), k, m) - assert decodeds == s, (ab(decodeds), ab(s),) - -def _help_test_random_easy(): - m = random.randrange(1, 257) - k = random.randrange(1, m+1) - l = random.randrange(0, 2**9) - s = randstr(l) - _h_easy(k, m, s) - -def _help_test_random_with_l_easy(l): - m = random.randrange(1, 257) - k = random.randrange(1, m+1) - s = randstr(l) - _h_easy(k, m, s) - -class ZFecTest(unittest.TestCase): - def test_instantiate_encoder_no_args(self): - try: - e = zfec.Encoder() - except TypeError: - # Okay, so that's because we're required to pass constructor args. - pass - else: - # Oops, it should have raised an exception. - self.fail("Should have raised exception from incorrect arguments to constructor.") - - def test_instantiate_decoder_no_args(self): - try: - e = zfec.Decoder() - except TypeError: - # Okay, so that's because we're required to pass constructor args. - pass - else: - # Oops, it should have raised an exception. 
- self.fail("Should have raised exception from incorrect arguments to constructor.") - - def test_from_agl_c(self): - self.failUnless(zfec._fec.test_from_agl()) - - def test_from_agl_py(self): - e = zfec.Encoder(3, 5) - b0 = '\x01'*8 ; b1 = '\x02'*8 ; b2 = '\x03'*8 - # print "_from_py before encoding:" - # print "b0: %s, b1: %s, b2: %s" % tuple(base64.b16encode(x) for x in [b0, b1, b2]) - - b3, b4 = e.encode([b0, b1, b2], (3, 4)) - # print "after encoding:" - # print "b3: %s, b4: %s" % tuple(base64.b16encode(x) for x in [b3, b4]) - - d = zfec.Decoder(3, 5) - r0, r1, r2 = d.decode((b2, b3, b4), (1, 2, 3)) - - # print "after decoding:" - # print "b0: %s, b1: %s" % tuple(base64.b16encode(x) for x in [b0, b1]) - - def test_small(self): - for i in range(16): - _help_test_random_with_l(i) - if VERBOSE: - print "%d randomized tests pass." % (i+1) - - def test_random(self): - for i in range(3): - _help_test_random() - if VERBOSE: - print "%d randomized tests pass." % (i+1) - - def test_bad_args_construct_decoder(self): - try: - zfec.Decoder(-1, -1) - except zfec.Error, e: - assert "argument is required to be greater than or equal to 1" in str(e), e - else: - self.fail("Should have gotten an exception from out-of-range arguments.") - - try: - zfec.Decoder(1, 257) - except zfec.Error, e: - assert "argument is required to be less than or equal to 256" in str(e), e - else: - self.fail("Should have gotten an exception from out-of-range arguments.") - - try: - zfec.Decoder(3, 2) - except zfec.Error, e: - assert "first argument is required to be less than or equal to the second argument" in str(e), e - else: - self.fail("Should have gotten an exception from out-of-range arguments.") - - def test_bad_args_construct_encoder(self): - try: - zfec.Encoder(-1, -1) - except zfec.Error, e: - assert "argument is required to be greater than or equal to 1" in str(e), e - else: - self.fail("Should have gotten an exception from out-of-range arguments.") - - try: - zfec.Encoder(1, 257) - except zfec.Error, e: - assert "argument is required to be less than or equal to 256" in str(e), e - else: - self.fail("Should have gotten an exception from out-of-range arguments.") - - def test_bad_args_dec(self): - decer = zfec.Decoder(2, 4) - - try: - decer.decode(98, []) # first argument is not a sequence - except TypeError, e: - assert "First argument was not a sequence" in str(e), e - else: - self.fail("Should have gotten TypeError for wrong type of second argument.") - - try: - decer.decode(["a", "b", ], ["c", "d",]) - except zfec.Error, e: - assert "Precondition violation: second argument is required to contain int" in str(e), e - else: - self.fail("Should have gotten zfec.Error for wrong type of second argument.") - - try: - decer.decode(["a", "b", ], 98) # not a sequence at all - except TypeError, e: - assert "Second argument was not a sequence" in str(e), e - else: - self.fail("Should have gotten TypeError for wrong type of second argument.") - -class EasyFecTest(unittest.TestCase): - def test_small(self): - for i in range(16): - _help_test_random_with_l_easy(i) - if VERBOSE: - print "%d randomized tests pass." % (i+1) - - def test_random(self): - for i in range(3): - _help_test_random_easy() - if VERBOSE: - print "%d randomized tests pass." 
% (i+1) - - def test_bad_args_dec(self): - decer = zfec.easyfec.Decoder(2, 4) - - try: - decer.decode(98, [0, 1], 0) # first argument is not a sequence - except TypeError, e: - assert "First argument was not a sequence" in str(e), e - else: - self.fail("Should have gotten TypeError for wrong type of second argument.") - - try: - decer.decode("ab", ["c", "d",], 0) - except zfec.Error, e: - assert "Precondition violation: second argument is required to contain int" in str(e), e - else: - self.fail("Should have gotten zfec.Error for wrong type of second argument.") - - try: - decer.decode("ab", 98, 0) # not a sequence at all - except TypeError, e: - assert "Second argument was not a sequence" in str(e), e - else: - self.fail("Should have gotten TypeError for wrong type of second argument.") - -class FileFec(unittest.TestCase): - def test_filefec_header(self): - for m in [1, 2, 3, 5, 7, 9, 11, 17, 19, 33, 35, 65, 66, 67, 129, 130, 131, 254, 255, 256,]: - for k in [1, 2, 3, 5, 9, 17, 33, 65, 129, 255, 256,]: - if k >= m: - continue - for pad in [0, 1, k-1,]: - if pad >= k: - continue - for sh in [0, 1, m-1,]: - if sh >= m: - continue - h = zfec.filefec._build_header(m, k, pad, sh) - hio = cStringIO.StringIO(h) - (rm, rk, rpad, rsh,) = zfec.filefec._parse_header(hio) - assert (rm, rk, rpad, rsh,) == (m, k, pad, sh,), h - - def _help_test_filefec(self, teststr, k, m, numshs=None): - if numshs == None: - numshs = m - - TESTFNAME = "testfile.txt" - PREFIX = "test" - SUFFIX = ".fec" - - fsize = len(teststr) - - tempdir = fileutil.NamedTemporaryDirectory(cleanup=True) - try: - tempf = tempdir.file(TESTFNAME, 'w+b') - tempf.write(teststr) - tempf.flush() - tempf.seek(0) - - # encode the file - zfec.filefec.encode_to_files(tempf, fsize, tempdir.name, PREFIX, k, m, SUFFIX, verbose=VERBOSE) - - # select some share files - RE=re.compile(zfec.filefec.RE_FORMAT % (PREFIX, SUFFIX,)) - fns = os.listdir(tempdir.name) - assert len(fns) >= m, (fns, tempdir, tempdir.name,) - sharefs = [ open(os.path.join(tempdir.name, fn), "rb") for fn in fns if RE.match(fn) ] - for sharef in sharefs: - tempdir.register_file(sharef) - random.shuffle(sharefs) - del sharefs[numshs:] - - # decode from the share files - outf = tempdir.file('recovered-testfile.txt', 'w+b') - zfec.filefec.decode_from_files(outf, sharefs, verbose=VERBOSE) - outf.flush() - outf.seek(0) - recovereddata = outf.read() - assert recovereddata == teststr, (ab(recovereddata), ab(teststr),) - finally: - tempdir.shutdown() - - def test_filefec_all_shares(self): - return self._help_test_filefec("Yellow Whirled!", 3, 8) - - def test_filefec_all_shares_1_b(self): - return self._help_test_filefec("Yellow Whirled!", 4, 16) - - def test_filefec_all_shares_2(self): - return self._help_test_filefec("Yellow Whirled", 3, 8) - - def test_filefec_all_shares_2_b(self): - return self._help_test_filefec("Yellow Whirled", 4, 16) - - def test_filefec_all_shares_3(self): - return self._help_test_filefec("Yellow Whirle", 3, 8) - - def test_filefec_all_shares_3_b(self): - return self._help_test_filefec("Yellow Whirle", 4, 16) - - def test_filefec_all_shares_with_padding(self, noisy=VERBOSE): - return self._help_test_filefec("Yellow Whirled!A", 3, 8) - - def test_filefec_min_shares_with_padding(self, noisy=VERBOSE): - return self._help_test_filefec("Yellow Whirled!A", 3, 8, numshs=3) - - def test_filefec_min_shares_with_crlf(self, noisy=VERBOSE): - return self._help_test_filefec("llow Whirled!A\r\n", 3, 8, numshs=3) - - def test_filefec_min_shares_with_lf(self, noisy=VERBOSE): - 
return self._help_test_filefec("Yellow Whirled!A\n", 3, 8, numshs=3) - - def test_filefec_min_shares_with_lflf(self, noisy=VERBOSE): - return self._help_test_filefec("Yellow Whirled!A\n\n", 3, 8, numshs=3) - - def test_filefec_min_shares_with_crcrlflf(self, noisy=VERBOSE): - return self._help_test_filefec("Yellow Whirled!A\r\r\n\n", 3, 8, numshs=3) - - -class Cmdline(unittest.TestCase): - def test_basic(self, noisy=VERBOSE): - tempdir = fileutil.NamedTemporaryDirectory(cleanup=True) - fo = tempdir.file("test.data", "w+b") - fo.write("WHEHWHJEKWAHDLJAWDHWALKDHA") - - import sys - realargv = sys.argv - try: - DEFAULT_M=8 - DEFAULT_K=3 - sys.argv = ["zfec", os.path.join(tempdir.name, "test.data"),] - - retcode = zfec.cmdline_zfec.main() - assert retcode == 0, retcode - - RE=re.compile(zfec.filefec.RE_FORMAT % ('test.data', ".fec",)) - fns = os.listdir(tempdir.name) - assert len(fns) >= DEFAULT_M, (fns, DEFAULT_M, tempdir, tempdir.name,) - sharefns = [ os.path.join(tempdir.name, fn) for fn in fns if RE.match(fn) ] - random.shuffle(sharefns) - del sharefns[DEFAULT_K:] - - sys.argv = ["zunfec",] - sys.argv.extend(sharefns) - sys.argv.extend(['-o', os.path.join(tempdir.name, 'test.data-recovered'),]) - - retcode = zfec.cmdline_zunfec.main() - assert retcode == 0, retcode - import filecmp - assert filecmp.cmp(os.path.join(tempdir.name, 'test.data'), os.path.join(tempdir.name, 'test.data-recovered')) - finally: - sys.argv = realargv - -if __name__ == "__main__": - unittest.main()
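
To close, the padding arithmetic that the easyfec tests above exercise, seen in isolation. A minimal sketch with illustrative values, again assuming encode() returns all m blocks when no block-number list is given:

    import zfec.easyfec

    k, m = 3, 8
    data = "Yellow Whirled"                    # 14 bytes, not a multiple of k
    enc = zfec.easyfec.Encoder(k, m)
    blocks = enc.encode(data)                  # m blocks of div_ceil(14, 3) == 5 bytes each
    padlen = k * len(blocks[0]) - len(data)    # 3*5 - 14 == 1 byte of zero padding

    # Any k blocks plus their block numbers reconstruct the data exactly.
    dec = zfec.easyfec.Decoder(k, m)
    assert dec.decode(blocks[2:5], (2, 3, 4), padlen) == data
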