diff --git a/LICENSE b/LICENSE new file mode 100644 index 000000000..68baf955c --- /dev/null +++ b/LICENSE @@ -0,0 +1,34 @@ +For code in the django/ directory, see LICENSE.django file). + +For files under static/js/yui/, static/css/yui/ and static/images/yui/, +see http://developer.yahoo.net/yui/license.txt. + +--------------------------------------------------------------------------- + +Copyright (c) 2008, The IETF Trust +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + + * Neither the name of the nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. \ No newline at end of file diff --git a/LICENSE.django b/LICENSE.django new file mode 100644 index 000000000..8f7a2664d --- /dev/null +++ b/LICENSE.django @@ -0,0 +1,30 @@ +(this applies to the django/ subdirectory) +--------------------------------------------------------------------------- + +Copyright (c) Django Software Foundation and individual contributors. +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + + 1. Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + + 2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + + 3. Neither the name of Django nor the names of its contributors may be used + to endorse or promote products derived from this software without + specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/changelog b/changelog new file mode 100644 index 000000000..36efc6c73 --- /dev/null +++ b/changelog @@ -0,0 +1,1155 @@ +ietfdb (3.01) + + * Add a button to issue a ballot for documents which has entered IESG + processing but don't have a ballot yet. + + * Always make idinternal.docstate() return a string -- there are callers + that assume this, like the script which generates all_id2. + + * Don't fail with a server error (500) if a bad draft name (containing + for instance newlines) is entered -- just return a 404. + + * For WG charters under evaluation, show the year too, not only month and + day. This will reveal bad date information in the charter files. + + * If we fail to parse the date of a WG charter under evaluation, use 1 + Jan 1900, and carry on. Don't fail with a server error. + + -- Henrik Levkowetz 20 Jul 2010 18:41:31 +0200 + +ietfdb (3.00) + + * This release adds the IOLA port of the former Perl IESG datatracker to + the Python/Django datatracker. This should make most AD actions on + drafts available through the Django datatracker, normally from the + datatracker.ietf.org/doc// page or from the /doc/ad/.../ + page, or expressed UI-wise: from the pages reached from the 'AD Dashboard' + menu at the top of the lefthand menubar in the datatracker. This + includes actions such as: + + - Change draft state + - Edit draft metadata + - Defer ballot, undefer ballot + - Handle ballot comments + - Handle ballot writeups + - Approve ballots + - Initiate last call + + Secretariat staff actions supported through the Django datatracker now + includes: + + - Manage telechat dates + - Manage working group descriptions + + Tweaks to the new interface and capabilities will be releases with + a short release cycle, as they are discovered and fixed over the next + weeks. + + -- Henrik Levkowetz 16 Jul 2010 18:14:48 +0200 + +ietfdb (2.52) + + * Added machine-readable ballot information (tsv and json) for drafts with + ballots: /doc//ballot.tsv and /doc//ballot.json . + Json data for the whole document is available for all drafts at + /doc//doc.json . + + * Added Unicode NFKC normalization of paths. Somebody just tried to access + /wg/ipfix/charter/ with 'fi' represented as the unicode ligature \ufb01... + + * Changed the text emitted during testing to have less noise at the + beginning of the line -- only 'OK', 'Fail', and the initial line of coverage + reports now starts at the beginning of the line, without whitespace. This + makes it easier to scan a test run visually to find reported failures. + + -- Henrik Levkowetz 16 Jul 2010 13:02:46 +0200 + +ietfdb (2.51) + + * Add an html sanitizer and a sanitize_html template filter. This uses + the html5lib, which has also been included in the svn tree. + + * Provide better handling of both text/plain document comments and + document comments which contain html tags. Adds urlization, + linebreak-preservation and html sanitization. 
Html sanitization would be + better applied when adding comments, but we're not yet a position to + enforce this for all input paths. + + * Catch an additional exception which can occur during generation of + all_id2.txt + + -- Henrik Levkowetz 07 May 2010 12:26:58 +0200 + +ietfdb (2.50) + + From RjS: + + * Don't send mail if we don't have any recipients... + + From Pasi: + + * /iesg/agenda/: fix bug related to non-existing ballots + + * Removed old (unused) YUI js/css + + * /ipr/search/: avoid exception for unexpected search types; comment out + debug logging + + * /doc/active, all: add link to all_id2.txt + + * /liaison/: add link to liaison statement manager tool + + From Henrik: + + * Make version information on the web pages a little less cryptic. + + * Tweak the IESG agenda page for better placement of IPR information; + requested by Russ. Also align the ballot graphics. + + * Add admin site support for the DocumentComment model. + + * If replaced-by information exists, display it, even if the document + state seems to be incorrect (something else than 'Replaced'). + + + -- Henrik Levkowetz 26 Apr 2010 11:45:24 +0200 + +ietfdb (2.49) + + From Pasi: + + * New script to generate all_id2.txt. This should be called in + www6s/scripts/datatracker-updater: run "python -m + ietf.idindex.generate_all_id2_txt" and place its output + in $ID/all_id2.txt. + + * /wg/: link to both www.ietf.org and tools lists of concluded WGs + + * /doc/: show full responsible AD name for old drafts, too + + * Eliminated some cases where exceptions could be thrown + + * Various minor tweaks and fixes + + From Henrik: + + * Added commands to the mkrelease script to update the IETF Tools feed + with release information. + + -- Henrik Levkowetz 24 Mar 2010 21:11:27 +0100 + +ietfdb (2.48) + + From Pasi: + + * Show comments feed link only if it exists (=document is in IDInternal) + + * Fixed a broken link on main page + + * Move new look-and-feel base templates from templates/idrfc/ to just + templates/ + + * Remove more unused code + + * Replaced /idtracker/help/evaluation/ with redirect to www.ietf.org + + * /wg/: fix mailto links for WG chairs + + * /doc/: show telechat date in search results list + + * Show correct version number of expired drafts + + * /doc/rfcNNNN/: show rfc stream + + * Use new look for more pages + + * Replaced most of /idtracker/ (search, document page, ballot page, + comment page) with redirects to /doc/ + + * /doc/*: Show information about ballot sets + + * Refactoring /wg/: use redirects instead of redundant URLs; removed old + charter pages (not used/linked to); misc. cleaning + + * Point to /wg// in left navbar and /meeting/agenda/ + + + -- Henrik Levkowetz 22 Mar 2010 00:26:41 +0100 + +ietfdb (2.47) + + From Suresh: + + * Added a link to the draft pages with email address to the draft authors + + From RjS: + + * Added STD/FYI/BCP info to the standard RFC display page + + * Added a link to the comment feed next to the nits link + + From Bill: + + * Output a summary of what's needed for the document to be approved in + the main and ballot views. + + From Tony: + + * Search plugin for Firefox/IE/etc. 
+ + From Pasi: + + * Correctly handle entities (like >) in IESG note field + + * New script for finding database tables with non-ASCII data + + * /doc/*: allow direct links to tabs/individual comments + + * /iesg/agenda/: show management item text if logged in as AD + + * /wg/acronym/: fix warning for concluded WGs + + * Support PPTX meeting slides + + -- Henrik Levkowetz 21 Mar 2010 03:03:42 +0100 + +ietfdb (2.46) + + From Robert: + + * Add the I-D expiration notification script. + + *** Please add the following cron entry: + + # Send notifications about pending I-D expirations. + 42 4 * * 1 /a/www/ietf-datatracker/web/ietf/bin/notify-expirations + + From Henrik: + + * Adding the south schema migration app (http://south.aeracode.org/). + + * Change the text of IPR search result to more correctly reflect the + status of the search result when IPR has been found on documents related to + a listed document, but not on that document itself. + + From Pasi: + + * Use the new look for a bunch of additional pages + + * /admin/: fix login redirects to work with RemoteUserMiddleware; hide + password changing functionality (which doesn't work with RemoteUser anyway) + + * /ipr/: remove separate list for updating + + * /wg/acronym/*: clearer warning about concluded WGs + + * A lot of various code and deployment cleanup + + * A lot of minor tweaks and fixes (tooltips, links, and more) + + * /doc/rfcNNNN/: include link to errata + * /doc/rfcNNN/: include links to PDF/PS versions (if they exist); handle + RFCs that are not available as .txt + + * Added meta description tags (to improve search result usability): + drafts/RFCs, IPR disclosures, liaison statements + + * Cache/gzip meeting agenda and materials pages + + * If draft doesn't exist under /idtracker/, redirect to /doc/ instead of + showing error + + * Show /doc/ad/first.last/ only for ADs, not other folks in IESGLogin + table + + * Fixed bunch of null/unique attributes for model fields + + * New script to check for violated database constraints (NOT NULL, + UNIQUE, FOREIGN KEY) + + -- Henrik Levkowetz 21 Mar 2010 00:36:05 +0100 + +ietfdb (2.45) + + From Pasi: + + * Use wg_www_pages table (and removed unused idtracker_areaurl). + Requires applying changes to database as follows: + + cd /a/www/ietf-datatracker/2.45/ietf + PYTHONPATH=../ python manage.py dbshell < ../test/sql_fixup.sql + + * Parse stream/wg/file formats from rfc-index.xml; prepare for parsing + auth48 info from queue2.xml + + * IPR disclosures: Don't include .doc/.html inline; don't fail if file + does not exist + + * Replaced /drafts/* with redirects + + * Show datatracker version+date in new look, too + + * IESG discuss page: shade even/odd rows + + * Use new look for IETF meeting agenda; add color-coding support (like + tools has). Also use new look for /iesg/agenda/, /ipr/ and /ipr/NNN/ + * Show RFC publication date as YYYY-MM (since we don't currently have the + day) + + * Fix searching for RFCs that are processed by IESG; made 'advanced + search' options mutually exclusive + + * For plain text RFCs, point to www.rfc-editor.org instead of www.ietf.org + + * Fix page title for 404/500 pages (ticket:298) + + + From Henrik: + + * Apply django changeset 12634 to our copy, to permit usernames + containing '@'. See http://code.djangoproject.com/changeset/12634 . + + -- Henrik Levkowetz 11 Mar 2010 14:41:47 +0100 + +ietfdb (2.44) + + From Robert: + + * Moved old (1995-2003) liaison statements from the template file + to the database. 
This requires applying database fixups as + follows: + + cd /a/www/ietf-datatracker/2.44/ietf + PYTHONPATH=../ python manage.py dbshell < ../test/liaison_fixup.sql + + And updating the liaison files as follows: + + cd /a/www/ietf-datatracker/documents/LIAISON/ + # TAKE A BACKUP OF THE WHOLE DIRECTORY, JUST IN CASE + . /a/www/ietf-datatracker/2.44/test/liaison_fixup.sh + + From Pasi: + + * Use new look for more pages + + * Make obsoletes/updates/etc. lists links (with new urlize_ietf_docs + template filter) + + * Always show year in /idtracker/status/ dates + + * /doc/rfcNNNN/: show the internet-draft name (if known) and link to its + history; show information about post-RFC IESG processing + + * Use yellow for 'abstain' also in /iesg/agenda/documents/ + + * IESG ballot grid icon: show 'abstain' as yellow instead of gray + + * IESG telechat agenda: include "private" links also if logged in, link + from document list + + * Added /doc/all/ page + + * Better output if /doc/ search doesn't match anything + + * Show state-change-notice-to field in /doc/ (to AD/secretariat) + + * Make IESG telechat agenda feed discoverable + + * Added IPR/dependency links and edit/add buttons for RFCs, too; some + template refactoring + + * Avoid exceptions from MySQL when searching for non-ASCII draft names/WG + acronyms/etc. + + * Handle non-ASCII characters gracefully in old cgi-bin script redirects + + * IPR search: handle non-numeric RFC numbers/ID document tags gracefully + (instead of uncaught exception) + + * Change Atom feeds (+couple of other places) to point to /doc/draft-.. + instead of /idtracker/draft-.../ + + From Henrik: + + * In PersonOrOrgInfo.email(), try harder to find a relevant email address. + The new code will eventually fall back to a broad filter which matches that + of the perl code which creates WG charter pages, where the old code would + sometimes result in django-generated charter pages with missing email + addresses. + + * Serve ipr disclosures which are available on the server directly, + instead of through an external http link in an + {% endif %} + {% endif %} - {{ debug }} - {% endblock %} diff --git a/ietf/templates/ipr/details.txt b/ietf/templates/ipr/details.txt index 3bd3bf182..a0be2af85 100644 --- a/ietf/templates/ipr/details.txt +++ b/ietf/templates/ipr/details.txt @@ -1,78 +1,78 @@ -IPR Title: {{ ipr.title }} +{% load ietf_filters %}IPR Title: {{ ipr.title|safe }} -Section I. Patent Holder/Applicant ("Patent Holder") -Legal Name: {{ ipr.legal_name }} -Section II. Patent Holder's Contact for License Application -Name: {{ ipr.holder_contact.name }} -Title: {{ ipr.holder_contact.title }} -Department: {{ ipr.holder_contact.department }} -Address1: {{ ipr.holder_contact.address1 }} -Address2: {{ ipr.holder_contact.address2 }} -Telephone: {{ ipr.holder_contact.telephone }} -Fax: {{ ipr.holder_contact.fax }} -Email: {{ ipr.holder_contact.email }} +Section I: Patent Holder/Applicant ("Patent Holder") + Legal Name: {{ ipr.legal_name|safe }} -Section III. Contact Information for the IETF Participant Whose Personal Belief Triggered -the Disclosure in this Template (Optional): -Name: {{ ipr.ietf_contact.name }} -Title: {{ ipr.ietf_contact.title }} -Department: {{ ipr.ietf_contact.department }} -Address1: {{ ipr.ietf_contact.address1 }} -Address2: {{ ipr.ietf_contact.address2 }} -Telephone: {{ ipr.ietf_contact.telephone }} -Fax: {{ ipr.ietf_contact.fax }} -Email: {{ ipr.ietf_contact.email }} -Section IV. 
IETF Document or Working Group Contribution to Which Patent Disclosure -Relates -RFC Number(s) : {% for doc in ipr.rfcs.all %}{{ doc.document.rfc_number }} ({{ doc.document.title }}){% if not forloop.last %}, {% endif %}{% endfor %} -I-D File Name(s)(draft-...) : {% for doc in ipr.drafts.all %}{{ doc.document.displayname }} ({{ doc.document.title }}){% if not forloop.last %}, {% endif %}{% endfor %} -Designations for Other Contributions : {{ ipr.other_designations }} +Section II: Patent Holder's Contact for License Application + Name: {{ ipr.holder_contact.name|safe }} + Title: {{ ipr.holder_contact.title|safe }} + Department: {{ ipr.holder_contact.department|safe }} + Address1: {{ ipr.holder_contact.address1|safe }} + Address2: {{ ipr.holder_contact.address2|safe }} + Telephone: {{ ipr.holder_contact.telephone|safe }} + Fax: {{ ipr.holder_contact.fax|safe }} + Email: {{ ipr.holder_contact.email|safe }} -Section V. Disclosure of Patent Information (i.e., patents or patent applications -required to be disclosed by Section 6 of RFC 3979) -A. For granted patents or published pending patent applications, please provide -the following information: -Patent, Serial, Publication, Registration, or Application/File number(s) : {{ ipr.patents }} -Date(s) granted or applied for : {{ ipr.date_applied }} -Country : {{ ipr.country }} -Additional Note(s): -{{ ipr.notes }} -B. Does your disclosure relate to an unpublished pending patent application? {{ ipr.get_is_pending_display }} + +Section III: Contact Information for the IETF Participant Whose Personal Belief Triggered the Disclosure in this Template (Optional): + Name: {{ ipr.ietf_contact.name|safe }} + Title: {{ ipr.ietf_contact.title|safe }} + Department: {{ ipr.ietf_contact.department|safe }} + Address1: {{ ipr.ietf_contact.address1|safe }} + Address2: {{ ipr.ietf_contact.address2|safe }} + Telephone: {{ ipr.ietf_contact.telephone|safe }} + Fax: {{ ipr.ietf_contact.fax|safe }} + Email: {{ ipr.ietf_contact.email|safe }} + + +Section IV: IETF Document or Working Group Contribution to Which Patent Disclosure Relates + RFC Number(s): {% for doc in ipr.rfcs.all %}{{ doc.document.rfc_number|safe }} ({{ doc.document.title|safe }}){% if not forloop.last %}, {% endif %}{% endfor %} + Internet-Draft(s): {% for doc in ipr.drafts.all %}{{ doc.document.filename|safe }} ({{ doc.document.title|safe }}){% if not forloop.last %}, {% endif %}{% endfor %} + Designations for Other Contributions: {{ ipr.other_designations|safe }} + + +Section V: Disclosure of Patent Information (i.e., patents or patent applications required to be disclosed by Section 6 of RFC 3979) + +A. For granted patents or published pending patent applications, please provide the following information: + Patent, Serial, Publication, Registration, or Application/File number(s): {{ ipr.patents|safe }} + Date(s) granted or applied for: {{ ipr.date_applied|safe }} + Country: {{ ipr.country|safe }} + Additional Note(s): + {{ ipr.notes|safe }} + +B. Does your disclosure relate to an unpublished pending patent application? {{ ipr.get_is_pending_display|safe }} {# missing ipr.applies_to_all #} -C. If an Internet-Draft or RFC includes multiple parts and it is not reasonably apparent which part of such Internet-Draft or RFC is alleged to be covered by -the patent information disclosed in Section V(A) or V(B), it is helpful if the -discloser identifies here the sections of the Internet-Draft or RFC that are -alleged to be so covered. : -{{ ipr.document_sections }} -Section VI. 
Licensing Declaration The Patent Holder states that, upon approval by the IESG for publication as an -RFC of the relevant IETF specification, its position with respect to licensing -any patent claims contained in the patent(s) or patent application(s) disclosed -above that would be necessary to implement the technology required by such IETF -specification ("Patent Claims"), for the purpose of implementing the specification, is as follows(select one licensing declaration option only): +C. If an Internet-Draft or RFC includes multiple parts and it is not reasonably apparent which part of such Internet-Draft or RFC is alleged to be covered by the patent information disclosed in Section V(A) or V(B), it is helpful if the discloser identifies here the sections of the Internet-Draft or RFC that are alleged to be so covered: +{{ ipr.document_sections|safe }} + + +Section VI: Licensing Declaration + +The Patent Holder states that, upon approval by the IESG for publication as an RFC of the relevant IETF specification, its position with respect to licensing any patent claims contained in the patent(s) or patent application(s) disclosed above that would be necessary to implement the technology required by such IETF specification ("Patent Claims"), for the purpose of implementing the specification, is as follows(select one licensing declaration option only): Selection: -{{ ipr.get_licensing_option_display }} + {{ ipr.get_licensing_option_display|safe }} {% if ipr.stdonly_license %} -{{ ipr.stdonly_license }} + {{ ipr.stdonly_license|safe }} Above licensing declaration is limited solely to standards-track IETF documents. {% endif %} -Licensing information, comments, notes or URL for further information : -{{ ipr.comments }} +Licensing information, comments, notes or URL for further information: + {{ ipr.comments|safe }} -Section VII. Contact Information of Submitter of this Form (if different from -IETF Participant in Section III above) -Name: {{ ipr.submitter.name }} -Title: {{ ipr.submitter.title }} -Department: {{ ipr.submitter.department }} -Address1: {{ ipr.submitter.address1 }} -Address2: {{ ipr.submitter.address2 }} -Telephone: {{ ipr.submitter.telephone }} -Fax: {{ ipr.submitter.fax }} -Email: {{ ipr.submitter.email }} -Section VIII. 
Other Note(s): -{{ ipr.other_notes }} +Section VII: Contact Information of Submitter of this Form (if different from IETF Participant in Section III above) + Name: {{ ipr.submitter.name|safe }} + Title: {{ ipr.submitter.title|safe }} + Department: {{ ipr.submitter.department|safe }} + Address1: {{ ipr.submitter.address1|safe }} + Address2: {{ ipr.submitter.address2|safe }} + Telephone: {{ ipr.submitter.telephone|safe }} + Fax: {{ ipr.submitter.fax|safe }} + Email: {{ ipr.submitter.email|safe }} + +Section VIII: Other Note(s) +{{ ipr.other_notes|safe }} diff --git a/ietf/templates/ipr/details_edit.html b/ietf/templates/ipr/details_edit.html new file mode 100644 index 000000000..db1bc3244 --- /dev/null +++ b/ietf/templates/ipr/details_edit.html @@ -0,0 +1,383 @@ +{% extends "base.html" %} +{# Copyright The IETF Trust 2007, All Rights Reserved #} +{% load ietf_filters %} +{% block title %}IPR Details - Form{% endblock %} +{% block bodyAttrs %} + {% if section_list.holder_contact %}onload="toggle_submitter_info('holder')"{% endif %} + {% if section_list.ietf_contact %}onload="toggle_submitter_info('ietf')"{% endif %} +{% endblock bodyAttrs %} + +{% block morecss %} +table.ipr { margin-top: 1em; } +.ipr .light td { background: #eeeeee; } +.ipr .dark td { background: #dddddd; } +.ipr th { background: #2647a0; color: white; } +.ipr { width: 101ex; border: 0; border-collapse: collapse; } +.ipr th, .ipr td { padding: 3px 6px; text-align: left; } +.ipr tr { vertical-align: top; } +.ipr td.iprlabel { width: 18ex; } +.iprdata { font-weight: bold; } + +.iprdata li { list-style:none;} + +.required { color: red; float: right; padding-top: 0.7ex; font-size: 130%; } +.errorlist { background: red; color: white; padding: 0.2ex 0.2ex 0.2ex 0.5ex; border: 0px; margin: 0px; } + ul.errorlist { margin: 0px; } +{% endblock %} + +{% block content %} + +{% include "ipr/js.html" %} + +

The Patent Disclosure and Licensing Declaration Template for {{ section_list.disclosure_type }}

+ +{% if section_list.generic %} +

This document is an IETF IPR Patent Disclosure and Licensing +Declaration Template and is submitted to inform the IETF of a) patent +or patent application information that is not related to a specific +IETF document or contribution, and b) an IPR Holder's intention with +respect to the licensing of its necessary patent claims. No actual +license is implied by submission of this template.

+{% endif %} + +{% if section_list.specific %} +

This document is an IETF IPR Disclosure and Licensing Declaration +Template and is submitted to inform the IETF of a) patent or patent +application information regarding the IETF document or contribution +listed in Section IV, and b) an IPR Holder's intention with respect to +the licensing of its necessary patent claims. No actual license is +implied by submission of this template. Please complete and submit a +separate template for each IETF document or contribution to which the +disclosed patent information relates.

+{% endif %} + +{% if section_list.third_party %} +

This form is used to let the IETF know about patent information +regarding an IETF document or contribution when the person letting the +IETF know about the patent has no relationship with the patent owners. +Click here +if you want to disclose information about patents or patent +applications where you do have a relationship to the patent owners or +patent applicants.

+ {% endif %} + +{% if section_list.also_specific %} +

Note: According to Section 6.4.3 of +RFC 3979, +"Intellectual Property Rights in IETF Technology," you +are still required to file specific disclosures on IPR unless your +generic disclosure satisfies certain conditions. Please see the +RFC for details.

+ {% endif %} + +

If you wish to submit your IPR disclosure by e-mail, then please send +it to ietf-ipr@ietf.org. +Submissions made by e-mail that do not comply with the formal +requirements of Section 6, "IPR Disclosures," of +RFC 3979, +"Intellectual Property Rights in IETF Technology", +will be posted, but will be marked as "non-compliant".

+ +
+ + {% if ipr.errors %} +

+ There were errors in the submitted form -- see below. Please correct these and resubmit. + {% if ipr.non_field_errors %} +

    + {% for error in ipr.non_field_errors %} +
  • {{ error }}
  • + {% endfor %} +
+ {% endif %} +

+ {% endif %} + +

+ Fields marked with "*" are required. +

+ + + {% if section_list.holder %} + + + + + + + +
+ {% cycle I,II,III,IV,V,VI,VII,VIII as section %}. + {% if section_list.third_party %}Possible{% endif %} + Patent Holder/Applicant ("Patent Holder") +
Legal Name: {{ ipr.legal_name.errors }} * {{ ipr.legal_name }}
+ {% endif %} + + {% if section_list.holder_contact %} + + + + {% for field in ipr.holder_contact %} + {% ifnotequal field.name "update_auth" %} + + {% endifnotequal %} + {% endfor %} +
+ {% cycle section %}. + Patent Holder's Contact for License Application +
{{field.label }}:{{ field.errors }} {% if field.field.required %}*{%endif%} {{ field }}
+ {% endif %} + + {% if section_list.ietf_contact %} + + + + + {% for field in ipr.ietf_contact %} + {% ifnotequal field.name "update_auth" %} + + {% endifnotequal %} + {% endfor %} +
+ {% cycle section %}. + Contact Information for the IETF Participant Whose Personal Belief Triggered this Disclosure: +
{{field.label }}:{{ field.errors }} {% if field.field.required %}*{%endif%} {{ field }}
+ {% endif %} + + + {% if section_list.ietf_doc %} + + + + + + + +
+ * + {% cycle section %}. + IETF Document or Other Contribution to Which this IPR Disclosure Relates: +
RFC Numbers:{{ ipr.rfclist.errors }} {{ ipr.rfclist }}
I-D Filenames (draft-...):{{ ipr.draftlist.errors}} {{ ipr.draftlist }}
Designations for Other Contributions:{{ ipr.other_designations.errors }} {{ ipr.other_designations }}
+ {% endif %} + + {% if section_list.patent_info %} + + + + + {% if ipr.patents or ipr.notes %} + + + + + + + + + + + + + + {% if section_list.generic %} + + + + {% else %} + + + + {% if ipr.document_sections %} + + {% else %} + + {% endif %} + {% endif %} + + {% else %} + + {% endif %} + +
+ {% cycle section %}. + {% if section_list.generic %} + Disclosure of Patent Information (i.e., patents or patent + applications required to be disclosed by Section 6 of RFC3979) + {% endif %} + {% if section_list.specific %} + Disclosure of Patent Information (i.e., patents or patent + applications required to be disclosed by Section 6 of RFC3979) + {% endif %} + {% if section_list.third_party %} + Disclosure of Patent Information, if known (i.e., patents or + patent applications required to be disclosed by Section 6 of RFC3979) + {% endif %} +
+ A. For granted patents or published pending patent applications, + please provide the following information:
Patent, Serial, Publication, Registration, + or Application/File number(s): {{ ipr.patents.errors }} * {{ ipr.patents }}
Date(s) granted or applied for: {{ ipr.date_applied.errors }} * {{ ipr.date_applied }}
Country: {{ ipr.country.errors }} * {{ ipr.country }}
Additional Notes: {{ ipr.notes.errors }} {{ ipr.notes }}
+ B. Does your disclosure relate to an unpublished pending patent application?: +
{{ ipr.is_pending.errors }} {{ ipr.is_pending }}
+
+ C. Does this disclosure apply to all IPR owned by + the submitter?: +
{{ ipr.applies_to_all.errors }} {{ ipr.applies_to_all }}
+
+ C. If an Internet-Draft or RFC includes multiple parts and it is not + reasonably apparent which part of such Internet-Draft or RFC is alleged + to be covered by the patent information disclosed in Section + V(A) or V(B), it is helpful if the discloser identifies here the sections of + the Internet-Draft or RFC that are alleged to be so + covered: +
{{ ipr.document_sections.errors }} {{ ipr.document_sections }}
No information submitted
This disclosure relates to an unpublished pending patent application.
+ {% endif %} + + + {% if section_list.licensing %} + + + + + + + + + + + + + + + + + + + + + + + {% if ipr.lic_checkbox %} + + + + + + {% endif %} + + + +
+ {% cycle section %}. + Licensing Declaration +
+ {% if section_list.generic %} + The Patent Holder states that its position with respect + to licensing any patent claims contained in the patent(s) or patent + application(s) disclosed above that would necessarily be infringed by + implementation of a technology required by a relevant IETF specification + ("Necessary Patent Claims"), for the purpose of implementing such + specification, is as follows (select one licensing declaration option only): + {% endif %} + {% if section_list.specific %} + The Patent Holder states that its position with respect + to licensing any patent claims contained in the patent(s) or patent + application(s) disclosed above that would necessarily be infringed by + implementation of the technology required by the relevant IETF specification + ("Necessary Patent Claims"), for the purpose of implementing such + specification, is as follows (select one licensing declaration option only): + {% endif %} + {% if section_list.third_party %} + The Patent Holder states that its position with respect + to licensing any patent claims contained in the patent(s) or patent + application(s) disclosed above that would necessarily be infringed by + implementation of the technology required by the relevant IETF specification + ("Necessary Patent Claims"), for the purpose of implementing such + specification, is as follows (select one licensing declaration option only): + {% endif %} +
+ {{ ipr.licensing_option.errors }} {{ ipr.licensing_option }} +
{{ ipr.stdonly_license.errors }} + {{ ipr.stdonly_license }} + Above licensing declaration is limited solely to standards-track IETF documents. +
+ Licensing information, comments, notes, or URL for further information: +
{{ ipr.comments.errors }} {{ ipr.comments }}
+

+ {{ ipr.lic_checkbox.errors }} + {% ifnotequal ipr.lic_checkbox 1 %}{{ ipr.lic_checkbox }}{% endifnotequal %} + The individual submitting this template represents and warrants that all + terms and conditions that must be satisfied for implementers of any + covered IETF specification to obtain a license have been disclosed in this + IPR disclosure statement. +

+ {% if section_list.generic %} +

+ Note: According to + RFC 3979, + Section 6.4.3, unless you check the box + above, and choose either option a) or b), you must still file specific + IPR disclosures as appropriate. +

+ {% endif %} +
+ Note: The individual submitting this template represents and warrants + that he or she is authorized by the Patent Holder to agree to the + above-selected licensing declaration. +
+ {% endif %} + + + {% if section_list.submitter %} + + + + + {% if ipr.ietf_contact_is_submitter %} + + {% if section_list.holder_contact %} + + + + {% endif %} + {% if section_list.ietf_contact %} + + + + {% endif %} + + {% endif %} + {% for field in ipr.submitter %} + {% ifnotequal field.name "update_auth" %} + + {% endifnotequal %} + {% endfor %} +
+ {% cycle section %}. + Contact Information of Submitter of this Form (if different from the + Contact Information above) +
+ Same as in Section II above: + +
+ Same as in Section III above: + +
{{field.label }}:{{ field.errors }} {% if field.field.required %}*{%endif%} {{ field }}
+ {% endif %} + + + {% if section_list.notes %} + + + + + +
+ {% cycle section %}. + Other Notes: +
{{ ipr.other_notes.errors }} {{ ipr.other_notes }}
+ {% endif %} + + +
+ +{% endblock %} diff --git a/ietf/templates/ipr/disclosure.html b/ietf/templates/ipr/disclosure.html index 06d920890..e4c042b4d 100644 --- a/ietf/templates/ipr/disclosure.html +++ b/ietf/templates/ipr/disclosure.html @@ -2,14 +2,9 @@ {% extends "base.html" %} {% block title %}IPR Disclosure Page{% endblock %} {% block content %} -
- -
-
-
- -
-

+

IPR Disclosure Page

+ +

This page provides a mechanism for filing Disclosures about intellectual property rights (IPR) and for finding out what IPR Disclosures have been filed. The IETF intellectual property rights rules are defined in RFC 3979, @@ -22,48 +17,36 @@ pertain to the implementation or use of the technology described in any IETF documents or the extent to which any license under such rights might or might not be available; nor does it represent that it has made any independent effort to identify any such rights.

-

-


-

- File a disclosure about your IPR related to a specific IETF contribution -

-

File an IPR disclosure that is not related to a specific IETF contribution -

-

Notify the IETF of IPR other than your own -

-

Update an existing IPR disclosure -

-

Search the IPR disclosures -

-

List of IPR disclosures

-

-
-

To remove an IPR disclosure from the list, please contact the IETF Secretariat at ietf-ipr@ietf.org. -

-
-

IETF Contribution: - any submission to the IETF intended by the Contributor for publication as all or part of an Internet-Draft or RFC (except for RFC Editor Contributions described below) and any statement made within the context of an IETF activity. Such statements include oral statements in IETF sessions, as well as written and electronic communications made at any time or place, which are addressed to:
-

-

- the IETF plenary session,
- - any IETF working group or portion thereof,
- - the IESG, or any member thereof on behalf or the IESG,
- - the IAB or any member thereof on behalf of the IAB,
- - any IETF mailing list, including the IETF list itself, any working group or design team list, or any other list functioning under IETF auspices,
- - the RFC Editor or the Internet-Drafts function (except for RFC Editor Contributions described below).
+


+ + +

To remove an IPR disclosure from the list, please contact the IETF Secretariat at ietf-ipr@ietf.org.

+ +
+

IETF Contribution: + any submission to the IETF intended by the Contributor for publication as all or part of an Internet-Draft or RFC (except for RFC Editor Contributions described below) and any statement made within the context of an IETF activity. Such statements include oral statements in IETF sessions, as well as written and electronic communications made at any time or place, which are addressed to:

+

- the IETF plenary session,
+ - any IETF working group or portion thereof,
+ - the IESG, or any member thereof on behalf or the IESG,
+ - the IAB or any member thereof on behalf of the IAB,
+ - any IETF mailing list, including the IETF list itself, any working group or design team list, or any other list functioning under IETF auspices,
+ - the RFC Editor or the Internet-Drafts function (except for RFC Editor Contributions described below).

Statements made outside of an IETF session, mailing list or other function, that are clearly not intended to be input to an IETF activity, group or function, are not IETF Contributions in the context of this document.

-

A participant in any IETF activity is deemed to accept all IETF rules of process, as documented in Best Current Practices RFCs and IESG Statements. -

+

A participant in any IETF activity is deemed to accept all IETF rules of process, as documented in Best Current Practices RFCs and IESG Statements

-
-
-A participant in any IETF activity acknowledges that written, audio and video records of meetings may be made and may be available to the public. -

-

- RFC Editor Contribution: +

A participant in any IETF activity acknowledges that written, audio and video records of meetings may be made and may be available to the public.

+ +

RFC Editor Contribution: An Internet-Draft intended by the Contributor to be submitted to the - RFC Editor for publication as an Informational or Experimental RFC but not intended to be part of the IETF Standards Process.
-

+ RFC Editor for publication as an Informational or Experimental RFC but not intended to be part of the IETF Standards Process.

{% endblock %} diff --git a/ietf/templates/ipr/drafts.html b/ietf/templates/ipr/drafts.html new file mode 100644 index 000000000..07ca916ee --- /dev/null +++ b/ietf/templates/ipr/drafts.html @@ -0,0 +1,3 @@ +# Machine-readable list of ipr disclosures by draft name +{% for doc in docs %}{{doc.name}}{% for num in doc.iprs %} {{ num }}{% endfor %} +{% endfor %} \ No newline at end of file diff --git a/ietf/templates/ipr/formfield.html b/ietf/templates/ipr/formfield.html deleted file mode 100644 index d12d66707..000000000 --- a/ietf/templates/ipr/formfield.html +++ /dev/null @@ -1,10 +0,0 @@ -{# Copyright The IETF Trust 2007, All Rights Reserved #} - {% if errors %} -
    - {% for error in errors %} -
  • {{ error }}
  • - {% endfor %} -
- {% endif %} - {% if field.required %}*{% endif %}{{ text }} - {{ help_text }} \ No newline at end of file diff --git a/ietf/templates/ipr/iprdetail_archive.html b/ietf/templates/ipr/iprdetail_archive.html deleted file mode 100644 index b48f0b265..000000000 --- a/ietf/templates/ipr/iprdetail_archive.html +++ /dev/null @@ -1,29 +0,0 @@ -{# Copyright The IETF Trust 2007, All Rights Reserved #} -{% extends "base.html" %} - -{% block title %}IPR Declarations{% endblock %} - -{% block content %} -

IPR Declarations

- -

Recent IPR Declarations

-{% regroup latest by telechat_date|date:"Y" as years %} -{% for year in years %} -{% ifnotequal latest|length "1" %} -

{{ year.grouper }}

-{% endifnotequal %} - -{% for ipr in year.list %} - {% include "ipr/list_item.html" %} -{% endfor %} -
-{% endfor %} - -

Archives by Year

- - -{% endblock %} diff --git a/ietf/templates/ipr/iprdetail_archive_month.html b/ietf/templates/ipr/iprdetail_archive_month.html deleted file mode 100644 index 79fcccf47..000000000 --- a/ietf/templates/ipr/iprdetail_archive_month.html +++ /dev/null @@ -1,28 +0,0 @@ -{# Copyright The IETF Trust 2007, All Rights Reserved #} -{% extends "base.html" %} - -{% block title %}IPR Declarations{% endblock %} - -{% block content %} -

IPR Declarations - {{ month|date:"F Y" }}

- -

-<< {{ previous_month|date:"F Y" }} -| {{ month|date:"Y" }} -{% if next_month %} -| {{ next_month|date:"F Y" }} >> -{% endif %} -

-

-{% if object_list %} - - {% for ipr in object_list %} - {% include "ipr/list_item.html" %} - {% endfor %} -
-{% else %} -No declarations for {{ month|date:"F Y "}} -{% endif %} -

- -{% endblock %} diff --git a/ietf/templates/ipr/iprdetail_archive_year.html b/ietf/templates/ipr/iprdetail_archive_year.html deleted file mode 100644 index b6a4416ae..000000000 --- a/ietf/templates/ipr/iprdetail_archive_year.html +++ /dev/null @@ -1,17 +0,0 @@ -{# Copyright The IETF Trust 2007, All Rights Reserved #} -{% extends "base.html" %} - -{% block title %}IPR Declarations{% endblock %} - -{% block content %} -

IPR Declarations - {{ year }}

- -All dates - - - -{% endblock %} diff --git a/ietf/templates/ipr/licensing_option.html b/ietf/templates/ipr/licensing_option.html deleted file mode 100644 index a794a3e98..000000000 --- a/ietf/templates/ipr/licensing_option.html +++ /dev/null @@ -1,28 +0,0 @@ -{# Copyright The IETF Trust 2007, All Rights Reserved #} -{% ifequal ipr.licensing_option 1 %} - No License Required for Implementers. -{% endifequal%} - -{% ifequal ipr.licensing_option 2 %} - Royalty-Free, Reasonable and Non-Discriminatory License to All Implementers. -{% endifequal%} - -{% ifequal ipr.licensing_option 3 %} - Reasonable and Non-Discriminatory License to All Implementers with Possible Royalty/Fee. -{% endifequal%} - -{% ifequal ipr.licensing_option 4 %} - Licensing Declaration to be Provided Later. -{% endifequal%} - -{% ifequal ipr.licensing_option 5 %} - Unwilling to Commit to the Provisions. -{% endifequal%} - -{% ifequal ipr.licensing_option 6 %} - See Text Below for Licensing Declaration. -{% endifequal%} - -{% if ipr.lic_opt_a_sub or ipr.lic_opt_b_sub or ipr.lic_opt_c_sub %} - The licensing declaration is limited solely to standards-track IETF documents. -{% endif%} diff --git a/ietf/templates/ipr/list.html b/ietf/templates/ipr/list.html index 7ea9271bc..7b9ebc6d2 100644 --- a/ietf/templates/ipr/list.html +++ b/ietf/templates/ipr/list.html @@ -1,63 +1,47 @@ -{# Copyright The IETF Trust 2007, All Rights Reserved #} {% extends "base.html" %} -{% block title %}IPR Update{% endblock %} +{# Copyright The IETF Trust 2007, All Rights Reserved #} +{% block title %}IPR List{% endblock %} {% block content %} +

IETF Page of Intellectual Property Rights Disclosures

-

IETF Page of Intellectual Property Rights Disclosures.

-This page provides a mechanism for filing disclosures about intellectual property rights (IPR) and for finding out what IPR disclosures have been filed. The IETF intellectual property rights rules are defined in RFC 3979, "Intellectual Property Rights in IETF Technology." -

- +This page provides a mechanism for filing disclosures about intellectual property rights (IPR) and for finding out what IPR disclosures have been filed. The IETF intellectual property rights rules are defined in RFC 3979, "Intellectual Property Rights in IETF Technology."

+

The IETF takes no position regarding the validity or scope of any intellectual property rights or other rights that might be claimed to pertain to the implementation or use of the technology described in any IETF documents or the extent to -which any license under such rights might or might not be available; nor does it represent that it has made any independent effort to identify any such rights. -

-Click here to submit an IPR disclosure +which any license under such rights might or might not be available; nor does it represent that it has made any independent effort to identify any such rights.

+

Click here to submit an IPR disclosure

-

-

Search the IPR Disclosures

-

Generic IPR Disclosures

-

Specific IPR Disclosures

-

Specific Third Party IPR Disclosures

-


+Search the IPR Disclosures - -The IPR disclosures within each category are listed by date in descending - order. - -
-

Generic IPR Disclosures


- - +

Generic IPR Disclosures

+
Date SubmittedID #Title of IPR Disclosure
+ {% for ipr in generic_disclosures %} {% include "ipr/list_item.html" %} {% endfor %}
Date SubmittedID #Title of IPR Disclosure
-

Back to Top

-

Specific IPR Disclosures


- - - +

Specific IPR Disclosures

+
Date SubmittedID #Title of IPR Disclosure
+ {% for ipr in specific_disclosures %} {% include "ipr/list_item.html" %} {% endfor %}
Date SubmittedID #Title of IPR Disclosure
-
-

Back to Top

-

Specific Third Party IPR Disclosures


- - +

Specific Third Party IPR Disclosures

+
Date SubmittedID #Title of IPR Disclosure
+ {% for ipr in thirdpty_disclosures %} {% include "ipr/list_item.html" %} {% endfor %}
Date SubmittedID #Title of IPR Disclosure
{% endblock %} - \ No newline at end of file + diff --git a/ietf/templates/ipr/list_item.html b/ietf/templates/ipr/list_item.html index 998a7c2ec..472b1d536 100644 --- a/ietf/templates/ipr/list_item.html +++ b/ietf/templates/ipr/list_item.html @@ -1,23 +1,23 @@ {# Copyright The IETF Trust 2007, All Rights Reserved #} - - {{ ipr.submitted_date }} - {{ ipr.ipr_id }} + + {{ ipr.submitted_date }} + {{ ipr.ipr_id }} {% ifequal ipr.status 1 %} - {{ ipr.title }} + {{ ipr.title|escape }} {% else %} - {{ ipr.title }} -
This IPR disclosure was removed at the request of the submitter. + {{ ipr.title|escape }} +
This IPR disclosure was removed at the request of the submitter. {% endifequal %}
{% for item in ipr.updates.all %} {% ifequal item.updated.status 1 %} - Updates ID #{{ item.updated.ipr_id }}.
+ Updates ID #{{ item.updated.ipr_id }}.
{% endifequal %} {% endfor %} {% for item in ipr.updated_by.all %} {% ifequal item.processed 1 %} - Updated by ID #{{ item.ipr.ipr_id }}.
+ Updated by ID #{{ item.ipr.ipr_id }}.
{% endifequal %} {% endfor %} @@ -29,7 +29,7 @@ * - {{ ipr.legacy_title_1 }} + {{ ipr.legacy_title_1|escape }} {% endif %} @@ -39,7 +39,7 @@ * - {{ ipr.legacy_title_2 }} + {{ ipr.legacy_title_2|escape }} {% endif %} diff --git a/ietf/templates/ipr/removed.html b/ietf/templates/ipr/removed.html new file mode 100644 index 000000000..53a31604d --- /dev/null +++ b/ietf/templates/ipr/removed.html @@ -0,0 +1,12 @@ +{% extends "base.html" %} +{# Copyright The IETF Trust 2009, All Rights Reserved #} +{% block title %}IPR Details - {{ ipr.title|escape }}{% endblock %} +{% block content %} + +

{{ ipr.title|escape }}

+ + +This IPR disclosure was removed at the submitter's request. + + +{% endblock %} diff --git a/ietf/templates/ipr/search.html b/ietf/templates/ipr/search.html index 2cf601ebf..cdc7bfef5 100644 --- a/ietf/templates/ipr/search.html +++ b/ietf/templates/ipr/search.html @@ -1,20 +1,24 @@ -{# Copyright The IETF Trust 2007, All Rights Reserved #} {% extends "base.html" %} -{% block doctype %}{% endblock %} +{# Copyright The IETF Trust 2007, All Rights Reserved #} {% block title %}IPR Search{% endblock %} + +{% block morecss %} +form { clear:both; margin-top:1em;} +label { float:left; width: 200px; } +{% endblock %} {% block content %} -


- Document Search -

- + +

IPR Search

+

Document Search

+
-
- - - + + + + - - - - - + + + -
Enter I-D Filename ( draft-...)
or Enter RFC Number
-




- Keyword Search +
+ +

Keyword Search

+ +
-

- - - - - - + + + - - - - - - + + + + + * The search string must contain at least three characters, including at least one digit, and include punctuation marks. For best results, please enter the entire string, or as much of it as possible. + - - - - - + - - - - - - + + + + - - - - - + + + + + -
Name of patent owner/applicant
Characters in patent information (Full/Partial)
- * The search string must contain at least three characters, including at least one digit, and include punctuation marks. For best results, please enter the entire string, or as much of it as possible. -
Working group name + -
Words in document title
Words in IPR disclosure title
-


-
IPR Disclosure Page -
+

Back to IPR Disclosure Page

{% endblock %} diff --git a/ietf/templates/ipr/search_doc_list.html b/ietf/templates/ipr/search_doc_list.html index c14e482f6..31e18a2fa 100644 --- a/ietf/templates/ipr/search_doc_list.html +++ b/ietf/templates/ipr/search_doc_list.html @@ -1,17 +1,14 @@ +{% extends "base.html" %} {# Copyright The IETF Trust 2007, All Rights Reserved #} - -IPR Search Engine - +{% block title %}IPR Disclosures - Select Internet-Draft{% endblock %} -
-

IPR Disclosures

-
-
+{% block content %} +

IPR Disclosures - Selct Internet-Draft

-

Please select one of following I-Ds

- - \ No newline at end of file +

Please select one of following I-Ds

+ +{% endblock %} diff --git a/ietf/templates/ipr/search_doc_result.html b/ietf/templates/ipr/search_doc_result.html index 14e130fca..800a413e3 100644 --- a/ietf/templates/ipr/search_doc_result.html +++ b/ietf/templates/ipr/search_doc_result.html @@ -9,7 +9,7 @@ {{ ipr.submitted_date }}
  • ID # {{ ipr.ipr_id }}
  • - "{{ ipr.title }}" + "{{ ipr.title|escape }}" {% endfor %} @@ -18,7 +18,7 @@ - Search result on {{ doc|rfcspace|lstrip:"0"|rfcnospace }}, "{{ doc.title }}"{% ifnotequal doc first %}{% if doc.related %}, {{ doc.relation }} {{ doc.related|rfcspace|lstrip:"0"|rfcnospace }}, "{{ doc.related.title }}"{% endif %} + Search result on {{ doc|rfcspace|lstrip:"0"|rfcnospace }}, "{{ doc.title|escape }}"{% ifnotequal doc first %}{% if doc.related %}, {{ doc.relation }} {{ doc.related|rfcspace|lstrip:"0"|rfcnospace }}, "{{ doc.related.title }}"{% endif %} {% endifnotequal %} @@ -27,13 +27,17 @@ {{ ipr.submitted_date }}
  • ID # {{ ipr.ipr_id }}
  • - "{{ ipr.title }}" + "{{ ipr.title|escape }}" {% endfor %} {% else %} - No IPR disclosures related to {{ doc|rfcspace|lstrip:"0" }} have been submitted + + No IPR disclosures have been submitted directly on {{ doc|rfcspace|lstrip:"0" }}{% if iprs %}, + but there are disclosures on {% ifequal docs|length 2 %}a related document{% else %}related documents{% endifequal %}, listed on this page{% endif %}. + + {% endif %} diff --git a/ietf/templates/ipr/search_doctitle_result.html b/ietf/templates/ipr/search_doctitle_result.html index 73e1227a3..6f134deea 100644 --- a/ietf/templates/ipr/search_doctitle_result.html +++ b/ietf/templates/ipr/search_doctitle_result.html @@ -19,7 +19,7 @@ - IPR that is related to {{ doc|rfcspace|lstrip:"0"|rfcnospace }}, "{{ doc.title }}"{% if doc.related %}, {{ doc.relation }} {{ doc.related|rfcspace|lstrip:"0"|rfcnospace }}, "{{ doc.related.title }}"{% endif %}, + IPR that is related to {{ doc|rfcspace|lstrip:"0"|rfcnospace }}, "{{ doc.title|escape }}"{% if doc.related %}, {{ doc.relation }} {{ doc.related|rfcspace|lstrip:"0"|rfcnospace }}, "{{ doc.related.title }}"{% endif %}, which has the string "{{ q }}" within the document title. @@ -31,10 +31,10 @@ {% for item in ipr.updated_by.all %} {% ifequal item.processed 1 %} - IPR disclosure ID# {{ item.ipr.ipr_id }} "{{ item.ipr.title }}" Updates + IPR disclosure ID# {{ item.ipr.ipr_id }} "{{ item.ipr.title|escape }}" Updates {% endifequal %} {% endfor %} - "{{ ipr.title }}" + "{{ ipr.title|escape }}" {% endfor %} diff --git a/ietf/templates/ipr/search_error.html b/ietf/templates/ipr/search_error.html index f4b0c6d96..2601b7c58 100644 --- a/ietf/templates/ipr/search_error.html +++ b/ietf/templates/ipr/search_error.html @@ -1,17 +1,13 @@ -{# Copyright The IETF Trust 2007, All Rights Reserved #} {% extends "base.html" %} -{% block doctype %}{% endblock %} +{# Copyright The IETF Trust 2007, All Rights Reserved #} {% block title %}IPR Search Result Error{% endblock %} {% block content %} -
    -

    Error

    -

    - Your request was not processed due to the following error(s): -

    +

    IPR Search Result Error

    + +

    Your request was not processed due to the following error(s):

    {{ error }}

    -
    {% endblock %} diff --git a/ietf/templates/ipr/search_result.html b/ietf/templates/ipr/search_result.html index 03e327c70..9a36cffba 100644 --- a/ietf/templates/ipr/search_result.html +++ b/ietf/templates/ipr/search_result.html @@ -1,13 +1,11 @@ {# Copyright The IETF Trust 2007, All Rights Reserved #} {% extends "base.html" %} +{% block morecss %} +{% endblock %} {% block doctype %}{% endblock %} {% block title %}IPR Search Result{% endblock %} {% block content %} -
    -

    IPR Disclosures

    -
    -
    -
    +

    IPR Disclosures

    {% block search_result %} @@ -21,7 +19,7 @@ {% block iprlist %} {% for ipr in iprs %} - + - + {% for item in ipr.updates.all %} {% ifnotequal item ipr %} @@ -50,8 +48,8 @@ {% endifnotequal %} @@ -64,9 +62,8 @@ {% endblock %}

    - IPR Search Main Page
    - IPR Disclosure Page + IPR Search Main Page
    + IPR Disclosure Page
    - {% endblock %} diff --git a/ietf/templates/ipr/search_wg_result.html b/ietf/templates/ipr/search_wg_result.html index 99dff9594..d14dc9438 100644 --- a/ietf/templates/ipr/search_wg_result.html +++ b/ietf/templates/ipr/search_wg_result.html @@ -20,7 +20,7 @@ @@ -32,10 +32,10 @@ {% endfor %} diff --git a/ietf/templates/ipr/style.html b/ietf/templates/ipr/style.html deleted file mode 100644 index 17cb1b808..000000000 --- a/ietf/templates/ipr/style.html +++ /dev/null @@ -1,38 +0,0 @@ -{# Copyright The IETF Trust 2007, All Rights Reserved #} - diff --git a/ietf/templates/ipr/submitted.html b/ietf/templates/ipr/submitted.html index 4fcfb453c..979e1dc22 100644 --- a/ietf/templates/ipr/submitted.html +++ b/ietf/templates/ipr/submitted.html @@ -1,16 +1,12 @@ -{# Copyright The IETF Trust 2007, All Rights Reserved #} {% extends "base.html" %} +{# Copyright The IETF Trust 2007, All Rights Reserved #} {% block title %}IPR Submission{% endblock %} {% block content %} -
    - Your IPR disclosure has been submitted, and the IETF Secretariat has been notified.
    +

    Your IPR disclosure has been submitted, and the IETF Secretariat has been notified.

    -Please note that it may take one business day for your IPR disclosure to be posted
    -on the IETF Page of Intellectual Property Rights Notices.

    - - -

    - Back to IPR Disclosure Page
    +

    Please note that it may take one business day for your IPR disclosure to be posted +on the IETF Page of Intellectual Property Rights Notices.

    +

    Back to IPR Disclosure Page

    {% endblock %} diff --git a/ietf/templates/ipr/update.html b/ietf/templates/ipr/update.html index aee1140c3..f424e9d82 100644 --- a/ietf/templates/ipr/update.html +++ b/ietf/templates/ipr/update.html @@ -1,13 +1,30 @@ -{# Copyright The IETF Trust 2007, All Rights Reserved #} {% extends "base.html" %} +{# Copyright The IETF Trust 2007, All Rights Reserved #} {% block title %}IPR Update{% endblock %} + +{% block morecss %} +table.ipr { margin-top: 1em; } +.ipr .light td { background: #eeeeee; } +.ipr .dark td { background: #dddddd; } +.ipr th { background: #2647a0; color: white; } +.ipr { width: 101ex; border: 0; border-collapse: collapse; } +.ipr th, .ipr td { padding: 3px 6px; text-align: left; } +.ipr tr { vertical-align: top; } +.ipr td.iprlabel { width: 18ex; } +.iprdata { font-weight: bold; } + +.required { color: red; float: right; padding-top: 0.7ex; font-size: 130%; } + .errorlist { background: red; color: white; padding: 0.2ex 0.2ex 0.2ex 0.5ex; border: 0px; margin: 0px; font-family: Arial, sans-serif; } + ul.errorlist { margin: 0px; } + .errorlist { background: red; color: white; padding: 0.2ex 0.2ex 0.2ex 0.5ex; border: 0px; margin: 0px; font-family: Arial, sans-serif; } + ul.errorlist { margin: 0px; } +{% endblock %} + {% block content %} -{% include "ipr/style.html" %} +

    Updating {{ type|title }} IPR Disclosure
    {{ ipr.title|escape }}

    -

    Updating {{ type|title }} IPR Disclosures
    {{ ipr.title|escape }}

    - -
    + {% if form.errors %}

    There were errors in the submitted form -- see below. Please correct these and resubmit. @@ -21,28 +38,25 @@

    {% endif %} -
    -
    Total number of IPR disclosures found: {{ count }}
    {% block intro_prefix %}IPR that was submitted by {{ q }}, and{% endblock %} @@ -32,7 +30,7 @@ is related to {% for item in ipr.docs %} {% ifequal item ipr.last_draft %} and {% endifequal %} - {{ item.document }}, "{{ item.document.title }},"{% if item.document.related %}, {{ item.document.relation }} {{ item.document.related }}, "{{ item.document.related.title }}"{% endif %} + {{ item.document }}, "{{ item.document.title|escape }},"{% if item.document.related %}, {{ item.document.relation }} {{ item.document.related }}, "{{ item.document.related.title|escape }}"{% endif %} {% endfor %} {% endif %} {% endblock %} @@ -42,7 +40,7 @@
    {{ ipr.submitted_date }}
  • ID # {{ ipr.ipr_id }}
  • "{{ ipr.title }}""{{ ipr.title|escape }}"
    {{ item.updated.submitted_date }}
  • ID # {{ item.updated.ipr_id }}
  • - IPR disclosure ID# {{ ipr.ipr_id }} "{{ ipr.title }}" - Updates {{ item.updated.title }} + IPR disclosure ID# {{ ipr.ipr_id }} "{{ ipr.title|escape }}" + Updates {{ item.updated.title|escape }}
    - IPR that is related to {{ doc|rfcspace|lstrip:"0"|rfcnospace }}, "{{ doc.title }}"{% if doc.related %}, {{ doc.relation }} {{ doc.related|rfcspace|lstrip:"0"|rfcnospace }}, "{{ doc.related.title }}"{% endif %} + IPR that is related to {{ doc|rfcspace|lstrip:"0"|rfcnospace }}, "{{ doc.title|escape }}"{% if doc.related %}, {{ doc.relation }} {{ doc.related|rfcspace|lstrip:"0"|rfcnospace }}, "{{ doc.related.title|escape }}"{% endif %} which is a product of Working Group {{ q }}
    {% for item in ipr.updates.all %} {% ifequal item.updated.status 1 %} - IPR disclosure ID# {{ item.updated.ipr_id }}, "{{ item.updated.title }}" Updated by + IPR disclosure ID# {{ item.updated.ipr_id }}, "{{ item.updated.title|escape }}" Updated by {% endifequal %} {% endfor %} - "{{ ipr.title }}" + "{{ ipr.title|escape }}"
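(Aside: the hunks above mainly add the `escape` filter to user-supplied IPR titles before they are rendered. As an illustration only, not part of the patch, the stdlib snippet below shows the kind of transformation such escaping performs; Django's own filter is built on django.utils.html.escape.)

    # Illustration only: the effect of HTML-escaping an attacker-controlled title.
    import html

    title = 'Harmless title <script>alert("xss")</script>'
    print(html.escape(title))
    # -> Harmless title &lt;script&gt;alert(&quot;xss&quot;)&lt;/script&gt;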
    +
    - - - - - - - - + {% for field in form %} + {% ifnotequal field.name "update_auth" %} + + {% endifnotequal %} + {% endfor %}
    Contact Information for Submitter of this Update.
    Name: {{ form.name }}
    Title: {{ form.title }}
    Department: {{ form.department }}
    Address1: {{ form.address1 }}
    Address2: {{ form.address2 }}
    Telephone: {{ form.telephone }}
    Fax: {{ form.fax }}
    Email: {{ form.email }}
    {{field.label }}:{{ field.errors }} {% if field.field.required %}*{%endif%} {{ field }}
    -

    + {{ form.update_auth.errors }} + * {{ form.update_auth }} I am authorized to update this IPR disclosure, and I understand that notification of this update will be provided to the submitter of the original IPR disclosure and to the Patent Holder's Contact.

    - -  
    +

    +  
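(Aside: the replacement form above is rendered by looping over the form's fields instead of hand-writing each Name/Title/Department row, skipping the update_auth field, which is rendered separately with its authorization text, and starring required fields. A rough stand-alone sketch of that loop, using invented field objects rather than Django's real bound fields:)

    # Sketch only: mimics the "{% for field in form %}" loop with plain objects.
    from dataclasses import dataclass, field

    @dataclass
    class FakeField:                  # hypothetical stand-in for a Django bound field
        name: str
        label: str
        required: bool = False
        errors: list = field(default_factory=list)

    fields = [
        FakeField("name", "Name", required=True),
        FakeField("department", "Department"),
        FakeField("update_auth", "Authorization", required=True),
    ]

    for f in fields:
        if f.name == "update_auth":   # handled outside the loop, as in the template
            continue
        star = "*" if f.required else ""
        print(f"{f.label}: {' '.join(f.errors)} {star} <input name='{f.name}'>")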

    {% endblock %} diff --git a/ietf/templates/ipr/update_list.html b/ietf/templates/ipr/update_list.html deleted file mode 100644 index d4b85d015..000000000 --- a/ietf/templates/ipr/update_list.html +++ /dev/null @@ -1,70 +0,0 @@ -{# Copyright The IETF Trust 2007, All Rights Reserved #} -{% extends "base.html" %} -{% block title %}IPR Update{% endblock %} -{% block content %} - -

    IPR Disclosure Update Page

    -Click here to submit an IPR disclosure - -

    -

    Search the IPR Disclosures

    -

    Generic IPR Disclosures

    - -

    Specific IPR Disclosures

    -

    Specific Third Party IPR Disclosures

    -


    - - -

    Generic IPR Disclosures

    -Please select the IPR disclosure that you wish to update.
    - Note: Updates to IPR disclosures must only be made by authorized - representatives of the original submitters. Updates will automatically - be forwarded to the current Patent Holder's Contact and to the Submitter - of the original IPR disclosure.
    -

    - - - - {% for ipr in generic_disclosures %} - {% include "ipr/list_item.html" %} - {% endfor %} -
    Date SubmittedID #Title of IPR Disclosure
    - -

    Back to Top

    - -

    Specific IPR Disclosures

    -Please select the IPR disclosure that you wish to update.
    - Note: Updates to IPR disclosures must only be made by authorized - representatives of the original submitters. Updates will automatically - be forwarded to the current Patent Holder's Contact and to the Submitter - of the original IPR disclosure.
    -

    - - - - {% for ipr in specific_disclosures %} - {% include "ipr/list_item.html" %} - {% endfor %} -
    Date SubmittedID #Title of IPR Disclosure
    -
    -

    Back to Top

    - -

    Specific Third Party IPR Disclosures

    -Please select the IPR disclosure that you wish to update.
    - Note: Updates to IPR disclosures must only be made by authorized - representatives of the original submitters. Updates will automatically - be forwarded to the current Patent Holder's Contact and to the Submitter - of the original IPR disclosure.
    - -

    - - - - - {% for ipr in thirdpty_disclosures %} - {% include "ipr/list_item.html" %} - {% endfor %} -
    Date SubmittedID #Title of IPR Disclosure
    - -



    -{% endblock %} diff --git a/ietf/templates/liaisons/.gitignore b/ietf/templates/liaisons/.gitignore index c7013ced9..a74b07aee 100644 --- a/ietf/templates/liaisons/.gitignore +++ b/ietf/templates/liaisons/.gitignore @@ -1,2 +1 @@ /*.pyc -/settings_local.py diff --git a/ietf/templates/liaisons/field_help.html b/ietf/templates/liaisons/field_help.html index 22bc563b7..23f6bb30c 100755 --- a/ietf/templates/liaisons/field_help.html +++ b/ietf/templates/liaisons/field_help.html @@ -1,24 +1,26 @@ {# Copyright The IETF Trust 2007, All Rights Reserved #} -{% extends "base.html" %} +{% extends "base.html" %}{% load ietf_filters %} {% block title %}Liaison Statement Management Tool Field Help{% endblock %} {% block content %} -
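(Aside: several templates in this patch gain a `{% load ietf_filters %}` at the top, pulling in the project's custom filter library; one of those filters, clean_whitespace, is applied to slide names later in the diff. Purely as a generic sketch of how such a filter module is registered in Django, since the real ietf_filters implementation is not shown here and may differ:)

    # Generic sketch of a Django template-filter module, assuming Django is
    # importable; not the actual ietf_filters code.
    import re
    from django import template

    register = template.Library()

    @register.filter
    def clean_whitespace(value):
        # Illustrative guess at the behaviour: collapse runs of whitespace.
        return re.sub(r"\s+", " ", value).strip()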

    Liaison Statement Management Tool
    Field Help
    -
    -The table below provides descriptions of the fields included in the +

    Liaison Statement Management Tool
    Field Help

    + +

    The table below provides descriptions of the fields included in the liaison statement submission form, and suggestions for completing them, where appropriate. Additional materials that may be useful in -completing this form can be found in the following documents: -

    +completing this form can be found in the following documents:

    + -For definitive information on generating liaison statements, please see RFC 4053 (BCP 103) -"Procedures for Handling Liaison Statements to and from the IETF."

    + +

    For definitive information on generating liaison statements, please +see RFC 4053 (BCP 103) "Procedures for Handling Liaison Statements to and from the IETF."

    + diff --git a/ietf/templates/liaisons/guide_from_ietf.html b/ietf/templates/liaisons/guide_from_ietf.html index 2a7a57b34..3c56a15c7 100755 --- a/ietf/templates/liaisons/guide_from_ietf.html +++ b/ietf/templates/liaisons/guide_from_ietf.html @@ -1,17 +1,22 @@ {# Copyright The IETF Trust 2007, All Rights Reserved #} -{% extends "base.html" %} +{% extends "base.html" %}{% load ietf_filters %} {% block title %}Liaison Statements from the IETF - Guidelines for Completing the "Cc:" Field{% endblock %} {% block content %} -
    Liaison Statements from the IETF
    Guidelines for Completing the "Cc:" Field
    -
    -The individuals copied on a liaison statement that is sent BY the IETF to another Standards Development Organization (SDO) depend on the IETF entity that is sending the liaison statement. -The following table provides guidelines for completing the "Cc:" field of liaison statements that are sent by the IETF. -

    -For definitive information on generating liaison statements, please see:

    -
  • RFC 4052 (BCP 102), "IAB Processes for Management of IETF Liaison Relationships."
  • -
  • RFC 4053 (BCP 103), "Procedures for Handling Liaison Statements to and from the IETF."

  • +

    Liaison Statements from the IETF
    Guidelines for Completing the "Cc:" Field

    + +

    The individuals copied on a liaison statement that is sent BY the IETF to another Standards Development Organization (SDO) depend on the IETF entity that is sending the liaison statement. +The following table provides guidelines for completing the "Cc:" field of liaison statements that are sent by the IETF.

    + +

    For definitive information on generating liaison statements, please +see:

    + +
    Field
    @@ -36,19 +41,18 @@ For definitive information on generating liaison statements, please see:

    +
  • The IETF Working Group Chair(s) (if not the submitter)

    +
  • The IETF Area Director(s)

    +
  • The IETF Working Group Discussion List
  • - -{% include "mailinglists/list_summary.html" %} -{% if form.approver.is_hidden %} - -{% else %} -{{ form }} -{% endif %} -{% endblock %} diff --git a/ietf/templates/mailinglists/list_wizard_ListReqAuthorized.html b/ietf/templates/mailinglists/list_wizard_ListReqAuthorized.html deleted file mode 100644 index 63dea50d6..000000000 --- a/ietf/templates/mailinglists/list_wizard_ListReqAuthorized.html +++ /dev/null @@ -1,29 +0,0 @@ -{# Copyright The IETF Trust 2007, All Rights Reserved #} -{% extends "mailinglists/list_wizard_base.html" %} - -{% block mlform %} - -{% endblock %} diff --git a/ietf/templates/mailinglists/list_wizard_ListReqClose.html b/ietf/templates/mailinglists/list_wizard_ListReqClose.html deleted file mode 100644 index 0a3271e9b..000000000 --- a/ietf/templates/mailinglists/list_wizard_ListReqClose.html +++ /dev/null @@ -1,2 +0,0 @@ -{# Copyright The IETF Trust 2007, All Rights Reserved #} -{% include "mailinglists/list_wizard_MailingListForm.html" %} diff --git a/ietf/templates/mailinglists/list_wizard_MailingListForm.html b/ietf/templates/mailinglists/list_wizard_MailingListForm.html deleted file mode 100644 index 1fab29f65..000000000 --- a/ietf/templates/mailinglists/list_wizard_MailingListForm.html +++ /dev/null @@ -1,54 +0,0 @@ -{# Copyright The IETF Trust 2007, All Rights Reserved #} -{% extends "mailinglists/list_wizard_base.html" %} - -{# javascript to go with the list administrators multi-form #} -{% if form.admins %} -{% block head %} - -{% endblock %} -{% endif %} - -{% block mlform %} -{% for field in form %} -{% if field.is_hidden %} -{# we assume that the only hidden form is the domain name #} -{# so don't render anything #} -{% else %} - - - - -{% endif %} -{% endfor %} -{% endblock %} diff --git a/ietf/templates/mailinglists/list_wizard_base.html b/ietf/templates/mailinglists/list_wizard_base.html deleted file mode 100644 index 7eb7ba0f4..000000000 --- a/ietf/templates/mailinglists/list_wizard_base.html +++ /dev/null @@ -1,55 +0,0 @@ -{# Copyright The IETF Trust 2007, All Rights Reserved #} -{% extends "base.html" %} - -{% block title %}IETF Mailing List Form{% endblock %} - -{% block css %} -ul.errorlist { color: red; border: 1px solid red; } -th { text-align: left; vertical-align: top; font-weight: normal;} -{% block mlcss %}{% endblock %} -{% endblock %} - -{% block head %} - -{% endblock %} - -{% block content %} - -
    Liaison Statements FROM the IETF
    • An IETF Area
    • An IETF Working Group
    (1) Please see Section 4., "Approval and Transmission of Liaison Statements," -of RFC 4052 for information on who may submit a liaison statement on behalf of an IETF entity, and who should be copied.

    +of RFC 4052 for information on who may submit a liaison statement on behalf of an IETF entity, and who should be copied.

    (2) The IETF Secretariat, <statements@ietf.org>, is automatically blind-copied on every liaison statement sent by the IETF.

    (3) Any addresses included in the "Response Contact" and "Technical Contact" fields of a liaison statement will also receive copies of the liaison statement.

    (4) This guideline does not apply when sending a liaison statement to an SDO where no formal liaison relationship exists between the IETF and that SDO.

    diff --git a/ietf/templates/liaisons/guide_to_ietf.html b/ietf/templates/liaisons/guide_to_ietf.html index 5bfd9f0ea..13146324e 100755 --- a/ietf/templates/liaisons/guide_to_ietf.html +++ b/ietf/templates/liaisons/guide_to_ietf.html @@ -1,15 +1,15 @@ {# Copyright The IETF Trust 2007, All Rights Reserved #} -{% extends "base.html" %} +{% extends "base.html" %}{% load ietf_filters %} {% block title %}Liaison Statements to the IETF - Guidelines for Completing the "To:" and "Cc:" Fields{% endblock %} {% block content %} -
    Liaison Statements to the IETF
    Guidelines for Completing the "To:" and "Cc:" Fields
    -
    -The following table provides guidelines for completing the "To:" and "Cc:" fields of liaison statements that are sent TO the IETF by other Standards Development Organizations (SDOs). -For definitive information on generating liaison statements, please see RFC 4053 (BCP 103), -"Procedures for Handling Liaison Statements to and from the IETF."

    +

    Liaison Statements to the IETF
    Guidelines for Completing the "To:" and "Cc:" Fields

    + +

    The following table provides guidelines for completing the "To:" and "Cc:" fields of liaison statements that are sent TO the IETF by other Standards Development Organizations (SDOs). +For definitive information on generating liaison statements, please +see RFC 4053 (BCP 103) "Procedures for Handling Liaison Statements to and from the IETF."

    @@ -37,16 +37,16 @@ For definitive information on generating liaison statements, please see RFC 4053
  • The IESG <iesg@ietf.org>
  • - + - + +
  • The IETF Area Director(s)

    +
  • The IETF Working Group Discussion List
  • - - - -{% ifequal req "delete" %} - -{% else %} - - - - - - - - - - - -{% endifequal %} diff --git a/ietf/templates/mailinglists/list_summary.txt b/ietf/templates/mailinglists/list_summary.txt deleted file mode 100644 index 270548fd2..000000000 --- a/ietf/templates/mailinglists/list_summary.txt +++ /dev/null @@ -1,39 +0,0 @@ -Requestor: {{ list.requestor }} - -Requestor's email address: {{ list.requestor_email }} - -Email list name: {{ list.mlist_name }}@{{ list.domain }} - -{% ifequal req "delete" %} -Reason for closing list: {{ list.reason_to_delete }} -{% else %} -Short description of the email list: -{{ list.short_desc }} - -Long description of the email list: -{{ list.long_desc }} - -Administrator(s): -{{ list.admins }} - -Email address(es) of the initial subscriber(s) (optional): -{{ list.initial_members }} - -Welcome message for initial subscriber(s) (optional): -{{ list.welcome_message }} - -Welcome message for new subscriber(s) (optional): -{% firstof list.welcome_new "" %} - -Required steps for subscription: {{ list.get_subscription_display }} - -Messages to this list can be posted by: {{ list.get_post_who_display }} - -Administrator approval required for posts: {{ list.post_admin|yesno:"YES,NO" }} - -Private Archive: {{ list.archive_private|yesno:"YES,NO" }} - -Specific information about how to access and move the existing archive from a remote location (optional): -{{ list.archive_remote }} - -{% endifequal %} diff --git a/ietf/templates/mailinglists/list_toapprove.html b/ietf/templates/mailinglists/list_toapprove.html deleted file mode 100644 index d3cf139a6..000000000 --- a/ietf/templates/mailinglists/list_toapprove.html +++ /dev/null @@ -1,52 +0,0 @@ -{# Copyright The IETF Trust 2007, All Rights Reserved #} -{% extends "base.html" %} - -{% block title %}Confirmation of request to {% filter escape %}{% include "mailinglists/list_type_message2.txt" %}{% endfilter %}{% endblock %} - -{% block css %} -table { - margin:0; - padding:0; - font-family: Verdana, Arial, sans-serif; - font-size: 13px; - color: #022D66; - font-style: normal; - } -th { - font-weight: normal; - text-align: left; -} -{% endblock %} - -{% block content %} -{% if list.approved %} - This mailing list request has already been - {% ifequal list.approved 1 %} - approved and sent to the IETF Secretariat. - {% else %} - denied and the requestor has been notified. - {% endifequal %} -{% else %} - -
    • An IETF Area
    • An IETF Working Group
    (1) The IETF Secretariat, <statements@ietf.org>, is automatically blind-copied on every liaison statement sent to the IETF.
    (2) Any addresses included in the "Response Contact" and diff --git a/ietf/templates/liaisons/help.html b/ietf/templates/liaisons/help.html index cf2a672cf..e5bf792db 100644 --- a/ietf/templates/liaisons/help.html +++ b/ietf/templates/liaisons/help.html @@ -4,7 +4,7 @@ {% block title %}Liaison Statement Management Tool Help{% endblock %} {% block content %} -
    Liaison Statement Management Tool
    Help
    +

    Liaison Statement Management Tool Help

    Request to {% filter escape %}{% include "mailinglists/list_type_message2.txt" %}{% endfilter %}
    Requestor: {{ list.requestor|escape }}
    Requestor's email address: {{ list.requestor_email|urlize }}
    Email list name: {{ list.mlist_name }}@{{ list.domain }}
    Reason for closing list: {{ list.reason_to_delete|escape|linebreaksbr }}
    Short description of the email list: {{ list.short_desc|escape }}
    Long description of the email list: {{ list.long_desc|escape }}
    Administrator(s):
    {{ list.admins|escape }}
    Email address(es) of initial subscriber(s) (optional):
    {{ list.initial_members|escape }}
    Welcome message for initial subscriber(s) (optional): {{ list.welcome_message|linebreaksbr }}
    Welcome message for new subscriber(s) (optional): {% filter linebreaksbr %}{% firstof list.welcome_new "" %}{% endfilter %}
    Required steps for subscription: {{ list.get_subscription_display }}
    Messages to this list can be posted by:{{ list.get_post_who_display }}
    Administrator approval required for posts: {{ list.post_admin|yesno:"YES,NO" }}
    Private Archive: {{ list.archive_private|yesno:"YES,NO" }}
    Specific information about how to access and move the exiting archive from a remote location (optional): {{ list.archive_remote }}
    -
    - - - - - - -{% include "mailinglists/list_summary.html" %} -{{ form }} - -
    - -
    - -


    -
    - - - -{% endif %} -{% endblock %} diff --git a/ietf/templates/mailinglists/list_type_message.txt b/ietf/templates/mailinglists/list_type_message.txt deleted file mode 100644 index 48310a7de..000000000 --- a/ietf/templates/mailinglists/list_type_message.txt +++ /dev/null @@ -1,25 +0,0 @@ -{% ifequal list.mail_type 1 %} -Creation of the WG email list <{{ list.mlist_name }}@ietf.org> -{% else %} -{% ifequal list.mail_type 2 %} -Movement of the WG email list for {{ list.mlist_name }} to ietf.org -{% else %} -{% ifequal list.mail_type 3 %} -Movement of the non-WG email list {{ list.mlist_name }} to {{ list.domain }} -{% else %} -{% ifequal list.mail_type 4 %} -Creation of the non-WG email list <{{ list.mlist_name }}@{{ list.domain }}> -{% else %} -{% ifequal list.mail_type 5 %} -Closing the WG email list <{{ list.mlist_name }}@ietf.org> -{% else %} -{% ifequal list.mail_type 6 %} -Closing the non-WG email list <{{ list.mlist_name }}@{{ list.domain }}> -{% else %} -** programming error ** -{% endifequal %} -{% endifequal %} -{% endifequal %} -{% endifequal %} -{% endifequal %} -{% endifequal %} diff --git a/ietf/templates/mailinglists/list_type_message2.txt b/ietf/templates/mailinglists/list_type_message2.txt deleted file mode 100644 index 67712926c..000000000 --- a/ietf/templates/mailinglists/list_type_message2.txt +++ /dev/null @@ -1,25 +0,0 @@ -{% ifequal list.mail_type 1 %} -create the WG email list <{{ list.mlist_name }}@ietf.org> -{% else %} -{% ifequal list.mail_type 2 %} -move the WG email list for {{ list.mlist_name }} to ietf.org -{% else %} -{% ifequal list.mail_type 3 %} -move the non-WG email list {{ list.mlist_name }} to {{ list.domain }} -{% else %} -{% ifequal list.mail_type 4 %} -create the non-WG email list <{{ list.mlist_name }}@{{ list.domain }}> -{% else %} -{% ifequal list.mail_type 5 %} -close the WG email list <{{ list.mlist_name }}@ietf.org> -{% else %} -{% ifequal list.mail_type 6 %} -close the non-WG email list <{{ list.mlist_name }}@{{ list.domain }}> -{% else %} -** programming error ** -{% endifequal %} -{% endifequal %} -{% endifequal %} -{% endifequal %} -{% endifequal %} -{% endifequal %} diff --git a/ietf/templates/mailinglists/list_wizard.html b/ietf/templates/mailinglists/list_wizard.html deleted file mode 100644 index 331154685..000000000 --- a/ietf/templates/mailinglists/list_wizard.html +++ /dev/null @@ -1,6 +0,0 @@ -{# Copyright The IETF Trust 2007, All Rights Reserved #} -{% extends "mailinglists/list_wizard_base.html" %} - -{% block mlform %} -{{ form }} -{% endblock %} diff --git a/ietf/templates/mailinglists/list_wizard_ListApproval.html b/ietf/templates/mailinglists/list_wizard_ListApproval.html deleted file mode 100644 index a5f40704d..000000000 --- a/ietf/templates/mailinglists/list_wizard_ListApproval.html +++ /dev/null @@ -1,14 +0,0 @@ -{# Copyright The IETF Trust 2007, All Rights Reserved #} -{% extends "mailinglists/list_wizard_base.html" %} - -{% block mlform %} -

    Preview your Request

    -

    You will be the approver for this request.{{ form.approver }}
    -{% if form.authorized.errors %} -
      -{% for error in form.authorized.errors %} -
    • {{ error|escape }}
    • -{% endfor %} -
    -{% endif %} -{{ form.authorized }} - -{% ifequal form0.domain_name.data "iab.org" %} -I am a member of the IAB or am otherwise authorized to create or -close a mailing list at iab.org, or to move an existing list to -this domain. -{% else %} -{% ifequal form0.domain_name.data "irtf.org" %} -I am an active participant in the IRTF, or am otherwise authorized -to create or close a mailing list at irtf.org, or to move an existing -list to this domain. -{% else %} -I am authorized to perform this action. -{% endifequal %} -{% endifequal %} -
    {{ field.label_tag }}: -{% ifequal field.name "short_desc" %} - -{% endifequal %} -{% ifequal field.name "long_desc" %} - -{% endifequal %} - -{% if field.errors %} -
      {% for error in field.errors %}
    • {{ error|escape }}
    • {% endfor %}
    -{% endif %} -{% ifequal field.name "mlist_name" %} -{% if mlist_known %}{# if we know the mailing list name already #} -{{ form.initial.mlist_name }}@{{ form.initial.domain_name }}{{ field.as_hidden }} -{% else %} -{{ field }}@{{ form.initial.domain_name }} -{% endif %} -{{ form.domain_name.as_hidden }} -{% else %} -{{ field }} -{% endifequal %} -
    - -
    - - - - - - - -
    - -
    - -
    -
    - -

    Step {{ step|add:"1" }}:

    - -{% block mlform %} -form goes here -{% endblock %} -
    - - - - - {{ previous_fields }} - - -
    - - -
    - -{% endblock %} diff --git a/ietf/templates/mailinglists/list_wizard_done.html b/ietf/templates/mailinglists/list_wizard_done.html deleted file mode 100644 index 2b9b16492..000000000 --- a/ietf/templates/mailinglists/list_wizard_done.html +++ /dev/null @@ -1,26 +0,0 @@ -{# Copyright The IETF Trust 2007, All Rights Reserved #} -{% extends "mailinglists/list_approval_base.html" %} - -{% block innercontent %} -{% if requestor_is_approver %} -An email has been sent to you so you can confirm this request.
    -Please note that after your request has been approved by you,
    -{% else %} -Your request to {% filter escape %}{% include "mailinglists/list_type_message2.txt" %}{% endfilter %} -has been sent to
    -{{ list.auth_person }} for approval.
    -Please note that after your request has been approved by
    -{{ list.auth_person }}, -{% endif %} -{% ifequal req "delete" %} -your list will be closed within two business days.
    -{% else %} -your list will be created and the archives
    -will be tested.
    -You will receive a welcome E-mail containing your administrator's
    -password within two business days.
    -For security reasons we suggest that you change this password.
    -Please remember to forward this changed password to any other list
    -admins.
    -{% endifequal %} -{% endblock %} diff --git a/ietf/templates/mailinglists/list_wizard_done_email.txt b/ietf/templates/mailinglists/list_wizard_done_email.txt deleted file mode 100644 index d2a0c75fd..000000000 --- a/ietf/templates/mailinglists/list_wizard_done_email.txt +++ /dev/null @@ -1,8 +0,0 @@ -{% load ietf_filters %}{% filter wordwrap:"72" %} -The Secretariat has received a request from {{ list.requestor }} -to {% filter allononeline %}{% include "mailinglists/list_type_message2.txt" %}{% endfilter %}. -Please use the following URL to review and approve or deny this request: -{# https is only for production #}https://{{ site.domain }}{% url ietf.mailinglists.views.list_approve list.mailing_list_id %} -{% endfilter %} - -{% include "mailinglists/list_summary.txt" %} diff --git a/ietf/templates/mailinglists/list_wizard_step0.html b/ietf/templates/mailinglists/list_wizard_step0.html deleted file mode 100644 index fbb5b4f55..000000000 --- a/ietf/templates/mailinglists/list_wizard_step0.html +++ /dev/null @@ -1,85 +0,0 @@ -{# Copyright The IETF Trust 2007, All Rights Reserved #} -{% extends "mailinglists/list_wizard_base.html" %} - -{% block head %} - -{% endblock %} - -{% block body_attributes %}onLoad="activate_widgets()"{% endblock %} - -{% block mlform %} -

    WG email list -                       -{{ form.group }} -{% if form.group.errors %} -
      -{% for error in form.group.errors %} -
    • {{ error|escape }}
    • -{% endfor %} -
    -{% endif %} -

    - -{% if form.mail_type.errors %} -
      -{% for error in form.mail_type.errors %} -
    • {{ error|escape }}
    • -{% endfor %} -
    -{% endif %} -{{ form.mail_type_fields.0 }}
    -{{ form.mail_type_fields.1 }}
    -{{ form.mail_type_fields.2 }}
    - - -

    Non-WG email list

    -Select Domain Name: {{ form.domain_name }}
    -{{ form.mail_type_fields.3 }}
    -{{ form.mail_type_fields.4 }}
    -{{ form.mail_type_fields.5 }}
    -{{ form.list_to_close }} -{% if form.list_to_close.errors %} -
      -{% for error in form.list_to_close.errors %} -
    • {{ error|escape }}
    • -{% endfor %} -
    -{% endif %} -
    - - -Note: Only members of the IAB (or their designees) and active -participants in the IRTF may create or close a mailing list at iab.org and -irtf.org, respectively, or move an existing list to one of these domains." -

    - - -{% endblock %} diff --git a/ietf/templates/mailinglists/list_wizard_subject.txt b/ietf/templates/mailinglists/list_wizard_subject.txt deleted file mode 100644 index a782abc49..000000000 --- a/ietf/templates/mailinglists/list_wizard_subject.txt +++ /dev/null @@ -1 +0,0 @@ -Request to {% include "mailinglists/list_type_message2.txt" %} diff --git a/ietf/templates/mailinglists/nonwgmailinglist_list.html b/ietf/templates/mailinglists/nonwgmailinglist_list.html deleted file mode 100644 index e4d717b9b..000000000 --- a/ietf/templates/mailinglists/nonwgmailinglist_list.html +++ /dev/null @@ -1,132 +0,0 @@ -{# Copyright The IETF Trust 2007, All Rights Reserved #} -{% load ietf_filters %} - -IETF Non-WG Mailing Lists - - - - -
    -

    IETF Non-WG Mailing Lists

    -

    This page attempts to list all the active, publicly visible lists that are -considered to be related to the IETF, but are not the main list of any working -group, in alphabetical order by list name.

    -

    It includes lists that are for discussion of particular topics, lists -that have belonged to former working groups but are still active, lists that -are specified in the registration rules of some parameter registry as -"forreview," and others of less clear classification.

    -

    Mail sent to all these lists is considered an "IETF Contribution" as defined -in RFC 3978, section 1., point C. (formerly known as the NOTE WELL -statement).

    - -

    Details of adding lists to this page can be found at the bottom of the -page.

    - - - - - - -{% for list in object_list %} - - - - - - - - -{% endfor %} - -
    List NameList Administrator(s)PurposeAreaTo Subscribe
    {% firstof list.list_name list.list_url %}{{ list.admin|make_one_per_line }}{{ list.purpose|truncatewords:"20"|escape }}{{ list.area }}{% if list.subscribe_url %} -{# There are lots of non-urls in the database #} -{{ list.subscribe_url|link_if_url }} -{% else %} -  -{% endif %} -{% if list.subscribe_other %} -
    -{{ list.subscribe_other }} -{% endif %} -
    - -

    To add a list, or to update or delete an existing entry, please go to the -IETF Non-WG Mailing List Posting Page. When adding a list, please provide the following information:

    -
      -
    • The name and email address of the submitter -
    • The name of the list -
    • The URL or email address of the list -
    • The name(s) and email address(es) of the list administrator(s) -
    • The purpose of the list -
    • The IETF Area to which the list belongs
    • -
    • The URL or other instructions for subscribing to the list
    - -

    An AD for the specified Area must approve the request to add, update, or delete a listing. -An AD can also request that the listing be changed or removed.

    -

    The following types of lists SHOULD be added:

    -
      -
    • Directorates (see also the list of - directorates) -
    • Review lists specified in IANA procedures -
    • Lists of former WGs that wish to remain active -
    • Lists that are created with the intent to form a working group (with AD - approval) -
    • Lists that are created for a subtask of a working group
    -

    The following types of lists SHOULD NOT be added:

    - -

    These guidelines are intended to make the list useful and easy to -maintain.

    -

    This list is maintained by the IETF Secretariat. The guidelines are set by -the IESG.

    - -

     

    -

     

    -

     

    -
    - - diff --git a/ietf/templates/mailinglists/nwg_wizard.html b/ietf/templates/mailinglists/nwg_wizard.html deleted file mode 100644 index 47d50f89b..000000000 --- a/ietf/templates/mailinglists/nwg_wizard.html +++ /dev/null @@ -1,25 +0,0 @@ -{# Copyright The IETF Trust 2007, All Rights Reserved #} -{% extends "mailinglists/nwg_wizard_base.html" %} - -{% block nwgcontent %} -View Current list
    -

    -

    Step {{ step|add:"1" }}

    -
    - -{{ previous_fields }} - -
    - - - -{{ form }} -
    -

    Please select approving Area Director:

    -
    -
    - -
    - - -{% endblock %} diff --git a/ietf/templates/mailinglists/nwg_wizard_addedit_step1.html b/ietf/templates/mailinglists/nwg_wizard_addedit_step1.html deleted file mode 100644 index 5d43f3084..000000000 --- a/ietf/templates/mailinglists/nwg_wizard_addedit_step1.html +++ /dev/null @@ -1,29 +0,0 @@ -{# Copyright The IETF Trust 2007, All Rights Reserved #} -{% extends "mailinglists/nwg_wizard_base.html" %} - -{% block nwgcss %} -tr > th { text-align: left; vertical-align: top; } -{% endblock %} - -{% block nwgcontent %} -

    Step {{ step|add:"1" }}

    -

    Please provide the following information:

    - -
    - - -
    - -{{ form }} - - - - -
    - -
    -
    -{{ previous_fields }} - -
    -{% endblock %} diff --git a/ietf/templates/mailinglists/nwg_wizard_addedit_step3.html b/ietf/templates/mailinglists/nwg_wizard_addedit_step3.html deleted file mode 100644 index a22d0b41f..000000000 --- a/ietf/templates/mailinglists/nwg_wizard_addedit_step3.html +++ /dev/null @@ -1,47 +0,0 @@ -{# Copyright The IETF Trust 2007, All Rights Reserved #} -{% extends "mailinglists/nwg_wizard_base.html" %} - -{% block nwgcontent %} -

    Step {{ step|add:"1" }}

    -

    Please verify the following information:

    -
    - -
    - - - - - - - - - - - - - - - - - - - -
    -

    Request Submit Confirmation

    -Please review the following information that you are about to submit.
    -Once you click the 'Submit' button below, this request will be sent to -the selected Area Director for approval.
    -
    -
    Request Type: -{% ifequal clean_forms.0.add_edit.data "add" %} -Adding a new entry -{% else %} -Editing an existing entry -{% endifequal %}
    Submitter's Name:{{ clean_forms.1.s_name.data|escape }}
    Submitter's Email Address:{{ clean_forms.1.s_email.data|escape }}
    Mailing List Name:{{ clean_forms.1.list_name.data|escape }}
    URL or Email Address of Mailing List:
    {{ clean_forms.1.list_url.data|escape }}
    URL to Subscribe:
    {% firstof clean_forms.1.subscribe_url.data "Not Applicable" %}
    Other Info. to Subscribe:
    {{ clean_forms.1.subscribe_other.data|escape }}
    Administrator(s)' Email Address(es):
    {{ clean_forms.1.admin.data|escape|linebreaks }}
    Purpose: {{ clean_forms.1.purpose.data|escape }}
    Area:
    {{ area }}
    Approving Area Director:
    {{ approver|escape }}
    - -
    -
    -{{ previous_fields }} - -
    -{% endblock %} diff --git a/ietf/templates/mailinglists/nwg_wizard_base.html b/ietf/templates/mailinglists/nwg_wizard_base.html deleted file mode 100644 index d7e300d9a..000000000 --- a/ietf/templates/mailinglists/nwg_wizard_base.html +++ /dev/null @@ -1,26 +0,0 @@ -{# Copyright The IETF Trust 2007, All Rights Reserved #} -{% extends "base.html" %} - -{% block title %}IETF Non WG Mailing List Submit Form{% endblock %} - -{% block css %} -ul.errorlist { color: red; border: 1px solid red; } -{% block nwgcss %}{% endblock %} -{% endblock %} -{% block head %} -{% block javascript %}{% endblock %} - -{% endblock %} - -{% block content %} -
    -
    - - -
    -{% block nwgcontent %} -form goes here -{% endblock %} -
    - -{% endblock %} diff --git a/ietf/templates/mailinglists/nwg_wizard_delete_step2.html b/ietf/templates/mailinglists/nwg_wizard_delete_step2.html deleted file mode 100644 index a3044d1a5..000000000 --- a/ietf/templates/mailinglists/nwg_wizard_delete_step2.html +++ /dev/null @@ -1,45 +0,0 @@ -{# Copyright The IETF Trust 2007, All Rights Reserved #} -{% extends "mailinglists/nwg_wizard_base.html" %} - -{% block nwgcontent %} -

    Step {{ step|add:"1" }}

    -

    Please verify the following information:

    -
    - -
    - - - - - - - - - - - - - - - - - - - - -
    -

    Request Submit Confirmation

    -Please review the following information that you are about to submit.
    -Once you click the 'Submit' button below, this request will be sent to -the selected Area Director for approval.
    -
    -
    Request Type: -Deleting an existing entry -
    Submitter's Name:{{ clean_forms.1.ds_name.data|escape }}
    Submitter's Email Address:{{ clean_forms.1.ds_email.data|escape }}
    Mailing List Name:{{ list_q.list_name|escape }}
    URL or Email Address of Mailing List:
    {{ list_url|escape }}
    URL to Subscribe:
    {% firstof list_q.subscribe_url "Not Applicable" %}
    Other Info. to Subscribe:
    {{ list_q.subscribe_other|escape }}
    Administrator(s)' Email Address(es):
    {{ list_q.admin|escape|linebreaks }}
    Purpose: {{ list_q.purpose|escape }}
    Area:
    {{ list_q.area }}
    Approving Area Director:
    {{ approver|escape }}
    Message to AD:
    {{ clean_forms.1.msg_to_ad.data|escape }}
    - -
    -
    -{{ previous_fields }} - -
    -{% endblock %} diff --git a/ietf/templates/mailinglists/nwg_wizard_done.html b/ietf/templates/mailinglists/nwg_wizard_done.html deleted file mode 100644 index d4c173869..000000000 --- a/ietf/templates/mailinglists/nwg_wizard_done.html +++ /dev/null @@ -1,43 +0,0 @@ -{# Copyright The IETF Trust 2007, All Rights Reserved #} -{% extends "mailinglists/nwg_wizard_base.html" %} - -{% block nwgcontent %} -View Current list
    -

    -

    -Your request to {% ifequal add_edit "edit" %}edit{% else %}{% ifequal add_edit "add" %}add{% else %}delete{% endifequal %}{% endifequal %} the entry for the Non-WG Mailing List that is described below has been sent to the following Area Director for approval:

    -

    -{{ approver|escape }} -

    - -{% if old %} - - - - - - - - - - - - -{% endif %} - - - - - - - - - -{% ifequal add_edit "delete" %} - -{% endifequal %} - - -
    Current Entry
    Submitter's Name: {{ old.s_name }}
    Submitter's Email Address: {{ old.s_email }}
    List Name: {{ old.list_name }}
    URL or Email Address of Mailing List: {{ old.list_url }}
    URL to Subscribe: {{ old.subscribe_url }}
    Other Info. to Subscribe: {{ old.subscribe_other }}
    Administrator(s)' Email Address(es): {{ old.admin }}
    Purpose: {{ old.purpose }}
    Area: {{ old.area }}
     
    Revised Entry
    Submitter's Name: {% ifequal add_edit "delete" %}{{ list.ds_name }}{% else %}{{ list.s_name }}{% endifequal %}
    Submitter's Email Address: {% ifequal add_edit "delete" %}{{ list.ds_name }}{% else %}{{ list.s_email }}{% endifequal %}
    Mailing List Name: {{ list.list_name }}
    URL or Email Address of Mailing List: {{ list.list_url }}
    URL to Subscribe: {{ list.subscribe_url }}
    Other Info. to Subscribe: {{ list.subscribe_other }}
    Administrator(s)' Email Address(es): {{ list.admin }}
    Purpose: {{ list.purpose }}
    Area: {{ list.area }}
    Message from submitter: {{ list.msg_to_ad }}
     
    Approving Area Director: {{ approver|escape }}
    - -{% endblock %} diff --git a/ietf/templates/mailinglists/nwg_wizard_done_email.txt b/ietf/templates/mailinglists/nwg_wizard_done_email.txt deleted file mode 100644 index 74cb0aca2..000000000 --- a/ietf/templates/mailinglists/nwg_wizard_done_email.txt +++ /dev/null @@ -1,36 +0,0 @@ -The Secretariat has received a request to {% ifequal add_edit "edit" %}edit an existing entry on{% else %}{% ifequal add_edit "add" %}add a new entry to{% else %}delete an existing entry from{% endifequal %}{% endifequal %} -the "IETF Non-WG Mailing Lists" Web Page, https://datatracker.ietf.org/list/nonwg/ -The details of the request are provided below. - -Please approve or deny this request via the "IETF Non-WG Mailing List Approval Page," -https://datatracker.ietf.org/cgi-bin/nwg_list_approve.cgi?id={{ list.id }}&old_id={% firstof old.id "0" %} -You can use the same user name and password that you use with the I-D Tracker. -------------------------- -{% if old %} -Current Entry: - -Submitter's Name: {{ old.s_name }} -Submitter's Email Address: {{ old.s_email }} -List Name: {{ old.list_name }} -URL or Email Address of Mailing List: {{ old.list_url }} -URL to Subscribe: {{ old.subscribe_url }} -Other Info. to Subscribe: {{ old.subscribe_other }} -Administrator(s)' Email Address(es): {{ old.admin }} -Purpose: {{ old.purpose }} -Area: {{ old.area }} - -Revised Entry: -{% endif %} -Submitter's Name: {% ifequal add_edit "delete" %}{{ list.ds_name }}{% else %}{{ list.s_name }}{% endifequal %} -Submitter's Email Address: {% ifequal add_edit "delete" %}{{ list.ds_name }}{% else %}{{ list.s_email }}{% endifequal %} -Mailing List Name: {{ list.list_name }} -URL or Email Address of Mailing List: {{ list.list_url }} -URL to Subscribe: {{ list.subscribe_url }} -Other Info. to Subscribe: {{ list.subscribe_other }} -Administrator(s)' Email Address(es): {{ list.admin }} -Purpose: {{ list.purpose }} -Area: {{ list.area }} -{% ifequal add_edit "delete" %} -Message from submitter: -{{ list.msg_to_ad }} -{% endifequal %} diff --git a/ietf/templates/mailinglists/nwg_wizard_step0.html b/ietf/templates/mailinglists/nwg_wizard_step0.html deleted file mode 100644 index e996cd3f4..000000000 --- a/ietf/templates/mailinglists/nwg_wizard_step0.html +++ /dev/null @@ -1,51 +0,0 @@ -{# Copyright The IETF Trust 2007, All Rights Reserved #} -{% extends "mailinglists/nwg_wizard_base.html" %} -{% block javascript %} - -{% endblock %} - -{% block nwgcontent %} -

    Please use this Web tool to add a new entry to the IETF Non-WG Mailing Lists Web page, to update the information on an existing entry, or to delete an existing entry.

    -View Current list
    - -

    -

    Step 1

    -

    Please select one:

    -
    -{{ form.add_edit_fields.0 }}
    -{{ form.add_edit_fields.1 }}{{ form.list_id }}
    -{{ form.add_edit_fields.2 }}{{ form.list_id_delete }}
    - - -
    -{% endblock %} diff --git a/ietf/templates/mailinglists/nwg_wizard_subject.txt b/ietf/templates/mailinglists/nwg_wizard_subject.txt deleted file mode 100644 index 8a6331bb8..000000000 --- a/ietf/templates/mailinglists/nwg_wizard_subject.txt +++ /dev/null @@ -1,9 +0,0 @@ -{% ifequal add_edit "edit" %} -Request to Edit an Existing Entry on the Non-WG Mailing List Web Page -{% else %} - {% ifequal add_edit "add" %} - Request to Add a New Entry to the Non-WG Mailing List Web Page - {% else %} - Request to Delete an Existing Entry on the Non-WG Mailing List Web Page - {% endifequal %} -{% endifequal %} diff --git a/ietf/templates/mailinglists/wgwebmail_list.html b/ietf/templates/mailinglists/wgwebmail_list.html new file mode 100644 index 000000000..5f1471f9c --- /dev/null +++ b/ietf/templates/mailinglists/wgwebmail_list.html @@ -0,0 +1,31 @@ +{% extends "base.html" %} +{# Copyright The IETF Trust 2008, All Rights Reserved #} + +{% block title %}Web-based Working Group E-mail Archives{% endblock %} + +{% block content %} +

    Web-based Working Group E-mail Archives

    + +

    These links to the Web-based working group e-mail archives are + extracted from the working group charters. Please consult the + charters for more information about the mailing lists and archives + of specific working groups. Charters for active working groups are + available on + the Active + IETF Working Groups Web page. Charters for concluded working + groups are available on + the Concluded + Working Groups Web page.

    + + + + + +{% for wg in object_list|dictsort:"group_acronym.acronym" %} + + + + +{% endfor %} +
    AcronymName
    {{ wg|escape }}{{ wg.group_acronym.name|escape }}
    +{% endblock %} diff --git a/ietf/templates/meeting/.gitignore b/ietf/templates/meeting/.gitignore new file mode 100644 index 000000000..a74b07aee --- /dev/null +++ b/ietf/templates/meeting/.gitignore @@ -0,0 +1 @@ +/*.pyc diff --git a/ietf/templates/meeting/agenda.html b/ietf/templates/meeting/agenda.html index c546509f8..16b814fcb 100644 --- a/ietf/templates/meeting/agenda.html +++ b/ietf/templates/meeting/agenda.html @@ -1,97 +1,120 @@ -{# Copyright The IETF Trust 2007, All Rights Reserved #} {% extends "base.html" %} +{# Copyright The IETF Trust 2007, All Rights Reserved #} {% load humanize %} -{% block title %} Meeting Agenda of the {{ meeting_num|ordinal }} IETF Meeting{% endblock %} -{% block body_attributes %}background="http://www.ietf.org/meetings/peachbkg.gif"{% endblock %} +{% block title %}IETF {{ meeting.num }} Meeting Agenda{% endblock %} + +{% block morecss %} +table#agenda { border: 0; border-collapse:collapse; } +#agenda td { padding-right:2em; } +#agenda tr.meeting-date td { padding-top:1em; padding-right:0;} + +.ietf-agenda-palette { border-collapse:collapse; border:2px solid black; background:white; overflow:hidden; } +.ietf-agenda-palette td { border:1px solid black; } +.ietf-agenda-palette td { padding: 4px; text-align:center;} +.ietf-agenda-palette td a { text-decoration:none; } +.bgnone {} +.bgaqua, .bgaqua a { background-color: aqua; color: black; } +.bgblue, .bgblue a { background-color: blue; color: black; } +.bgfuchsia, .bgfuchsia a { background-color: fuchsia; color: black; } +.bggray, .bggray a { background-color: gray; color: white; } +.bggreen, .bggreen a { background-color: green; color: white; } +.bglime, .bglime a { background-color: lime; color: black; } +.bgmaroon, .bgmaroon a { background-color: maroon; color: white; } +.bgnavy, .bgnavy a { background-color: navy; color: white; } +.bgolive, .bgolive a { background-color: olive; color: white; } +.bgpurple, .bgpurple a { background-color: purple; color: white; } +.bgred, .bgred a { background-color: red; color: black; } +.bgsilver, .bgsilver a { background-color: silver; color: black; } +.bgteal, .bgteal a { background-color: teal; color: white; } +.bgwhite, .bgwhite a { background-color: white; color: black; } +.bgyellow, .bgyellow a { background-color: yellow; color: black; } +.bgblack, .bgblack a { background-color: black; color: white; } +{% endblock morecss %} + +{% block pagehead %} + +{% endblock pagehead %} +{% block bodyAttrs %}onload='updateAgendaColors()'{% endblock %} {% block content %} +

    IETF {{ meeting.num }} Meeting Agenda

    +

    {{ meeting.city }}, {{ meeting.start_date|date:"F j" }}-{% ifnotequal meeting.start_date.month meeting.end_date.month %}{{ meeting.end_date|date:"F " }}{% endifnotequal %}{{ meeting.end_date|date:"j, Y" }}
    +Updated {{ update.updated|date:"Y-m-d H:i:s T" }}
    +(There's also a plaintext agenda and a tools-style agenda available)

    -
    - Agenda of the {{ meeting_num|ordinal }} IETF Meeting
    -{{ meeting_info.start_date|date:"F j" }}-{% ifnotequal meeting_info.start_date|date:"F" meeting_info.end_date|date:"F" %}{{ meeting_info.end_date|date:"F " }}{% endifnotequal %}{{ meeting_info.end_date|date:"j, Y" }}
    -Updated as of {{ last_update_info.updated_date }} {{ last_update_info.updated_time }} (ET) -
    +

    IETF agendas are subject to change, up to and during the meeting.

    -( Text Format of the Agenda )
    -

    -*** Click on an acronym of the group to get a charter page ***
    -*** Click on a name of the group to get a meeting agenda ***
    -

    -

    {{ meeting_info.start_date|date:"l"|upper }}, {{ meeting_info.start_date|date:"F j, Y" }}
    -{{ nonsession_info.0.time_desc }} {{ nonsession_info.0.non_session_ref }} - {{ meetingvenue_info.reg_area_name }} -
    -{% for item in qs_sun %} -{{ item.sched_time_id1.time_desc }} {{ item.acronym_name }} - {{ item.sched_room_id1.room_name }}
    -{% endfor %} -
    -{% regroup object_list by day_id as days %} -{% for day in days %} - {{ day.list.0.meeting_date|date:"l"|upper }}, {{ day.list.0.meeting_date|date:"F j, Y" }}
    - {% if day.list.0.reg_info %}{{ day.list.0.reg_info }} - {{ meetingvenue_info.reg_area_name }}
    {% endif %} - {% ifequal day.list.0.day_id "5" %} - {{ day.list.0.fbreak_info }} - {{ meetingvenue_info.break_area_name }}
    +{# cache this part for 15 minutes -- it takes 3-6 seconds to generate #} +{% load cache %} +{% cache 900 ietf_meeting_agenda meeting.num %} + + +{% for slot in timeslots %} + {% ifchanged %} + + + + {% if slot.reg_info %} + + + + {% endif %} + {% endifchanged %} + {% if slot.session_name %} + {% if slot.break_info %} + + + + {% endif %} + + + + {% if not slot.is_plenary %} + {% for session in slot.sessions_by_area|dictsort:"area" %} + + + {% if session.info.area %} + + + + {% endif %} + + {% endfor %} + {% endif %} {% else %} - {{ day.list.0.morning_br_info }} - {{ meetingvenue_info.break_area_name }}
    - {% endifequal %} -
    - {% for item in day.list %} - {% ifequal item.session_name_id 3 %} - {{ day.list.0.lunch_br_info }} Break
    - {% endifequal %} - {% ifequal item.session_name_id 4 %} - {% if day.list.0.an_br1_info %} - {{ day.list.0.an_br1_info }} - {{ meetingvenue_info.break_area_name }}
    - {% endif %} - {% endifequal %} - {% ifequal item.session_name_id 5 %} - {% if day.list.0.an_br2_info %} - {{ day.list.0.an_br2_info }} - {{ meetingvenue_info.break_area_name }}
    - {% endif %} - {% endifequal %} - {% ifequal item.session_name_id 8 %} - {% if day.list.0.an_br2_info %} - {{ day.list.0.an_br2_info }} - {{ meetingvenue_info.break_area_name }}
    - {% endif %} - {% endifequal %} - {% ifequal item.session_name_id 10 %} - {% if day.list.0.an_br2_info %} - {{ day.list.0.an_br2_info }} - {{ meetingvenue_info.break_area_name }}
    - {% endif %} - {% endifequal %} - {{ item.time_desc }} {{ item.session_name }} - {% ifequal item.sessions.0.acronym "plenaryw" %} - - {{ item.sessions.0.room_id.room_name }}
    -
    {{ plenaryw_agenda|escape }}
    - {% else %} - {% ifequal item.sessions.0.acronym "plenaryt" %} - - {{ item.sessions.0.room_id.room_name }}
    -
    {{ plenaryt_agenda|escape }}
    - {% else %} -
    +

    {{ slot.meeting_date|date:"l"|upper }}, {{ slot.meeting_date|date:"F j, Y" }}

    +
    + {{ slot.registration.time_desc }} {{ slot.registration.name }} + {% if venue.reg_area_name %} - {{ venue.reg_area_name|escape }}{% endif %} +
    +
    + {{ slot.break_info.time_desc }} {{ slot.break_info.name }} + {% if venue.break_area_name and slot.break_info.show_break_location %} - {{ venue.break_area_name|escape }}{% endif %} +
    + {{ slot.time_desc }} {{ slot.session_name }} + {% ifequal slot.sessions.0.acronym "plenaryw" %} + - {{ slot.sessions.0.room_id.room_name }}
    +
    {{ plenaryw_agenda|escape }}
    + {% endifequal %} + {% ifequal slot.sessions.0.acronym "plenaryt" %} + - {{ slot.sessions.0.room_id.room_name }}
    +
    {{ plenaryt_agenda|escape }}
    + {% endifequal %} +
    {{ session.info.room_id.room_name}}{{ session.info.area|upper}}{% if session.info.isWG %}{{ session.info.acronym|lower }}{% else %}{{ session.info.acronym|lower }}{% endif %} + + {% if session.info.agenda_file %}{{ session.info.acronym_name|escape }} {{ session.info.group_type_str }}{% else %}{{ session.info.acronym_name|escape }} {{ session.info.group_type_str }}{% endif %} + {% if session.info.special_agenda_note %}
    - {{ session.info.special_agenda_note }}{% endif %} +
    - {% for session in item.sessions|dictsort:"area" %} - - {% endfor %} -
    {{ session.room_id.room_name }}{{ session.area|upper }}{% if session.isWG %}{{ session.acronym|lower }}{% else %}{{ session.acronym|lower }}{% endif %}{% if session.agenda_file %}{{ session.acronym_name }} {{ session.group_type_str }}{% else %}{{ session.acronym_name }} {{ session.group_type_str }}{% endif %}{% if session.special_agenda_note %} - {{ session.special_agenda_note }}{% endif %}
    - {% endifequal %} - {% endifequal %} -
    - {% endfor %} -
    -{% endfor %} -


    -
    AREA DIRECTORS

    - -{% regroup qs_ads by area as ads %} -{% for ad in ads %} - - - - - + {% for session in slot.sessions %} + + + + {% endfor %} + {% endif %} {% endfor %}
    {{ ad.grouper|upper }}{{ ad.list.0.area.area_acronym.name }} - {% for ad_person in ad.list %} - {% ifequal forloop.counter 2 %} & {% endifequal %} - {{ ad_person.person }}/{{ ad_person.person.affiliation }} - {% endfor %} -
    + {{ slot.time_desc }} {{ session.acronym_name|escape }} - {{ session.room_id.room_name}} +
    + +{% endcache %} + {% endblock %} diff --git a/ietf/templates/meeting/agenda.txt b/ietf/templates/meeting/agenda.txt index e9deba219..9372f329b 100644 --- a/ietf/templates/meeting/agenda.txt +++ b/ietf/templates/meeting/agenda.txt @@ -1,40 +1,27 @@ - -{% load humanize %} - -Agenda of the {{ meeting_num|ordinal }} IETF Meeting -

    -

    {# From here on out, horizontal and vertical whitespace is significant... #}
    -{% filter center:72 %}Agenda of the {{ meeting_num|ordinal }} IETF Meeting{% endfilter %}
    -{% filter center:72 %}{{ meeting_info.start_date|date:"F j" }}-{% ifnotequal meeting_info.start_date|date:"F" meeting_info.end_date|date:"F" %}{{ meeting_info.end_date|date:"F " }}{% endifnotequal %}{{ meeting_info.end_date|date:"j, Y" }}{% endfilter %}
    +{% load humanize %}{% autoescape off %}
    +{% load ietf_filters %}
    +{% filter center:72 %}Agenda of the {{ meeting.num|ordinal }} IETF Meeting{% endfilter %}
    +{% filter center:72 %}{{ meeting.start_date|date:"F j" }}-{% ifnotequal meeting.start_date.month meeting.end_date.month %}{{ meeting.end_date|date:"F " }}{% endifnotequal %}{{ meeting.end_date|date:"j, Y" }}{% endfilter %}
    +{% filter center:72 %}Updated {{ update.updated|date:"Y-m-d H:i:s T" }}{% endfilter %}
     
    -Updated as of {{ last_update_info.updated_date }} {{ last_update_info.updated_time }} (ET) 
    +{% filter center:72 %}IETF agendas are subject to change, up to and during the meeting.{% endfilter %}
     
    -{{ meeting_info.start_date|date:"l"|upper }}, {{ meeting_info.start_date|date:"F j, Y" }}
    -{{ nonsession_info.0.time_desc }} {{ nonsession_info.0.non_session_ref }} - {{ meetingvenue_info.reg_area_name }}
    -{% for item in qs_sun %}{{ item.sched_time_id1.time_desc }} {{ item.acronym_name }} - {{ item.sched_room_id1.room_name }}
    -{% endfor %}
    +{% for slot in timeslots %}{% ifchanged  %}
     
    -{% regroup object_list by day_id as days %}{% for day in days %}
    -{{ day.list.0.meeting_date|date:"l"|upper }}, {{ day.list.0.meeting_date|date:"F j, Y" }}
    -{% if day.list.0.reg_info %}{{ day.list.0.reg_info }} - {{ meetingvenue_info.reg_area_name }}
    -{% endif %}{% ifequal day.list.0.day_id "5" %}{{ day.list.0.fbreak_info }} - {{ meetingvenue_info.break_area_name }}{% else %}{{ day.list.0.morning_br_info }} - {{ meetingvenue_info.break_area_name }}
    -{% endifequal %}{% for item in day.list %}{% ifequal item.session_name_id 3 %}{{ day.list.0.lunch_br_info }} Break
    -{% endifequal %}{% ifequal item.session_name_id 4 %}{{ day.list.0.an_br1_info }} - {{ meetingvenue_info.break_area_name }}
    -{% endifequal %}{% ifequal item.session_name_id 5 %}{{ day.list.0.an_br2_info }} - {{ meetingvenue_info.break_area_name }}
    -{% endifequal %}{{ item.time_desc }} {{ item.session_name }}{% ifequal item.sessions.0.acronym "plenaryw" %}{{ item.sessions.0.room_id.room_name }}
    -{{ plenaryw_agenda|escape }}{% else %}{% ifequal item.sessions.0.acronym "plenaryt" %}{{ item.sessions.0.room_id.room_name }}
    -{{ plenaryt_agenda|escape }}{% else %}{% for session in item.sessions|dictsort:"area" %}
    -{{ session.room_id.room_name|ljust:16 }}  {{ session.area|upper|ljust:4 }} {{ session.acronym|ljust:7 }} 	{{ session.acronym_name }} {{ session.group_type_str }}{% if session.special_agenda_note %} - {{ session.special_agenda_note }}{% endif %}{% endfor %}
    -{% endifequal %}{% endifequal %}
    -{% endfor %}
    -{% endfor %}
    +{{ slot.meeting_date|date:"l"|upper }}, {{ slot.meeting_date|date:"F j, Y" }}{% if slot.reg_info %}
    +{{ slot.registration.time_desc  }}  {{ slot.registration.name  }}{% if venue.reg_area_name %} - {{ venue.reg_area_name }}{% endif %}{% endif %}
    +{% endifchanged %}{% if slot.session_name %}{% if slot.break_info %}{{ slot.break_info.time_desc }}  {{ slot.break_info.name }}{% if venue.break_area_name and slot.break_info.show_break_location %} - {{ venue.break_area_name }}{% endif %}
    +{% endif %}{{ slot.time_desc               }}  {{ slot.session_name }}{% if slot.is_plenary %} - {{ slot.sessions.0.room_id.room_name }}{% endif %}{% ifequal slot.sessions.0.acronym "plenaryw" %}
    +
    +{{ plenaryw_agenda }}{% endifequal %}{% ifequal slot.sessions.0.acronym "plenaryt" %}
    +
    +{{ plenaryt_agenda }}{% endifequal %}{% if not slot.is_plenary %}{% for session in slot.sessions_by_area|dictsort:"area" %}
    +{{ session.info.room_id.room_name|ljust:14 }}  	{% if session.info.area %}{{ session.info.area|upper|ljust:4 }}	{{ session.info.acronym|ljust:10 }}  	{% endif %}{{ session.info.acronym_name }} {{ session.info.group_type_str }}{% if session.info.special_agenda_note %} - {{ session.info.special_agenda_note }}{% endif %}{% endfor %}{% endif %}
    +
    +{% else %}{% for session in slot.sessions %}{{ slot.time_desc }}  {{ session.acronym_name }} - {{ session.room_id.room_name }}
    +{% endfor %}{% endif %}{% endfor %}
     ====================================================================
     AREA DIRECTORS
    -{% regroup qs_ads by area as ads %}{% for ad in ads %}
    -{{ ad.grouper|upper|ljust:5 }}{{ ad.list.0.area.area_acronym.name|ljust:18 }}  {% for ad_person in ad.list %}{% ifequal forloop.counter 2 %} & {% endifequal %}{{ ad_person.person }}/{{ ad_person.person.affiliation }}{% endfor %}{% endfor %}
    -
    - - +{% regroup ads by area as grouped %}{% for ad in grouped %} +{{ ad.grouper|upper|ljust:5 }}{{ ad.list.0.area.area_acronym.name|slice:":18"|ljust:18 }} {% for ad_person in ad.list %}{% ifequal forloop.counter 2 %} & {% endifequal %}{{ ad_person.person }}/{{ ad_person.person.affiliation }}{% endfor %}{% endfor %} +{% endautoescape %} diff --git a/ietf/templates/meeting/index.html b/ietf/templates/meeting/index.html deleted file mode 100644 index 99ae9011d..000000000 --- a/ietf/templates/meeting/index.html +++ /dev/null @@ -1 +0,0 @@ -{# Copyright The IETF Trust 2007, All Rights Reserved #} diff --git a/ietf/templates/meeting/list.html b/ietf/templates/meeting/list.html index aa13b570a..19a7ab39a 100644 --- a/ietf/templates/meeting/list.html +++ b/ietf/templates/meeting/list.html @@ -1,133 +1,71 @@ +{% extends "base.html" %}{% load ietf_filters %} {# Copyright The IETF Trust 2007, All Rights Reserved #} -{% extends "base.html" %} -{% block title %} IETF {{ meeting_num }} Preliminary & Interim Materials {% endblock %} +{% block title %}IETF {{ meeting_num }} Preliminary & Interim Materials{% endblock %} +{% block morecss %} +table.ietf-materials { width: 99%; border-bottom:1px solid #cbcbcb; nopadding: 0; margin: 0; vertical-align: top; border-collapse: collapse;} +table.ietf-materials tr {vertical-align: top; } +table.ietf-materials td { padding:0.5em 0; } +{% endblock morecss %} + {% block content %} - -
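(Aside: the plaintext agenda above relies on the `center`, `ljust`, and `slice` filters for its fixed-width layout, which is why the template comment warns that whitespace is significant from that point on. The same alignment in plain Python, with invented values, for illustration only:)

    # Editor's illustration of the fixed-width formatting used by the text agenda.
    title = "Agenda of the 78th IETF Meeting"
    print(title.center(72))                   # {% filter center:72 %} ... {% endfilter %}

    area_name = "Real-time Applications and Infrastructure"
    print(area_name[:18].ljust(18) + "  Jane Doe/Example Corp")  # |slice:":18"|ljust:18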
    -

    IETF {{ meeting_num }} Meeting Materials

    +

    IETF {{ meeting_num }} Meeting Materials

    + {% if sub_began %} -

    Submission began {{ begin_date|date:"F j, Y" }}

    -

    Submission cutoff odate: {{ cut_off_date|date:"F j, Y" }}

    -

    Corrections to submissions cutoff date:{{ cor_cut_off_date|date:"F j, Y" }}

    +

    Submission cutoff date: {{ cut_off_date|date:"F j, Y" }}
    +Corrections to submissions cutoff date: {{ cor_cut_off_date|date:"F j, Y" }}

    {% endif %} -Updated as of {% now "F j, Y, H:i:s (T)" %} -

    + +

    Meeting Materials Manager (for session chairs only; password required)

    + +{# cache for 15 minutes, as long as there's no proceedings activity. takes 4-8 seconds to generate. #} +{% load cache %} +{% cache 900 ietf_meeting_materials meeting_num cache_version %} {% regroup object_list|dictsort:"area" by area_name as areas %} {% for wgs in areas %} - - - - -
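(Aside: both the agenda and this materials listing are now wrapped in Django's `{% cache %}` tag, as the inline comments note: the rendered fragment is kept for 900 seconds and keyed by the meeting number plus a cache_version value that can be bumped when proceedings activity should invalidate it. A stdlib-only sketch of that idea, not Django's cache framework:)

    # Editor's sketch of time-limited, versioned fragment caching; the templates
    # use Django's "{% cache 900 ... %}" tag, not this code.
    import time

    _cache = {}

    def cached_fragment(render, key, timeout=900):
        hit = _cache.get(key)
        if hit and time.time() - hit[0] < timeout:
            return hit[1]          # still fresh: reuse the stored rendering
        value = render()
        _cache[key] = (time.time(), value)
        return value

    # Bumping cache_version changes the key, so stale fragments simply expire unused.
    page = cached_fragment(lambda: "...expensive materials listing...",
                           key=("ietf_meeting_materials", 78, 1))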


    {{ wgs.grouper }}

    -
    +

    {{ wgs.grouper }}

    {% ifequal wgs.grouper "Plenary Sessions" %} {% for wg in wgs.list|dictsortreversed:"acronym" %} - - - - - -
    {{ wg.acronym|upper }}{% ifequal wg.group_type_str "BOF" %} ({{ wg.group_type_str }}) {% endifequal %}
    - {% if wg.agenda_file %}[Agenda]{% endif %}{% if wg.minute_file %} [Minutes]{% endif %}
    -{% include "meeting/submission_status_snippet.html" %} - {% for slide in wg.slides %} - - - {% endfor %} -
    {{ slide.slide_name }}
    -
    - + {% include "meeting/list_group.html" %} {% endfor %} {% else %} {% for wg in wgs.list|dictsort:"acronym" %} - - - - - -
    {{ wg.acronym|upper }}{% ifequal wg.group_type_str "BOF" %} ({{ wg.group_type_str }}) {% endifequal %}
    - {% if wg.agenda_file %}[Agenda]{% endif %}{% if wg.minute_file %} [Minutes]{% endif %}
    -{% include "meeting/submission_status_snippet.html" %} - {% for slide in wg.slides %} - - - {% endfor %} -
    {{ slide.slide_name }}
    -
    - + {% include "meeting/list_group.html" %} {% endfor %} {% endifequal %} {% endfor %} - + {% if training_list %} - - -


    Training

    -
    +

    Training

    {% for wg in training_list|dictsort:"acronym" %} - - - - -
    {{ wg.acronym|upper }} -{% include "meeting/submission_status_snippet.html" %} - {% for slide in wg.slides %} - - {% endfor %} -
    {{ slide.slide_name }}
    -
    + + + +
    {{ wg.acronym|upper }}{% ifequal wg.group_type_str "BOF" %} ({{ wg.group_type_str }}) {% endifequal %}
    +{% for slide in wg.slides %} +{{ slide.slide_name|clean_whitespace }}
    +{% endfor %} +
    {% endfor %} {% endif %} - - + + {% if irtf_list %} - - - - -


    IRTF

    -
    +

    IRTF

    {% for wg in irtf_list|dictsort:"acronym_lower" %} - - - - - -
    {{ wg.acronym|upper }}
    - {% if wg.agenda_file %}[Agenda]{% endif %}{% if wg.minute_file %} [Minutes]{% endif %}
    -{% include "meeting/submission_status_snippet.html" %} - {% for slide in wg.slides %} - - - {% endfor %} -
    {{ slide.slide_name }}
    -
    + {% include "meeting/list_group.html" %} {% endfor %} {% endif %} - - + + {% if interim_list %} - - - - -


    Interim Meetings

    -
    +

    Interim Meetings

    {% for wg in interim_list|dictsort:"acronym" %} - - - - -
    {{ wg.acronym|upper }}
    {% if wg.agenda_file %}[Agenda]{% endif %}{% if wg.minute_file %} [Minutes]{% endif %}
    -{% include "meeting/submission_status_snippet.html" %} - {% for slide in wg.slides %} - - {% endfor %} -
    {{ slide.slide_name }}
    -
    + {% include "meeting/list_group.html" %} {% endfor %} {% endif %} - -
    + + +{% endcache %} + {% endblock %} diff --git a/ietf/templates/meeting/list_closed.html b/ietf/templates/meeting/list_closed.html index 8e79de8d9..02eb8ca8e 100644 --- a/ietf/templates/meeting/list_closed.html +++ b/ietf/templates/meeting/list_closed.html @@ -1,17 +1,13 @@ -{# Copyright The IETF Trust 2007, All Rights Reserved #} {% extends "base.html" %} -{% block title %} IETF {{ meeting_num }} Preliminary & Interim Materials {% endblock %} +{# Copyright The IETF Trust 2007, All Rights Reserved #} +{% block title %}IETF {{ meeting_num }} Preliminary & Interim Materials {% endblock %} {% block content %} - -
    -

    IETF {{ meeting_num }} Preliminary & Interim Materials

    -

    Submission began {{ begin_date|date:"F j, Y" }}

    -

    Submission cutoff odate: {{ cut_off_date|date:"F j, Y" }}

    -

    Corrections to submissions cutoff date:{{ cor_cut_off_date|date:"F j, Y" }}

    -
    -
    - -The IETF {{ meeting_num }} Preliminary & Interim Materials page is now closed.
    -You should be able to access the final version of IETF {{ meeting_num }} Proceedings via IETF Online Proceedings page.
    -
    +

    IETF {{ meeting_num }} Preliminary & Interim Materials

    + +

    Submission began {{ begin_date|date:"F j, Y" }}
    +Submission cutoff date: {{ cut_off_date|date:"F j, Y" }}
    +Corrections to submissions cutoff date: {{ cor_cut_off_date|date:"F j, Y" }}

    + +

    The IETF {{ meeting_num }} Preliminary & Interim Materials page is now closed.
    +

    You should be able to access the final version of IETF {{ meeting_num }} Proceedings via the IETF Online Proceedings page.

    {% endblock %} diff --git a/ietf/templates/meeting/list_group.html b/ietf/templates/meeting/list_group.html new file mode 100644 index 000000000..fd7f683ce --- /dev/null +++ b/ietf/templates/meeting/list_group.html @@ -0,0 +1,14 @@ +{% load ietf_filters %} + + + +
    {% ifequal wg.acronym "plenaryw" %}Wednesday
    Plenary{%else%}{% ifequal wg.acronym "plenaryt" %}Thursday
    Plenary{%else%}{{ wg.acronym|upper }}{%endifequal%}{%endifequal%} +{% ifequal wg.group_type_str "BOF" %} ({{ wg.group_type_str }}) {% endifequal %}

    +
    +{% if wg.agenda_file %}Agenda{% else %}No agenda received {% endif %}
    +{% if wg.minute_file %} Minutes{% else %}No minutes received{% endif %}
    +{% for slide in wg.slides %} +{{ slide.slide_name|clean_whitespace }}
    +{% endfor %} +
    + diff --git a/ietf/templates/meeting/m_agenda.html b/ietf/templates/meeting/m_agenda.html new file mode 100644 index 000000000..eccc661e8 --- /dev/null +++ b/ietf/templates/meeting/m_agenda.html @@ -0,0 +1,130 @@ +{% extends "m_base.html" %} +{# Copyright The IETF Trust 2007, All Rights Reserved #} +{% load humanize %} +{% load ietf_filters %} +{% block title %} IETF {{ meeting.num }} Meeting Agenda{% endblock %} +{% block head %} + + + + + + +{% endblock %} +{% block body_attributes %}onload="on_load_actions()"{% endblock %} +{% block content %} + + + +
    + + {% for slot in timeslots %} + {% ifchanged %} + + + + {% if slot.reg_info %} + + + + {% endif %} + {% endifchanged %} + {% if slot.session_name %} + {% if slot.break_info %} + + + + {% endif %} + + + + {% if not slot.is_plenary %} + {% for session in slot.sessions_by_area|dictsort:"area" %} + + {% if session.info.area %} + + + {% endif %} + + {% endfor %} + {% endif %} + {% else %} + {% for session in slot.sessions %} + + + + {% endfor %} + {% endif %} + {% endfor %} +
    + +

    {{ slot.meeting_date|date:"l"|upper }}, {{ slot.meeting_date|date:"F j, Y" }}

    +
    + {{ slot.registration.time_desc }} {{ slot.registration.name }} + {% if venue.reg_area_name %} - {{ venue.reg_area_name|escape }}{% endif %} +
    +
    + {{ slot.break_info.time_desc }} {{ slot.break_info.name }} + {% if venue.break_area_name and slot.break_info.show_break_location %} - {{ venue.break_area_name|escape }}{% endif %} +
    + +
    {{slot.meeting_date|date:"D"}} {{ slot.time_desc }} {{ slot.session_name }} + {% ifequal slot.sessions.0.acronym "plenaryw" %} + - {{ slot.sessions.0.room_id.room_name }}
    +
    {{ plenaryw_agenda|escape }}
    + {% else %} + {% ifequal slot.sessions.0.acronym "plenaryt" %} + - {{ slot.sessions.0.room_id.room_name }} +
{{ plenaryt_agenda|escape }}
    + {% else %} + + {% endifequal %} + {% endifequal %} + +
    {{ session.info.room_id.room_name}} + {% if session.info.isWG %}{%endif%} + {% ifequal session.info.group_type_str "BOF" %}{% endifequal %} + {{ session.info.acronym|lower }} + {% ifequal session.info.group_type_str "BOF" %}{% endifequal %} + {% if session.info.isWG %}{%endif%} + + {% if session.info.agenda_file %}{%endif%} + {{ session.info.acronym_name|truncate_ellipsis:"20"|safe }} + {% if session.info.agenda_file %}{%endif%} +
    + + {{ slot.time_desc }} {{ session.acronym_name|escape }} - {{ session.room_id.room_name}} +
    +
    +{% endblock %} diff --git a/ietf/templates/meeting/submission_status_snippet.html b/ietf/templates/meeting/submission_status_snippet.html deleted file mode 100644 index acaf77f24..000000000 --- a/ietf/templates/meeting/submission_status_snippet.html +++ /dev/null @@ -1,34 +0,0 @@ -{# Copyright The IETF Trust 2007, All Rights Reserved #} - {% if wg.agenda_file and wg.minute_file and wg.slides %} - Presentation files received, agenda received, minutes received - {% else %} - {% if not wg.agenda_file and not wg.minute_file and not wg.slides %} - No presentation files, no agenda, no minutes - {% else %} - {% if not wg.agenda_file and wg.minute_file and not wg.slides %} - No presentation files, no agenda, minutes received - {% else %} - {% if wg.agenda_file and not wg.minute_file and not wg.slides %} - No presentation files, agenda received, no minutes - {% else %} - {% if wg.agenda_file and wg.minute_file and not wg.slides %} - No presentation files, agenda received, minutes received - {% else %} - {% if not wg.agenda_file and not wg.minute_file and wg.slides %} - Presentation files received, no agenda, no minutes - {% else %} - {% if not wg.agenda_file and wg.minute_file and wg.slides %} - Presentation files received, no agenda, minutes received - {% else %} - {% if wg.agenda_file and not wg.minute_file and wg.slides %} - Presentation files received, agenda received, no minutes - {% else %} - Unknown Status - {% endif %} - {% endif %} - {% endif %} - {% endif %} - {% endif %} - {% endif %} - {% endif %} - {% endif %} diff --git a/ietf/templates/my/.gitignore b/ietf/templates/my/.gitignore deleted file mode 100644 index c7013ced9..000000000 --- a/ietf/templates/my/.gitignore +++ /dev/null @@ -1,2 +0,0 @@ -/*.pyc -/settings_local.py diff --git a/ietf/templates/my/my.html b/ietf/templates/my/my.html deleted file mode 100644 index baab5c7dc..000000000 --- a/ietf/templates/my/my.html +++ /dev/null @@ -1,13 +0,0 @@ -{# Copyright The IETF Trust 2007, All Rights Reserved #} -{% extends "base.html" %} - -{% block content %} -

    -Hello, {{ me }}! -

    Internet Drafts that you Author (probably useless but hey)

    -
      -{% for doc in me.idauthors_set.all %} -
    • {{ doc.document.filename }}: {{ doc.document.status }} / {{ doc.document.idstate }} -{% endfor %} -
    -{% endblock %} diff --git a/ietf/templates/notify_expirations/body.txt b/ietf/templates/notify_expirations/body.txt new file mode 100644 index 000000000..bc07899a7 --- /dev/null +++ b/ietf/templates/notify_expirations/body.txt @@ -0,0 +1,6 @@ +The following draft will expire soon: + +Filename: {{draft.filename}} +Title: {{draft.title}} +State: {{draft.idstate}} +Expires: {{expiration}} (in {{expiration|timeuntil}}) diff --git a/ietf/templates/notify_expirations/subject.txt b/ietf/templates/notify_expirations/subject.txt new file mode 100644 index 000000000..7cb09c178 --- /dev/null +++ b/ietf/templates/notify_expirations/subject.txt @@ -0,0 +1 @@ +Expiration impending: {{draft.filename}} diff --git a/ietf/templates/registration/.gitignore b/ietf/templates/registration/.gitignore new file mode 100644 index 000000000..a74b07aee --- /dev/null +++ b/ietf/templates/registration/.gitignore @@ -0,0 +1 @@ +/*.pyc diff --git a/ietf/templates/registration/login.html b/ietf/templates/registration/login.html deleted file mode 100644 index 43337c3be..000000000 --- a/ietf/templates/registration/login.html +++ /dev/null @@ -1,20 +0,0 @@ -{# Copyright The IETF Trust 2007, All Rights Reserved #} -{% extends "base.html" %} - -{% block content %} - -{% if form.has_errors %} -

    Your username and password didn't match our records. Please try again.

    -{% endif %} - -
    - - - -
    {{ form.username }}
    {{ form.password }}
    - - - -
    - -{% endblock %} diff --git a/ietf/templates/registration/profile.html b/ietf/templates/registration/profile.html new file mode 100644 index 000000000..5168bbefc --- /dev/null +++ b/ietf/templates/registration/profile.html @@ -0,0 +1,11 @@ +{# Copyright The IETF Trust 2007, All Rights Reserved #} +{% extends "base.html" %} + +{% block content %} +

    User information

    +

    User name: {{ user.username }}
    +Roles/Groups: {{ user.groups.all|join:", "|default:"(none)" }}
    +Person: {{ user.get_profile.person|default:"?" }} {% if user.get_profile.person %}({{user.get_profile.person.person_or_org_tag}}){% endif %}
    +IESG Login ID: {{ user.get_profile.iesg_login_id|default:"(none)" }}

    + +{% endblock %} diff --git a/ietf/templates/utils/all.html b/ietf/templates/utils/all.html deleted file mode 100644 index c95ff56fd..000000000 --- a/ietf/templates/utils/all.html +++ /dev/null @@ -1,17 +0,0 @@ -{# Copyright The IETF Trust 2007, All Rights Reserved #} - - - -Review pages - - -

    Review items

    - ( There are {{ count }} items in the list ) -
    - - \ No newline at end of file diff --git a/ietf/templates/utils/frame2.html b/ietf/templates/utils/frame2.html deleted file mode 100644 index ac127326e..000000000 --- a/ietf/templates/utils/frame2.html +++ /dev/null @@ -1,22 +0,0 @@ -{# Copyright The IETF Trust 2007, All Rights Reserved #} - - - -Comparison -- {{ info.old }} - {{ info.new }} - - - - - - - - - <p>this frameset document contains: - <ul> - <li><a href="{{ info.old }}">The old page</a><li> - <li><a href="{{ info.new }}">The new page</a><li> - </ul> - - - \ No newline at end of file diff --git a/ietf/templates/utils/review.html b/ietf/templates/utils/review.html deleted file mode 100644 index 9c636fd32..000000000 --- a/ietf/templates/utils/review.html +++ /dev/null @@ -1,24 +0,0 @@ -{# Copyright The IETF Trust 2007, All Rights Reserved #} - - - -Comparison - {{ info.old }} / {{ info.new }} - - - - - - - - - -
    - < Prev -

    - {{ info.old }}
    All - Next > -

    - {{ info.new }}
    - - \ No newline at end of file diff --git a/ietf/templates/wginfo/1wg-charters-by-acronym.txt b/ietf/templates/wginfo/1wg-charters-by-acronym.txt new file mode 100644 index 000000000..1081f7660 --- /dev/null +++ b/ietf/templates/wginfo/1wg-charters-by-acronym.txt @@ -0,0 +1,4 @@ +{% load ietf_filters %}{% for wg in wg_list|dictsort:"group_acronym.acronym" %}{% ifequal wg.area.area.status_id 1 %}{% if wg.start_date %}{{ wg }} +{% endif %}{% endifequal %}{% endfor %} +{% for wg in wg_list|dictsort:"group_acronym.acronym" %}{% ifequal wg.area.area.status_id 1 %}{% if wg.start_date %}{% include "wginfo/wg-charter.txt" %} +{% endif %}{% endifequal %}{% endfor %} diff --git a/ietf/templates/wginfo/1wg-charters.txt b/ietf/templates/wginfo/1wg-charters.txt new file mode 100644 index 000000000..959b2c6e9 --- /dev/null +++ b/ietf/templates/wginfo/1wg-charters.txt @@ -0,0 +1,5 @@ +{% load ietf_filters %}{% regroup wg_list|dictsort:"area_acronym.acronym" by area.area as wga_list %}{% for area in wga_list %}{% for wg in area.list|dictsort:"group_acronym.name" %}{% ifequal wg.area.area.status_id 1 %}{% if wg.start_date %}{{ wg }} +{% endif %}{% endifequal %}{% endfor %}{% endfor %} +{% regroup wg_list|dictsort:"area_acronym.acronym" by area.area as wga_list %}{% for area in wga_list %}{% for wg in area.list|dictsort:"group_acronym.name" %}{% ifequal wg.area.area.status_id 1 %}{% if wg.start_date %} +{% include "wginfo/wg-charter.txt" %} +{% endif %}{% endifequal %}{% endfor %}{% endfor %} diff --git a/ietf/templates/wginfo/1wg-summary-by-acronym.txt b/ietf/templates/wginfo/1wg-summary-by-acronym.txt new file mode 100644 index 000000000..a7ea79517 --- /dev/null +++ b/ietf/templates/wginfo/1wg-summary-by-acronym.txt @@ -0,0 +1,15 @@ +{% load ietf_filters %} + IETF Working Group Summary (By Acronym) + + +The following Area Abreviations are used in this document +{% for area in area_list %} +{{ area }} - {{ area.area_acronym.name }}{% endfor %} +{% for wg in wg_list|dictsort:"group_acronym.acronym" %}{% if wg.start_date %} +{{ wg.group_acronym.name|safe }} ({{ wg }}) -- {{ wg.area.area|upper }} +{% for chair in wg.wgchair_set.all %}{% if forloop.first %} Chair{{ forloop.revcounter|pluralize:": ,s:" }} {% else %} {% endif %}{{ chair.person|safe }} <{{ chair.person.email.1 }}> +{% endfor %} WG Mail: {{ wg.email_address }} + To Join: {{ wg.email_subscribe }}{%if wg.email_keyword %} + In Body: {{ wg.email_keyword|safe }}{% endif %} + Archive: {{ wg.email_archive }} +{% endif %}{% endfor %} diff --git a/ietf/templates/wginfo/1wg-summary.txt b/ietf/templates/wginfo/1wg-summary.txt new file mode 100644 index 000000000..ffe19f733 --- /dev/null +++ b/ietf/templates/wginfo/1wg-summary.txt @@ -0,0 +1,13 @@ +{% load ietf_filters %} IETF Working Group Summary (By Area) +{% regroup wg_list|dictsort:"area.area.area_acronym.acronym" by area.area as wga_list %}{% for area in wga_list %}{% for wg in area.list|dictsort:"group_acronym.acronym" %}{% ifequal wg.area.area.status_id 1 %}{% if forloop.first %} +{{ wg.area_acronym.name }} ({{ wg.area_acronym }}) +{{ wg.area_acronym.name|dashify }}------{% for ad in wg.area_directors %} + {{ ad.person }} <{{ ad.person.email.1 }}>{% endfor %} +{% endif %}{% if wg.start_date %} +{{ wg.group_acronym.name|safe }} ({{ wg }}) +{% for chair in wg.wgchair_set.all %}{% if forloop.first %} Chair{{ forloop.revcounter|pluralize:": ,s:" }} {% else %} {% endif %}{{ chair.person|safe }} <{{ chair.person.email.1 }}> +{% endfor %} WG Mail: {{ wg.email_address }} + To Join: {{ wg.email_subscribe 
}}{%if wg.email_keyword %} + In Body: {{ wg.email_keyword|safe }}{% endif %} + Archive: {{ wg.email_archive }} +{% endif %}{% endifequal %}{% endfor %}{% endfor %} diff --git a/ietf/templates/wginfo/wg-charter.txt b/ietf/templates/wginfo/wg-charter.txt new file mode 100644 index 000000000..83ba033ca --- /dev/null +++ b/ietf/templates/wginfo/wg-charter.txt @@ -0,0 +1,47 @@ +{% load ietf_filters %}{{wg.group_acronym.name|safe}} ({{wg}}) +{{ wg.group_acronym.name|dashify }}{{ wg.group_acronym.acronym|dashify }}--- + + Charter + Last Modified: {{ wg.last_modified_date }} + + Current Status: {{ wg.status }} + + Chair{{ wg.chairs.count|pluralize:",s" }}: +{% for chair in wg.chairs %} {{ chair.person|safe }} <{{chair.person.email.1}}> +{% endfor %} + {{wg.area.area.area_acronym.name}} Directors: +{% for ad in wg.area_directors %} {{ ad.person|safe }} <{{ad.person.email.1}}> +{% endfor %} + {{wg.area.area.area_acronym.name}} Advisor: + {{ wg.area_director.person|safe }} <{{wg.area_director.person.email.1}}> +{% if wg.wgtechadvisor_set.count %} + Tech Advisor{{ wg.wgtechadvisor_set.count|pluralize:",s" }}: +{% for techadvisor in wg.wgtechadvisor_set.all %} {{ techadvisor.person|safe }} <{{techadvisor.person.email.1}}> +{% endfor %}{% endif %}{% if wg.wgeditor_set.count %} + Editor{{ wg.wgeditor_set.count|pluralize:",s" }}: +{% for editor in wg.wgeditor_set.all %} {{ editor.person|safe }} <{{editor.person.email.1}}> +{% endfor %}{% endif %}{% if wg.secretaries %} + Secretar{{ wg.secretaries.count|pluralize:"y,ies" }}: +{% for secretary in wg.secretaries %} {{ secretary.person|safe }} <{{secretary.person.email.1}}> +{% endfor %}{% endif %} + Mailing Lists: + General Discussion: {{ wg.email_address }} + To Subscribe: {{ wg.email_subscribe }} + Archive: {{ wg.email_archive }} + +Description of Working Group: + + {{ wg.charter_text|indent|safe }} + +Goals and Milestones: +{% for milestone in wg.milestones %} {% ifequal milestone.done 'Done' %}Done {% else %}{%ifequal milestone.expected_due_date.month 1 %}Jan{% endifequal %}{%ifequal milestone.expected_due_date.month 2 %}Feb{% endifequal %}{%ifequal milestone.expected_due_date.month 3 %}Mar{% endifequal %}{%ifequal milestone.expected_due_date.month 4 %}Apr{% endifequal %}{%ifequal milestone.expected_due_date.month 5 %}May{% endifequal %}{%ifequal milestone.expected_due_date.month 6 %}Jun{% endifequal %}{%ifequal milestone.expected_due_date.month 7 %}Jul{% endifequal %}{%ifequal milestone.expected_due_date.month 8 %}Aug{% endifequal %}{%ifequal milestone.expected_due_date.month 9 %}Sep{% endifequal %}{%ifequal milestone.expected_due_date.month 10 %}Oct{% endifequal %}{%ifequal milestone.expected_due_date.month 11 %}Nov{% endifequal %}{%ifequal milestone.expected_due_date.month 12 %}Dec{% endifequal %} {{ milestone.expected_due_date.year }}{% endifequal %} - {{ milestone.description|safe }} +{% endfor %} +Internet-Drafts: +{% for draft in wg.drafts %} - {{draft.title|safe}} [{{draft.filename}}-{{draft.revision}}] ({{ draft.txt_page_count }} pages) +{% endfor %} +{% if wg.rfcs %}Requests for Comments: +{% for rfc in wg.rfcs %} {{rfc}}: {{rfc.title|safe}} ({{ rfc.txt_page_count }} pages){% for obs in rfc.obsoletes%} + * {{obs.action}} RFC{{obs.rfc_acted_on_id}}{% endfor %}{% for obs in rfc.obsoleted_by%} + * {%ifequal obs.action 'Obsoletes'%}OBSOLETED BY{%else%}Updated by{%endifequal%} RFC{{obs.rfc_id}}{% endfor %} +{%endfor%} +{%else%}No Requests for Comments{% endif %} diff --git a/ietf/templates/wginfo/wg-dir.html b/ietf/templates/wginfo/wg-dir.html new 
file mode 100644 index 000000000..95b2c2296 --- /dev/null +++ b/ietf/templates/wginfo/wg-dir.html @@ -0,0 +1,92 @@ +{% extends "base.html" %} +{# Copyright The IETF Trust 2009, All Rights Reserved #} +{% comment %} +Portion Copyright (C) 2010 Nokia Corporation and/or its subsidiary(-ies). +All rights reserved. Contact: Pasi Eronen + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions +are met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + + * Neither the name of the Nokia Corporation and/or its + subsidiary(-ies) nor the names of its contributors may be used + to endorse or promote products derived from this software + without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +{% endcomment %} + +{% block title %}Active IETF Working Groups{% endblock %} + +{% block morecss %} +.ietf-wg-table { width: 100%; max-width:50em; } +.ietf-wg-table tr { vertical-align:top; } +{% endblock morecss %} + +{% block content %} +

    Active IETF Working Groups

    + +

    See also: Concluded +Working Groups (www.ietf.org), Concluded Working Groups (tools.ietf.org), Historic Charters.

    + +{% for area in areas|dictsort:"area_acronym.name" %} +

    {{ area.area_acronym.name }}

    + +{% for ad in area.areadirector_set.all|dictsort:"person.last_name" %} +{% if forloop.first %} +

    Area Director{{ forloop.revcounter|pluralize }}:

    +

    +{% endif %} +{{ ad.person }} <{{ ad.person.email.1 }}>{% if not forloop.last %}
    {% endif %} +{% if forloop.last %} +

    +{% endif %} +{% endfor %} + +{% for url in area.additional_urls %} +{% if forloop.first %} +

    Area Specific Web Page{{ forloop.revcounter|pluralize}}:

    +

    +{% endif %} +{{ url.description }}{% if not forloop.last %}
    {% endif %} +{% if forloop.last %} +

    +{% endif %} +{% endfor %} + +{% for wg in area.active_wgs %} +{% if forloop.first %} +

    Active Working Groups:

    +
    + +{% endif %} + + +{% if forloop.last %} +
    {{ wg }}{{ wg.group_acronym.name }}{% for chair in wg.chairs %}{{chair.person}}{% if not forloop.last %}, {% endif %}{% endfor %}
    +
    +{% endif %} +{% empty %} +

    No Active Working Groups

    +{% endfor %}{# wg #} + +{% endfor %}{# area #} +{% endblock %} diff --git a/ietf/templates/wginfo/wg_base.html b/ietf/templates/wginfo/wg_base.html new file mode 100644 index 000000000..438724c11 --- /dev/null +++ b/ietf/templates/wginfo/wg_base.html @@ -0,0 +1,71 @@ +{% extends "base.html" %} +{% comment %} +Copyright (C) 2010 Nokia Corporation and/or its subsidiary(-ies). +All rights reserved. Contact: Pasi Eronen + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions +are met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + + * Neither the name of the Nokia Corporation and/or its + subsidiary(-ies) nor the names of its contributors may be used + to endorse or promote products derived from this software + without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +{% endcomment %} +{% load ietf_filters %} +{% block title %}{{wg.group_acronym.name}} ({{wg.group_acronym.acronym}}) - {% block wg_titledetail %}{% endblock %}{% endblock %} + +{% block morecss %} +.ietf-navset { + background:#214197 url(/images/yui/sprite.png) repeat-x left -1400px; + color:white; + border:1px solid black; + padding:4px; +} +.ietf-navset .selected { font-weight:bold; padding: 0 3px; } +.ietf-navset a, .ietf-navset a:visited { color: white; padding:0 3px; } + +.ietf-wg-details { float:right; padding: 4px;margin-top:16px; margin-left: 16px; } +.ietf-wg-details tr { vertical-align: top; } +.ietf-concluded-bg {background-color: #F8F8D0; } +.ietf-concluded-warning { background:red;color:white;padding:2px 2px;} +{% endblock morecss %} + +{% block content %} +
    +

    {{wg.group_acronym.name}} ({{wg.group_acronym.acronym}}){% if concluded %}
    (concluded WG){% endif %}

    + +
    +{% ifequal selected "documents" %}Documents{% else %}Documents{% endifequal %} | +{% ifequal selected "charter" %}Charter{% else %}Charter{% endifequal %} | +{% if wg.clean_email_archive|startswith:"http:" or wg.clean_email_archive|startswith:"ftp:" %} +List Archive » | +{% endif %} +Tools WG Page » +
    + +{% block wg_content %} +{% endblock wg_content %} + +
    +{% endblock content %} diff --git a/ietf/templates/wginfo/wg_charter.html b/ietf/templates/wginfo/wg_charter.html new file mode 100644 index 000000000..c4aa2fcaa --- /dev/null +++ b/ietf/templates/wginfo/wg_charter.html @@ -0,0 +1,153 @@ +{% extends "wginfo/wg_base.html" %} +{% comment %} +Copyright (C) 2010 Nokia Corporation and/or its subsidiary(-ies). +All rights reserved. Contact: Pasi Eronen + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions +are met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + + * Neither the name of the Nokia Corporation and/or its + subsidiary(-ies) nor the names of its contributors may be used + to endorse or promote products derived from this software + without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +{% endcomment %} +{% load ietf_filters %} +{% block wg_titledetail %}Charter{% endblock %} + +{% block wg_content %} + +
    +{% if concluded %} +Note: The data for concluded WGs +is occasionally incorrect. +{% endif %} + + + + + + + + + + + +{% if wg.wgtechadvisor_set.count %} + + + +{% endif %} +{% if wg.wgeditor_set.count %} + + +{% endif %} +{% if wg.secretaries %} + + +{% endif %} + + + + + + + + + +{% if not concluded %} + + + + + + +{% endif %} + +
    +Personnel +
    Chair{{ wg.chairs.count|pluralize:",s" }}: +{% for chair in wg.chairs %} +{{ chair.person|escape }} <{{ chair.person.email.1 }}>
    +{% endfor %} +
    Area Director: +{% ifequal wg.area_director.person.email.1 "noreply@ietf.org" %}?{%else%} +{{ wg.area_director.person }} <{{wg.area_director.person.email.1 }}>{% endifequal %} +
    Tech Advisor{{ wg.wgtechadvisor_set.count|pluralize:",s" }}: +{% for techadvisor in wg.wgtechadvisor_set.all %} +{{ techadvisor.person }} <{{ techadvisor.person.email.1 }}>
    +{% endfor %} +
    Editor{{ wg.wgeditor_set.count|pluralize:",s" }}: +{% for editor in wg.wgeditor_set.all %} +{{ editor.person }} <{{ editor.person.email.1 }}>
    +{% endfor %} +
    Secretar{{ wg.secretaries.count|pluralize:"y,ies" }}: +{% for secretary in wg.secretaries %} +{{ secretary.person }} <{{ secretary.person.email.1 }}>
    +{% endfor %} +
    +
    Mailing List +
    Address:{{ wg.email_address|urlize }}
    To Subscribe:{{ wg.email_subscribe|urlize }}
    Archive:{{ wg.clean_email_archive|urlize }}
    +
    Jabber Chat +
    Room Address:xmpp:{{wg}}@jabber.ietf.org
    Logs:http://jabber.ietf.org/logs/{{wg}}/
    +
    + +{% if wg.additional_urls %} +

    In addition to the charter maintained by the IETF Secretariat, there is additional information about this working group on the Web at: +{% for url in wg.additional_urls %} +{{ url.description}}{% if not forloop.last %}, {% endif %} +{% endfor %} +

    +{% endif %} + +

    Description of Working Group

    +

    {{ wg.charter_text|escape|format_charter|safe }}

    + +

    Goals and Milestones

    + +{% for milestone in wg.milestones %} + + + +{% endfor %} +
    + {% ifequal milestone.done 'Done' %} Done + {% else %} + {%ifequal milestone.expected_due_date.month 1 %}Jan{% endifequal %} + {%ifequal milestone.expected_due_date.month 2 %}Feb{% endifequal %} + {%ifequal milestone.expected_due_date.month 3 %}Mar{% endifequal %} + {%ifequal milestone.expected_due_date.month 4 %}Apr{% endifequal %} + {%ifequal milestone.expected_due_date.month 5 %}May{% endifequal %} + {%ifequal milestone.expected_due_date.month 6 %}Jun{% endifequal %} + {%ifequal milestone.expected_due_date.month 7 %}Jul{% endifequal %} + {%ifequal milestone.expected_due_date.month 8 %}Aug{% endifequal %} + {%ifequal milestone.expected_due_date.month 9 %}Sep{% endifequal %} + {%ifequal milestone.expected_due_date.month 10 %}Oct{% endifequal %} + {%ifequal milestone.expected_due_date.month 11 %}Nov{% endifequal %} + {%ifequal milestone.expected_due_date.month 12 %}Dec{% endifequal %} + {{ milestone.expected_due_date.year }} + {% endifequal %} + {{ milestone.description|escape }} +
    +{% endblock wg_content %} + diff --git a/ietf/templates/wginfo/wg_documents.html b/ietf/templates/wginfo/wg_documents.html new file mode 100644 index 000000000..91bfc5ed8 --- /dev/null +++ b/ietf/templates/wginfo/wg_documents.html @@ -0,0 +1,54 @@ +{% extends "wginfo/wg_base.html" %} +{% comment %} +Copyright (C) 2010 Nokia Corporation and/or its subsidiary(-ies). +All rights reserved. Contact: Pasi Eronen + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions +are met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + + * Neither the name of the Nokia Corporation and/or its + subsidiary(-ies) nor the names of its contributors may be used + to endorse or promote products derived from this software + without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +{% endcomment %} +{% block wg_titledetail %}Documents{% endblock %} + +{% block wg_content %} + +{% regroup docs by view_sort_group as grouped_docs %} + + + +{% for doc_group in grouped_docs %} + + +{% for doc in doc_group.list %} +{% include "idrfc/search_result_row.html" %} +{% endfor %} + +{% endfor %} +
    DocumentTitleDateStatusArea Director
    {{doc_group.grouper}}s
    + +{% endblock wg_content %} + diff --git a/ietf/tests.py b/ietf/tests.py deleted file mode 100644 index 2e532d97c..000000000 --- a/ietf/tests.py +++ /dev/null @@ -1,437 +0,0 @@ -# Copyright The IETF Trust 2007, All Rights Reserved - -import os -import re -import traceback -import urllib2 as urllib -from datetime import datetime - -from ietf.utils import soup2text as html2text -from difflib import unified_diff - -import django.test.simple -from django.test import TestCase -from django.conf import settings -from django.db import connection -from django.core import management -import ietf.urls -from ietf.utils import log - -startup_database = settings.DATABASE_NAME # The startup database name, before changing to test_... - -def run_tests(module_list, verbosity=0, extra_tests=[]): - module_list.append(ietf.urls) - # If we append 'ietf.tests', we get it twice, first as itself, then - # during the search for a 'tests' module ... - return django.test.simple.run_tests(module_list, 0, extra_tests) - -def reduce_text(html, pre=False, fill=True): - if html.count("
  • ") > 5*html.count("
  • "): - html = html.replace("
  • ", "
  • ") - if not fill: - html = re.sub("
    ", "

    ", html) - html = re.sub(r"(?i)(RFC) (\d+)", r"\1\2", html) # ignore "RFC 1234" vs. "RFC1234" diffs - html = re.sub(r"\bID\b", r"I-D", html) # idnore " ID " vs. " I-D " diffs - text = html2text(html, pre=pre, fill=fill).strip() - text = text.replace(" : ", ": ").replace(" :", ": ") - text = text.replace('."', '".') - text = text.replace(',"', '",') - return text - -def lines(text, pre=False): - if pre: - text = text.split("\n") - else: - text = [ line.strip() for line in text.split("\n") if line.strip()] - return text - -def sorted(lst): - lst.sort() - return lst - -def get_patterns(module): - all = [] - try: - patterns = module.urlpatterns - except AttributeError: - patterns = [] - for item in patterns: - try: - subpatterns = get_patterns(item.urlconf_module) - except: - subpatterns = [""] - for sub in subpatterns: - if not sub: - all.append(item.regex.pattern) - elif sub.startswith("^"): - all.append(item.regex.pattern + sub[1:]) - else: - all.append(item.regex.pattern + ".*" + sub) - return all - -def split_url(url): - if "?" in url: - url, args = url.split("?", 1) - args = dict([ arg.split("=", 1) for arg in args.split("&") if "=" in arg ]) - else: - args = {} - return url, args - -def read_testurls(filename): - tuples = [] - file = open(filename) - for line in file: - line = line.strip() - if line and not line.startswith('#'): - line = line.split("#", 1)[0] - urlspec = line.split() - if len(urlspec) == 2: - codes, testurl = urlspec - goodurl = None - elif len(urlspec) == 3: - codes, testurl, goodurl = urlspec - # strip protocol and host -- we're making that configurable - goodurl = re.sub("^https?://[a-z0-9.]+", "", goodurl) - if not goodurl.startswith("/"): - goodurl = "/" + goodurl - else: - raise ValueError("Expected 'HTTP_CODE TESTURL [GOODURL]' in %s line, found '%s'." 
% (filename, line)) - - - codes = dict([ (item, "") for item in codes.split(",") if not":" in item] + - [ (item.split(":")[:2]) for item in codes.split(",") if ":" in item] ) - tuples += [ (codes, testurl, goodurl) ] - file.close() - return tuples - -def get_testurls(): - testtuples = [] - for root, dirs, files in os.walk(settings.BASE_DIR): - if "testurl.list" in files: - testtuples += read_testurls(root+"/testurl.list") - if "testurls.list" in files: - testtuples += read_testurls(root+"/testurls.list") - return testtuples - -def filetext(filename): - file = open(filename) - chunk = file.read() - file.close() - return chunk - - -prev_note_time = datetime.utcnow() -def note(string): - global prev_note_time - """Like a print function, but adds a leading timestamp line""" - now = datetime.utcnow() - print string - print now.strftime(" %Y-%m-%d_%H:%M"), "+%ds" % (now-prev_note_time).seconds - prev_note_time = datetime.utcnow() - -def module_setup(module): - # get selected prefixes, if any - module.prefixes = os.environ.get("URLPREFIX", "").split() - - # find test urls - module.testtuples = [] - module.testurls = [] - module.diffchunks = {} - module.ignores = {} - module.testtuples = get_testurls() - module.testurls = [ tuple[1] for tuple in module.testtuples ] - - # find diff chunks - testdir = os.path.abspath(settings.BASE_DIR+"/../test/diff/") - for item in os.listdir(testdir): - path = testdir + "/" + item - if item.startswith("generic-") and os.path.isfile(path): - chunk = filetext(path).rstrip() - chunk = re.sub(r"([\[\]().|+*?])", r"\\\1", chunk) - # @@ -27,0 \+23,1 @@ - chunk = re.sub(r"(?m)^@@ -\d+,(\d+) \\\+\d+,(\d+) @@$", r"@@ -\d+,\1 \+\d+,\2 @@", chunk) - module.diffchunks[item] = chunk - - # find ignore chunks - for root, dirs, files in os.walk(settings.BASE_DIR+"/../test/ignore/"): - # This only expects one directory level below test/ignore/: - for file in files: - path = root + "/" + file - dir = root.split("/")[-1] - chunk = filetext(path).strip() - if not dir in module.ignores: - module.ignores[dir] = [] - module.ignores[dir].append(chunk) - - # extract application urls: - module.patterns = get_patterns(ietf.urls) - - # apply prefix filters - if module.prefixes: - module.patterns = [ pattern for pattern in module.patterns for prefix in module.prefixes if re.match(prefix, pattern) ] - module.testtuples = [ tuple for tuple in module.testtuples for prefix in module.prefixes if re.match(prefix, tuple[1][1:]) ] - - - # Use the default database for the url tests, instead of the test database - module.testdb = settings.DATABASE_NAME - connection.close() - settings.DATABASE_NAME = startup_database - # Install updated fixtures: - # Also has the side effect of creating the database connection. 
- management.syncdb(verbosity=1, interactive=False) - connection.close() - settings.DATABASE_NAME = module.testdb - connection.cursor() - -class UrlTestCase(TestCase): - - def __init__(self, *args, **kwargs): - TestCase.__init__(self, *args, **kwargs) - - - def setUp(self): - from django.test.client import Client - self.client = Client() - - self.testdb = settings.DATABASE_NAME - connection.close() - settings.DATABASE_NAME = startup_database - connection.cursor() - - def tearDown(self): - # Revert to using the test database - connection.close() - settings.DATABASE_NAME = self.testdb - connection.cursor() - - def testCoverage(self): - covered = [] - for codes, testurl, goodurl in module.testtuples: - for pattern in module.patterns: - if re.match(pattern, testurl[1:]): - covered.append(pattern) - # We should have at least one test case for each url pattern declared - # in our Django application: - #self.assertEqual(set(patterns), set(covered), "Not all the - #application URLs has test cases. The missing are: %s" % (list(set(patterns) - set(covered)))) - if not set(module.patterns) == set(covered): - missing = list(set(module.patterns) - set(covered)) - print "Not all the application URLs has test cases, there are %d missing." % (len(missing)) - print "The ones missing are: " - for pattern in missing: - if not pattern[1:].split("/")[0] in [ "admin", "accounts" ]: - print "NoTest", pattern - print "" - else: - print "All the application URL patterns seem to have test cases." - #print "Not all the application URLs has test cases." - - def doRedirectsTest(self, lst): - response_count = {} - for codes, url, master in lst: - if "skipredir" in codes or "Skipredir" in codes or "skipredirect" in codes: - print "Skipping %s" % (url) - elif url and master: - testurl = master.replace('https://datatracker.ietf.org','') - baseurl, args = split_url(testurl) - try: - response = self.client.get(baseurl, args) - code = str(response.status_code) - if code == "301": - if response['Location'] == url: - note("OK %s %s -> %s" % (code, testurl, url)) - res = ("OK", code) - else: - print "Miss %3s %s ->" % (code, testurl) - print " %s" % (response['Location']) - note( " (wanted %s)" % (url)) - print "" - res = None - #res = ("Fail", "wrong-reponse") - else: - note("Fail %s %s" % (code, testurl)) - res = ("Fail", code) - except: - res = ("Fail", "Exc") - note("Exception for URL '%s'" % testurl) - traceback.print_exc() - if res: - if not res in response_count: - response_count[res] = 0 - response_count[res] += 1 - if response_count: - print "" - note("Response count:") - for res in response_count: - ind, code = res - print " %-4s %s: %s " % (ind, code, response_count[res]) - for res in response_count: - ind, code = res - self.assertEqual(ind, "OK", "Found %s cases of result code: %s" % (response_count[res], code)) - if response_count: - print "" - - def doUrlsTest(self, lst): - response_count = {} - for codes, url, master in lst: - if "skip" in codes or "Skip" in codes: - print "Skipping %s" % (url) - elif url: - baseurl, args = split_url(url) - #print "Trying codes, url: (%s, '%s')" % (codes, url) - try: - response = self.client.get(baseurl, args) - code = str(response.status_code) - if code in codes: - note("OK %s %s" % (code, url)) - res = ("OK", code) - else: - note("Fail %s %s" % (code, url)) - res = ("Fail", code) - except: - res = ("Fail", "Exc") - note("Exception for URL '%s'" % url) - traceback.print_exc() - if master and not "skipdiff" in codes: - hostprefix = settings.TEST_REFERENCE_URL_PREFIX - if 
hostprefix.endswith("/"): - hostprefix = hostprefix[:-1] - master = hostprefix + master - goodhtml = None - try: - #print "Fetching", master, "...", - mfile = urllib.urlopen(master) - goodhtml = mfile.read() - mfile.close() - note(" 200 %s" % (master)) - except urllib.URLError, e: - note(" Error retrieving %s: %s" % (master, e)) - except urllib.BadStatusLine, e: - note(" Error retrieving %s: %s" % (master, e)) - try: - if goodhtml and response.content: - if "sort" in codes: - testtext = reduce_text(response.content, fill=False) - goodtext = reduce_text(goodhtml, fill=False) - else: - testtext = reduce_text(response.content) - goodtext = reduce_text(goodhtml) - # Always ignore some stuff - for regex in module.ignores["always"]: - testtext = re.sub(regex, "", testtext) - goodtext = re.sub(regex, "", goodtext) - if "ignore" in codes: - ignores = codes["ignore"].split("/") - for ignore in ignores: - for regex in module.ignores[ignore]: - testtext = re.sub(regex, "", testtext) - goodtext = re.sub(regex, "", goodtext) - #log("Checking text: %s" % testtext[:96]) - testtext = lines(testtext.strip()) - goodtext = lines(goodtext.strip()) - if "sort" in codes: - testtext = sorted(testtext) - while testtext and not testtext[0]: - del testtext[0] - while testtext and not testtext[-1]: - del testtext[-1] - goodtext = sorted(goodtext) - while goodtext and not goodtext[0]: - del goodtext[0] - while goodtext and not goodtext[-1]: - del goodtext[-1] - if testtext == goodtext: - note("OK cmp %s" % (url)) - else: - contextlines = 0 - difflist = list(unified_diff(goodtext, testtext, master, url, "", "", contextlines, lineterm="")) - diff = "\n".join(difflist[2:]) - log("Checking diff: %s" % diff[:96]) - keys = module.diffchunks.keys() - keys.sort - for key in keys: - chunk = module.diffchunks[key] - if chunk: - if not re.search(chunk, diff): - log("No match: %s" % chunk[:96]) - while re.search(chunk, diff): - log("Found chunk: %s" % chunk[:96]) - diff = re.sub(chunk, "", diff) - if len(diff.strip().splitlines()) == 2: - # only the initial 2 lines of the diff remains -- - # discard them too - diff = "" - if diff: - dfile = "%s/../test/diff/%s" % (settings.BASE_DIR, re.sub("[/?&=]", "_", url) ) - if os.path.exists(dfile): - dfile = open(dfile) - #print "Reading OK diff file:", dfile.name - okdiff = dfile.read() - dfile.close() - else: - okdiff = "" - if diff.strip() == okdiff.strip(): - note("OK cmp %s" % (url)) - else: - if okdiff: - note("Failed diff: %s" % (url)) - else: - note("Diff: %s" % (url)) - print "\n".join(diff.split("\n")[:100]) - if len(diff.split("\n")) > 100: - print "... 
(skipping %s lines of diff)" % (len(difflist)-100) - else: - note("OK cmp %s" % (url)) - - except: - note("Exception occurred for url %s" % (url)) - traceback.print_exc() - #raise - - if not res in response_count: - response_count[res] = 0 - response_count[res] += 1 - else: - pass - if response_count: - print "" - note("Response count:") - for res in response_count: - ind, code = res - print " %-4s %s: %s " % (ind, code, response_count[res]) - for res in response_count: - ind, code = res - self.assertEqual(ind, "OK", "Found %s cases of result code: %s" % (response_count[res], code)) - if response_count: - print "" - - def testUrlsList(self): - note("\nTesting specified URLs:") - self.doUrlsTest(module.testtuples) - - def testRedirectsList(self): - note("\nTesting specified Redirects:") - self.doRedirectsTest(module.testtuples) - - def testUrlsFallback(self): - note("\nFallback: Test access to URLs which don't have an explicit test entry:") - lst = [] - for pattern in module.patterns: - if pattern.startswith("^") and pattern.endswith("$"): - url = "/"+pattern[1:-1] - # if there is no variable parts in the url, test it - if re.search("^[-a-z0-9./_]*$", url) and not url in module.testurls and not url.startswith("/admin/"): - lst.append((["200"], url, None)) - else: - #print "No fallback test for %s" % (url) - pass - else: - lst.append((["Skip"], pattern, None)) - - self.doUrlsTest(lst) - - -class Module: - pass -module = Module() -module_setup(module) diff --git a/ietf/testurl.list b/ietf/testurl.list deleted file mode 100644 index 84e6e12d6..000000000 --- a/ietf/testurl.list +++ /dev/null @@ -1,9 +0,0 @@ -# Top-level test URL list. Should probably be empty; all test URls should be -# specified in the application level testurl.list - -301 / # Top level url. Has no comparable page today. 
-skip /images/ietf-icon.bmp -skip /css/base.css -skip /js/ - -200 /googlea30ad1dacffb5e5b.html # Google webmaster tool verification page diff --git a/ietf/urls.py b/ietf/urls.py index 321cedf22..41ce897bc 100644 --- a/ietf/urls.py +++ b/ietf/urls.py @@ -1,51 +1,70 @@ -# Copyright The IETF Trust 2007, All Rights Reserved +# Copyright The IETF Trust 2007, 2009, All Rights Reserved +import django from django.conf.urls.defaults import patterns, include, handler404, handler500 +from django.contrib import admin -from ietf.iesg.feeds import IESGMinutes -from ietf.idtracker.feeds import DocumentComments +from ietf.iesg.feeds import IESGAgenda +from ietf.idtracker.feeds import DocumentComments, InLastCall from ietf.ipr.feeds import LatestIprDisclosures +from ietf.proceedings.feeds import LatestWgProceedingsActivity +from ietf.liaisons.feeds import Liaisons + +from ietf.idtracker.sitemaps import IDTrackerMap, DraftMap +from ietf.liaisons.sitemaps import LiaisonMap +from ietf.ipr.sitemaps import IPRMap +from ietf.announcements.sitemaps import NOMCOMAnnouncementsMap from django.conf import settings +admin.autodiscover() + feeds = { - 'iesg_minutes': IESGMinutes, + 'iesg-agenda': IESGAgenda, + 'last-call': InLastCall, 'comments': DocumentComments, 'ipr': LatestIprDisclosures, + 'liaison': Liaisons, + 'wg-proceedings' : LatestWgProceedingsActivity +} + +sitemaps = { + 'idtracker': IDTrackerMap, + 'drafts': DraftMap, + 'liaison': LiaisonMap, + 'ipr': IPRMap, + 'nomcom-announcements': NOMCOMAnnouncementsMap, } urlpatterns = patterns('', - (r'^feed/(?P.*)/$', 'django.contrib.syndication.views.feed', - { 'feed_dict': feeds}), - (r'^ann/', include('ietf.announcements.urls')), - (r'^idtracker/', include('ietf.idtracker.urls')), - #(r'^my/', include('ietf.my.urls')), - (r'^drafts/', include('ietf.idindex.urls')), - (r'^iesg/', include('ietf.iesg.urls')), - (r'^liaison/', include('ietf.liaisons.urls')), - (r'^list/', include('ietf.mailinglists.urls')), - (r'^(?Ppublic|cgi-bin)/', include('ietf.redirects.urls')), - (r'^ipr/', include('ietf.ipr.urls')), - (r'^meeting/', include('ietf.meeting.urls')), - (r'^accounts/', include('ietf.ietfauth.urls')), + (r'^feed/(?P.*)/$', 'django.contrib.syndication.views.feed', + { 'feed_dict': feeds}), + (r'^sitemap.xml$', 'django.contrib.sitemaps.views.index', + { 'sitemaps': sitemaps}), + (r'^sitemap-(?P
    .+).xml$', 'django.contrib.sitemaps.views.sitemap', + {'sitemaps': sitemaps}), + (r'^ann/', include('ietf.announcements.urls')), + (r'^idtracker/', include('ietf.idtracker.urls')), + (r'^drafts/', include('ietf.idindex.urls')), + (r'^iesg/', include('ietf.iesg.urls')), + (r'^liaison/', include('ietf.liaisons.urls')), + (r'^list/', include('ietf.mailinglists.urls')), + (r'^(?Ppublic)/', include('ietf.redirects.urls')), + (r'^ipr/', include('ietf.ipr.urls')), + (r'^meeting/', include('ietf.meeting.urls')), + (r'^accounts/', include('ietf.ietfauth.urls')), + (r'^doc/', include('ietf.idrfc.urls')), + (r'^wg/', include('ietf.wginfo.urls')), - (r'^$', 'ietf.redirects.views.redirect'), - - # Uncomment this for admin: - (r'^admin/', include('django.contrib.admin.urls')), - - # Uncomment this for review pages: - (r'^review/$', 'ietf.utils.views.review'), - (r'^review/all/$', 'ietf.utils.views.all'), - (r'^review/(?P[0-9a-f]+)/$', 'ietf.utils.views.review'), - (r'^review/top/(?P[0-9a-f]+)/$', 'ietf.utils.views.top'), - - # Google webmaster tools verification url - (r'googlea30ad1dacffb5e5b.html', 'django.views.generic.simple.direct_to_template', { 'template': 'googlea30ad1dacffb5e5b.html' }) + (r'^$', 'ietf.idrfc.views.main'), + ('^admin/', include(admin.site.urls)), + # Google webmaster tools verification url + (r'^googlea30ad1dacffb5e5b.html', 'django.views.generic.simple.direct_to_template', { 'template': 'googlea30ad1dacffb5e5b.html' }), ) if settings.SERVER_MODE in ('development', 'test'): urlpatterns += patterns('', (r'^(?P(?:images|css|js)/.*)$', 'django.views.static.serve', {'document_root': settings.MEDIA_ROOT}), + (r'^_test500/$', lambda x: None), ) diff --git a/ietf/utils/__init__.py b/ietf/utils/__init__.py index 2c58341c9..19ad8d4e9 100644 --- a/ietf/utils/__init__.py +++ b/ietf/utils/__init__.py @@ -1,14 +1,9 @@ # Copyright The IETF Trust 2007, All Rights Reserved -from listop import orl, flattenl from log import log from cache_foreign_key import FKAsOneToOne -from templated_form import makeTemplatedForm -from soup2text import TextSoup, soup2text from draft_search import normalize_draftname -makeFormattingForm = makeTemplatedForm - # See http://docs.python.org/tut/node8.html regarding the use of __all__ and # also regarding the practice of using "from xxx import *" in interactive # sessions vs. in source files. diff --git a/ietf/utils/broken_foreign_key.py b/ietf/utils/broken_foreign_key.py new file mode 100644 index 000000000..da5648282 --- /dev/null +++ b/ietf/utils/broken_foreign_key.py @@ -0,0 +1,43 @@ +from django.db import models + +class InvalidToNoneOverrider(object): + """Converts invalid ids to None before returning them to Django.""" + def __init__(self, cls, fieldname, null_values): + self.fieldname = fieldname + self.real_field = getattr(cls, fieldname) + self.null_values = null_values + + def __get__(self, instance, instance_type=None): + if instance is None: # calls on the class + return self + + v = getattr(instance, u"%s_id" % self.fieldname) + if v == None or v in self.null_values: + return None + else: + # forward to real field + return self.real_field.__get__(instance, instance_type) + + def __set__(self, instance, value): + # forward to real field + self.real_field.__set__(instance, value) + +class BrokenForeignKey(models.ForeignKey): + """ForeignKey for when some null values aren't NULL in the database. + + Django is strict with foreign keys, invalid ids result in + DoesNotExist in inconvenient places. 
With this field, invalid ids + are overridden to return None. Takes a keyword argument + 'null_values' to determine which ids should be considered + invalid and equivalent to NULL.""" + + def __init__(self, *args, **kwargs): + self.broken_null_values = kwargs.pop('null_values', (0,)) + super(self.__class__, self).__init__(*args, **kwargs) + +def broken_foreign_key_class_prepared_handler(sender, **kwargs): + for f in sender._meta.fields: + if type(f) == BrokenForeignKey: + setattr(sender, f.name, InvalidToNoneOverrider(sender, f.name, f.broken_null_values)) + +models.signals.class_prepared.connect(broken_foreign_key_class_prepared_handler) diff --git a/ietf/utils/cache_foreign_key.py b/ietf/utils/cache_foreign_key.py index a1470847d..cbf43f8f7 100644 --- a/ietf/utils/cache_foreign_key.py +++ b/ietf/utils/cache_foreign_key.py @@ -36,12 +36,7 @@ class FKAsOneToOne(object): return instance._field_values[self.field] - def __set__(self, instance, value): - if self.reverse: - # this is dangerous - #other_instance = self.__get_attr(instance).all()[0] - #setattr(other_instance, self.field, value) - #other_instance.save() - raise NotImplemented - else: - setattr(instance, self.field, value) + # We don't try to be smart and define __set__ to adjust the other + # end of the relation since that could require setting several + # fields, failing silently with a naive implementation. Updating + # the other end is the responsibility of the caller. diff --git a/ietf/utils/cached_lookup_field.py b/ietf/utils/cached_lookup_field.py new file mode 100644 index 000000000..5c56ab7c8 --- /dev/null +++ b/ietf/utils/cached_lookup_field.py @@ -0,0 +1,34 @@ +from django.core.exceptions import ObjectDoesNotExist + +class CachedLookupField(object): + """Django field for looking up and caching another object, like a + ForeignKey only you must supply a function for doing the lookup + yourself (and there's no reverse magic). Useful in case a real foreign + key is missing. "lookup" is called on the first access to the field + and gets the instance as sole argument; it should return an object + or throw a DoesNotExist exception (which is normalized to None), e.g. 
+ + class A(django.db.models.Model): + foo = CachedLookupField(lookup=lambda self: Foo.objects.get(key=self.key)) + key = CharField() + """ + + def __init__(self, lookup): + self.lookup = lookup + self.value = None + self.value_cached = False + + def __get__(self, instance, instance_type=None): + if not instance_type: + return self + + if not self.value_cached: + try: + self.value = self.lookup(instance) + except ObjectDoesNotExist: + self.value = None + self.value_cached = True + + return self.value + + diff --git a/ietf/utils/html.py b/ietf/utils/html.py new file mode 100644 index 000000000..499189635 --- /dev/null +++ b/ietf/utils/html.py @@ -0,0 +1,53 @@ +# Taken from http://code.google.com/p/soclone/source/browse/trunk/soclone/utils/html.py + +"""Utilities for working with HTML.""" +import html5lib +from html5lib import sanitizer, serializer, tokenizer, treebuilders, treewalkers + +class HTMLSanitizerMixin(sanitizer.HTMLSanitizerMixin): + acceptable_elements = ('a', 'abbr', 'acronym', 'address', 'b', 'big', + 'blockquote', 'br', 'caption', 'center', 'cite', 'code', 'col', + 'colgroup', 'dd', 'del', 'dfn', 'dir', 'div', 'dl', 'dt', 'em', 'font', + 'h1', 'h2', 'h3', 'h4', 'h5', 'h6', 'hr', 'i', 'img', 'ins', 'kbd', + 'li', 'ol', 'p', 'pre', 'q', 's', 'samp', 'small', 'span', 'strike', + 'strong', 'sub', 'sup', 'table', 'tbody', 'td', 'tfoot', 'th', 'thead', + 'tr', 'tt', 'u', 'ul', 'var') + + acceptable_attributes = ('abbr', 'align', 'alt', 'axis', 'border', + 'cellpadding', 'cellspacing', 'char', 'charoff', 'charset', 'cite', + 'cols', 'colspan', 'datetime', 'dir', 'frame', 'headers', 'height', + 'href', 'hreflang', 'hspace', 'lang', 'longdesc', 'name', 'nohref', + 'noshade', 'nowrap', 'rel', 'rev', 'rows', 'rowspan', 'rules', 'scope', + 'span', 'src', 'start', 'summary', 'title', 'type', 'valign', 'vspace', + 'width') + + allowed_elements = acceptable_elements + allowed_attributes = acceptable_attributes + allowed_css_properties = () + allowed_css_keywords = () + allowed_svg_properties = () + +class HTMLSanitizer(tokenizer.HTMLTokenizer, HTMLSanitizerMixin): + def __init__(self, stream, encoding=None, parseMeta=True, useChardet=True, + lowercaseElementName=True, lowercaseAttrName=True): + tokenizer.HTMLTokenizer.__init__(self, stream, encoding, parseMeta, + useChardet, lowercaseElementName, + lowercaseAttrName) + + def __iter__(self): + for token in tokenizer.HTMLTokenizer.__iter__(self): + token = self.sanitize_token(token) + if token: + yield token + +def sanitize_html(html): + """Sanitizes an HTML fragment.""" + p = html5lib.HTMLParser(tokenizer=HTMLSanitizer, + tree=treebuilders.getTreeBuilder("dom")) + dom_tree = p.parseFragment(html) + walker = treewalkers.getTreeWalker("dom") + stream = walker(dom_tree) + s = serializer.HTMLSerializer(omit_optional_tags=False, + quote_attr_values=True) + output_generator = s.serialize(stream) + return u''.join(output_generator) diff --git a/ietf/utils/listop.py b/ietf/utils/listop.py deleted file mode 100644 index 498fadd1a..000000000 --- a/ietf/utils/listop.py +++ /dev/null @@ -1,21 +0,0 @@ -# Copyright The IETF Trust 2007, All Rights Reserved - -import operator - -def orl(list): - """ Return the "or" of every element in a list. - Used to generate "or" queries with a list of Q objects. 
""" - if list: - return reduce(operator.__or__, list) - else: - return None - -def flattenl(list): - """ Flatten a list one level, e.g., turn - [ ['a'], ['b'], ['c', 'd'] ] into - [ 'a', 'b', 'c', 'd' ] - """ - if list: - return reduce(operator.__concat__, list) - else: - return [] \ No newline at end of file diff --git a/ietf/utils/log.py b/ietf/utils/log.py index 8b8d0d4f6..ba4efba4c 100644 --- a/ietf/utils/log.py +++ b/ietf/utils/log.py @@ -6,8 +6,6 @@ import os.path import ietf from django.conf import settings -syslog.openlog("django", syslog.LOG_PID, syslog.LOG_USER) - def getclass(frame): cls = None argnames, varargs, varkw, defaults = inspect.getargvalues(frame) @@ -23,13 +21,18 @@ def getcaller(): return (pmodule, pclass, pfunction, pfile, pline) def log(msg): - mod, cls, func, file, line = getcaller() - file = os.path.abspath(file) - file = file.replace(settings.BASE_DIR, "") - if func == "": - where = "" - else: - where = " in " + func + "()" + if isinstance(msg, unicode): + msg = msg.encode('unicode_escape') + try: + mod, cls, func, file, line = getcaller() + file = os.path.abspath(file) + file = file.replace(settings.BASE_DIR, "") + if func == "": + where = "" + else: + where = " in " + func + "()" + except IndexError: + file, line, where = "/", 0, "" syslog.syslog("ietf%s(%d)%s: %s" % (file, line, where, msg)) log("IETFdb v%s started" % ietf.__version__) diff --git a/ietf/utils/mail.py b/ietf/utils/mail.py index 96dae130b..0b947ca3c 100644 --- a/ietf/utils/mail.py +++ b/ietf/utils/mail.py @@ -4,10 +4,12 @@ from email.Utils import make_msgid, formatdate, formataddr, parseaddr, getaddres from email.MIMEText import MIMEText from email.MIMEMessage import MIMEMessage from email.MIMEMultipart import MIMEMultipart +from email import message_from_string import smtplib from django.conf import settings +from django.core.exceptions import ImproperlyConfigured from django.template.loader import render_to_string -from django.template import RequestContext +from django.template import Context,RequestContext from ietf.utils import log import sys import time @@ -21,7 +23,7 @@ def add_headers(msg): msg['From'] = settings.DEFAULT_FROM_EMAIL return msg -def send_smtp(msg): +def send_smtp(msg, bcc=None): ''' Send a Message via SMTP, based on the django email server settings. The destination list will be taken from the To:/Cc: headers in the @@ -30,13 +32,26 @@ def send_smtp(msg): ''' add_headers(msg) (fname, frm) = parseaddr(msg.get('From')) - to = [addr for name, addr in getaddresses(msg.get_all('To') + msg.get_all('Cc', []))] + addrlist = msg.get_all('To') + msg.get_all('Cc', []) + if bcc: + addrlist += [bcc] + to = [addr for name, addr in getaddresses(addrlist)] server = None try: - server = smtplib.SMTP(settings.EMAIL_HOST, settings.EMAIL_PORT) + server = smtplib.SMTP() if settings.DEBUG: server.set_debuglevel(1) + server.connect(settings.EMAIL_HOST, settings.EMAIL_PORT) if settings.EMAIL_HOST_USER and settings.EMAIL_HOST_PASSWORD: + server.ehlo() + if 'starttls' not in server.esmtp_features: + raise ImproperlyConfigured('password configured but starttls not supported') + (retval, retmsg) = server.starttls() + if retval != 220: + raise ImproperlyConfigured('password configured but tls failed: %d %s' % ( retval, retmsg )) + # Send a new EHLO, since without TLS the server might not + # advertise the AUTH capability. 
+ server.ehlo() server.login(settings.EMAIL_HOST_USER, settings.EMAIL_HOST_PASSWORD) server.sendmail(frm, to, msg.as_string()) # note: should pay attention to the return code, as it may @@ -53,11 +68,14 @@ def send_smtp(msg): server.quit() log("sent email from '%s' to %s subject '%s'" % (frm, to, msg.get('Subject', '[no subject]'))) -def copy_email(msg, to): +def copy_email(msg, to, toUser=False): ''' Send a copy of the given email message to the given recipient. ''' add_headers(msg) + # Overwrite the From: header, so that the copy from a development or + # test server doesn't look like spam. + msg['From'] = settings.DEFAULT_FROM_EMAIL new = MIMEMultipart() # get info for first part. # Mode: if it's production, then "copy of a message", otherwise @@ -66,35 +84,47 @@ def copy_email(msg, to): # django settings if debugging? # Should this be a template? if settings.SERVER_MODE == 'production': - new.attach(MIMEText("This is a copy of a message sent from the I-D tracker.")) + explanation = "This is a copy of a message sent from the I-D tracker." + elif settings.SERVER_MODE == 'test' and toUser: + explanation = "The attached message was generated by an instance of the tracker\nin test mode. It is being sent to you because you, or someone acting\non your behalf, is testing the system. If you do not recognize\nthis action, please accept our apologies and do not be concerned as\nthe action is being taken in a test context." else: - new.attach(MIMEText("The attached message would have been sent, but the tracker is in %s mode.\nIt was not sent to anybody.\n" % settings.SERVER_MODE)) + explanation = "The attached message would have been sent, but the tracker is in %s mode.\nIt was not sent to anybody." % settings.SERVER_MODE + new.attach(MIMEText(explanation + "\n\n")) new.attach(MIMEMessage(msg)) new['From'] = msg['From'] new['Subject'] = '[Django %s] %s' % (settings.SERVER_MODE, msg.get('Subject', '[no subject]')) new['To'] = to send_smtp(new) -def send_mail_subj(request, to, frm, stemplate, template, context, cc=None, extra=None): +def mail_context(request): + if request: + return RequestContext(request) + else: + return Context() + +def send_mail_subj(request, to, frm, stemplate, template, context, *args, **kwargs): ''' Send an email message, exactly as send_mail(), but the subject field is a template. ''' - subject = render_to_string(stemplate, context, context_instance=RequestContext(request)).replace("\n"," ").strip() - return send_mail(request, to, frm, subject, template, context, cc, extra) + subject = render_to_string(stemplate, context, context_instance=mail_context(request)).replace("\n"," ").strip() + return send_mail(request, to, frm, subject, template, context, *args, **kwargs) -def send_mail(request, to, frm, subject, template, context, cc=None, extra=None): +def send_mail(request, to, frm, subject, template, context, *args, **kwargs): ''' Send an email to the destination [list], with the given return address (or "None" to use the default in settings.py). The body is a text/plain rendering of the template with the context. extra is a dict of extra headers to add. 
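To make the calling convention concrete, a hedged sketch of a caller of this interface; the template name, addresses, context values and draft attribute are placeholders, not actual tracker names:

    from ietf.utils.mail import send_mail

    def notify_state_change(request, draft):
        # frm=None falls back to settings.DEFAULT_FROM_EMAIL via add_headers()
        send_mail(request,
                  ["wg-chairs@example.com"],                 # to (placeholder)
                  None,                                      # frm
                  "State change for %s" % draft.filename,    # subject (placeholder attribute)
                  "hypothetical/state_change_notice.txt",    # template (placeholder)
                  {"draft": draft},                          # context
                  cc="ad@example.com",
                  extra={"Reply-To": "iesg@example.com"})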
''' - txt = render_to_string(template, context, context_instance=RequestContext(request)) - return send_mail_text(request, to, frm, subject, txt, cc, extra) + txt = render_to_string(template, context, context_instance=mail_context(request)) + return send_mail_text(request, to, frm, subject, txt, *args, **kwargs) -def send_mail_text(request, to, frm,subject, txt, cc=None, extra=None): - msg = MIMEText(txt) +def send_mail_text(request, to, frm, subject, txt, cc=None, extra=None, toUser=None, bcc=None): + if isinstance(txt, unicode): + msg = MIMEText(txt.encode('utf-8'), 'plain', 'UTF-8') + else: + msg = MIMEText(txt) if isinstance(frm, tuple): frm = formataddr(frm) if isinstance(to, list) or isinstance(to, tuple): @@ -112,5 +142,24 @@ def send_mail_text(request, to, frm,subject, txt, cc=None, extra=None): for k, v in extra.iteritems(): msg[k] = v if settings.SERVER_MODE == 'production': - send_smtp(msg) - copy_email(msg, "ietf.tracker.archive+%s@gmail.com" % settings.SERVER_MODE) + send_smtp(msg, bcc) + elif settings.SERVER_MODE == 'test': + if toUser: + copy_email(msg, to, toUser=True) + elif request and request.COOKIES.has_key( 'testmailcc' ): + copy_email(msg, request.COOKIES[ 'testmailcc' ]) + try: + copy_to = settings.EMAIL_COPY_TO + except AttributeError: + copy_to = "ietf.tracker.archive+%s@gmail.com" % settings.SERVER_MODE + if bcc: + msg['X-Tracker-Bcc']=bcc + copy_email(msg, copy_to) + +def send_mail_preformatted(request, preformatted): + """Parse preformatted string containing mail with From:, To:, ..., + and send it through the standard IETF mail interface (inserting + extra headers as needed).""" + + msg = message_from_string(preformatted.encode("utf-8")) + send_mail_text(request, msg['To'], msg["From"], msg["Subject"], msg.get_payload(), cc=msg["Cc"], bcc=msg["Bcc"]) diff --git a/ietf/utils/soup2text.py b/ietf/utils/soup2text.py deleted file mode 100755 index 2d5a76a4c..000000000 --- a/ietf/utils/soup2text.py +++ /dev/null @@ -1,144 +0,0 @@ -#!/usr/bin/env python -# Copyright The IETF Trust 2007, All Rights Reserved - -import re -import textwrap -try: - from ietf.contrib.BeautifulSoup import Tag, BeautifulSoup, NavigableString -except: - from BeautifulSoup import Tag, BeautifulSoup, NavigableString - -block_tags = ["[document]", "html", "body", "div", "blockquote", "table", "tr", "p", "pre", "h1", "h2", "h3", "h4", "h5", "h6", "li", "option"] -space_tags = ["th", "td"] -break_tags = ["br"] -ignore_tags = ["head", "script", "style"] -pre_tags = ["pre", "option"] -entities = [("<", "<"), (">", ">"), - (""", '"'), ("'", "'"), - (" ", " "), - ("&", "&"), ] # ampersand last - -def unescape(text): - # Unescape character codes (if possible) - start = 0 - while True: - try: - pos = text.index("&#", start) - except ValueError: - break - match = re.match("&#\d+;", text[pos:]) - if match: - str = match.group() - num = int(str[2:-1]) - if num < 256: - text = text[:pos] + chr(num) + text[pos+len(str):] - start = pos + 1 - else: - start = pos + len(str) - else: - start = pos + 2 - # unescape character entities - for entity, char in entities: - text = text.replace(entity, char) # replace ampersand last - return text - -def para(words, pre, fill): - text = "".join(words) - text = unescape(text) - if not pre: - text = text.strip("\n") - text = text.lstrip() - text = re.sub("[\t\n ]+", " ", text) - if fill: - text = textwrap.fill(text) - return text - -def normalize(str): - # Normalize whitespace at the beginning and end of the string - str = re.sub("^[ \t]+", " ", str) - str = re.sub("[ \t]+$", 
" ", str) - # remove xml PIs and metainformation - str = re.sub("]*>", "", str) - str = re.sub("<\?[^>]*\?>", "", str) - return str - -def render(node, encoding='latin-1', pre=False, fill=True, clean=True): - blocks = [] - words = [] - node.pre = pre or node.name in pre_tags - node.is_block = node.name in block_tags - for child in node: - if isinstance(child, NavigableString): - str = child.__str__(encoding) - if str and not node.pre: - str = normalize(str) - if str: - words.append(str) - elif isinstance(child, Tag): - if child.name in ignore_tags: - pass - else: - child = render(child, encoding, node.pre, fill, clean) - if child.text: - if child.is_block: - if words : - blocks.append(para(words, node.pre, fill)+"\n") - words = [] - blocks.append(child.text+"\n\n") - node.is_block = True - else: - words.append(child.text) - if child.text[-1] not in [" ", "\t", "\n"]: - if child.name in space_tags: - words.append(" ") - if child.name in break_tags: - words.append("\n") - else: - raise ValueError("Unexpected node type: '%s'" % child) - if words: - blocks.append(para(words, node.pre, fill)) - - node.text = ''.join(blocks) - return node - -class TextSoup(BeautifulSoup): - - def as_text(self, encoding='latin-1', pre=False, fill=True, clean=True): - node = render(self, encoding, pre, fill, clean) - str = node.text - if clean: - str = re.sub("[ \t]+", " ", str) - str = re.sub("\n\n+", "\n\n", str) - return str - - - def __str__(self, encoding='latin-1', - prettyPrint=False, indentLevel=0): - node = render(self, encoding, fill=False) - str = node.text - str = re.sub("[ \t]+", " ", str) - str = re.sub("\n\n+", "\n\n", str) - return str - -def soup2text(html, encoding='latin-1', pre=False, fill=True): - # Line ending normalization - html = html.replace("\r\n", "\n").replace("\r", "\n") - # remove comments - html = re.sub("(?s)", "", html) - # some preprocessing to handle common pathological cases - html = re.sub("
<br>[ \t\n]*(<br>)+", "<br><br>", html) - html = re.sub("<br>([^\n])", r"<br>
    \n\1", html) - soup = TextSoup(html) - return soup.as_text(encoding, pre, fill) - -if __name__ == "__main__": - import sys - import urllib2 as urllib - for arg in sys.argv[1:]: - if arg[:6] in ["http:/", "https:", "ftp://"]: - file = urllib.urlopen(arg) - else: - file = open(arg) - html = file.read() - file.close() - print soup2text(html) diff --git a/ietf/utils/templated_form.py b/ietf/utils/templated_form.py deleted file mode 100644 index 54e9c9073..000000000 --- a/ietf/utils/templated_form.py +++ /dev/null @@ -1,34 +0,0 @@ -# Copyright The IETF Trust 2007, All Rights Reserved - -from django.utils.html import escape - -def makeTemplatedForm(template=None): - """Create a form class which formats its fields using the provided template - - The template is provided with a dictionary containing the following key-value - pairs: - - "label": field label, if any, - "errors": list of errors, if any, - "text": widget rendering for an unbound form / field value for a bound form, - "help_text": field help text, if any - """ - from django.template import loader - import django.newforms as forms - - class TemplatedForm(forms.BaseForm): - _template = template - def __getitem__(self, name): - "Returns a rendered field with the given name." - #syslog.syslog("FormattingForm.__getitem__(%s)" % (name, )) - try: - field = self.fields[name] - except KeyError: - raise KeyError('Key %r not found in Form' % name) - if not isinstance(field, forms.fields.Field): - return field - bf = forms.forms.BoundField(self, field, name) - errors = [escape(error) for error in bf.errors] - rendering = loader.render_to_string(self._template, { "errors": errors, "label": bf.label, "text": unicode(bf), "help_text": field.help_text, "field":field }) - return rendering - return TemplatedForm diff --git a/ietf/utils/test_runner.py b/ietf/utils/test_runner.py new file mode 100644 index 000000000..37b44d950 --- /dev/null +++ b/ietf/utils/test_runner.py @@ -0,0 +1,94 @@ +# Copyright The IETF Trust 2007, All Rights Reserved + +# Portion Copyright (C) 2009 Nokia Corporation and/or its subsidiary(-ies). +# All rights reserved. Contact: Pasi Eronen +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following +# disclaimer in the documentation and/or other materials provided +# with the distribution. +# +# * Neither the name of the Nokia Corporation and/or its +# subsidiary(-ies) nor the names of its contributors may be used +# to endorse or promote products derived from this software +# without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +import socket +from django.conf import settings +from django.template import TemplateDoesNotExist + +mail_outbox = [] +loaded_templates = set() +test_database_name = None +old_destroy = None +old_create = None + +def safe_create_1(self, verbosity, *args, **kwargs): + global test_database_name, old_create + print " Creating test database..." + x = old_create(self, 0, *args, **kwargs) + print " Saving test database name "+settings.DATABASE_NAME+"..." + test_database_name = settings.DATABASE_NAME + return x + +def safe_destroy_0_1(*args, **kwargs): + global test_database_name, old_destroy + print " Checking that it's safe to destroy test database..." + if settings.DATABASE_NAME != test_database_name: + print " NOT SAFE; Changing settings.DATABASE_NAME from "+settings.DATABASE_NAME+" to "+test_database_name + settings.DATABASE_NAME = test_database_name + return old_destroy(*args, **kwargs) + +def test_send_smtp(msg, bcc=None): + global mail_outbox + mail_outbox.append(msg) + +def template_coverage_loader(template_name, dirs): + loaded_templates.add(str(template_name)) + raise TemplateDoesNotExist + +template_coverage_loader.is_usable = True + +def run_tests_1(test_labels, *args, **kwargs): + global old_destroy, old_create, test_database_name + from django.db import connection + old_create = connection.creation.__class__.create_test_db + connection.creation.__class__.create_test_db = safe_create_1 + old_destroy = connection.creation.__class__.destroy_test_db + connection.creation.__class__.destroy_test_db = safe_destroy_0_1 + from django.test.simple import run_tests + if not test_labels: + settings.TEMPLATE_LOADERS = ('ietf.utils.test_runner.template_coverage_loader',) + settings.TEMPLATE_LOADERS + test_labels = [x.split(".")[-1] for x in settings.INSTALLED_APPS if x.startswith("ietf")] + ['redirects.TemplateCoverageTestCase',] + kwargs["verbosity"] = 0 + run_tests(test_labels, *args, **kwargs) + +def run_tests(*args, **kwargs): + # Tests that involve switching back and forth between the real + # database and the test database are way too dangerous to run + # against the production database + if socket.gethostname().startswith("core3"): + raise EnvironmentError("Refusing to run tests on core3") + import ietf.utils.mail + ietf.utils.mail.send_smtp = test_send_smtp + run_tests_1(*args, **kwargs) + diff --git a/ietf/utils/test_utils.py b/ietf/utils/test_utils.py new file mode 100644 index 000000000..14461faf2 --- /dev/null +++ b/ietf/utils/test_utils.py @@ -0,0 +1,217 @@ +# Copyright The IETF Trust 2007, All Rights Reserved + +# Portion Copyright (C) 2009 Nokia Corporation and/or its subsidiary(-ies). +# All rights reserved. Contact: Pasi Eronen +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. 
+# +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following +# disclaimer in the documentation and/or other materials provided +# with the distribution. +# +# * Neither the name of the Nokia Corporation and/or its +# subsidiary(-ies) nor the names of its contributors may be used +# to endorse or promote products derived from this software +# without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +import os +import re +import django +from django.db import connection +from django.test import TestCase +from django.test.client import Client +import ietf +from django.conf import settings +from datetime import datetime +import urllib2 as urllib +from difflib import unified_diff + + +import traceback + +class RealDatabaseTest: + def setUpRealDatabase(self): + self._original_testdb = self._getDatabaseName() + newdb = ietf.settings.DATABASE_NAME + print " Switching database from "+self._original_testdb+" to "+newdb + self._setDatabaseName(newdb) + + def tearDownRealDatabase(self): + curdb = self._getDatabaseName() + print " Switching database from "+curdb+" to "+self._original_testdb + self._setDatabaseName(self._original_testdb) + + def _getDatabaseName(self): + return connection.settings_dict['DATABASE_NAME'] + + def _setDatabaseName(self, name): + connection.close() + django.conf.settings.DATABASE_NAME = name + connection.settings_dict['DATABASE_NAME'] = name + connection.cursor() + +def read_testurls(filename): + tuples = [] + file = open(filename) + for line in file: + line = line.strip() + if line and not line.startswith('#'): + line = line.split("#", 1)[0] + urlspec = line.split() + if len(urlspec) == 2: + codes, testurl = urlspec + goodurl = None + elif len(urlspec) == 3: + codes, testurl, goodurl = urlspec + # strip protocol and host -- we're making that configurable + goodurl = re.sub("^https?://[a-z0-9.]+", "", goodurl) + if not goodurl.startswith("/"): + goodurl = "/" + goodurl + else: + raise ValueError("Expected 'HTTP_CODE TESTURL [GOODURL]' in %s line, found '%s'." % (filename, line)) + + + codes = dict([ (item, "") for item in codes.split(",") if not":" in item] + + [ (item.split(":")[:2]) for item in codes.split(",") if ":" in item] ) + tuples += [ (codes, testurl, goodurl) ] + file.close() + return tuples + +def split_url(url): + if "?" 
in url: + url, args = url.split("?", 1) + args = dict([ map(urllib.unquote,arg.split("=", 1)) for arg in args.split("&") if "=" in arg ]) + else: + args = {} + return url, args + +class SimpleUrlTestCase(TestCase,RealDatabaseTest): + + def setUp(self): + self.setUpRealDatabase() + self.client = Client() + self.ref_prefix = os.environ.get("IETFDB_REF_PREFIX", "") + if self.ref_prefix.endswith("/"): + self.ref_prefix = self.ref_prefix[:-1] + self.skip_heavy_tests = os.environ.get("IETFDB_SKIP_HEAVY", False) + + def tearDown(self): + self.tearDownRealDatabase() + + def doTestUrls(self, test_filename): + filename = os.path.dirname(os.path.abspath(test_filename))+"/testurl.list" + print " Reading "+filename + tuples = read_testurls(filename) + failures = 0 + for tuple in tuples: + try: + self.doTestUrl(tuple) + except: + failures = failures + 1 + self.assertEqual(failures, 0, "%d URLs failed" % failures) + + def doTestUrl(self, tuple): + (codes, url, master) = tuple + baseurl, args = split_url(url) + failed = False + #enable this to see query counts + #settings.DEBUG = True + try: + if "heavy" in codes and self.skip_heavy_tests: + print " Skipping heavy test %s" % (url,) + return + now = datetime.utcnow() + response = self.client.get(baseurl, args) + elapsed_dt = datetime.utcnow()-now + elapsed = elapsed_dt.seconds + elapsed_dt.microseconds/1e6 + code = str(response.status_code) + queries = len(connection.queries) + if code in codes: + print "OK %s %s" % (code, url) + else: + print "Fail %s %s" % (code, url) + failed = True + if queries > 0: + print " (%.1f s, %d kB, %d queries)" % (elapsed, len(response.content)/1000, queries) + else: + print " (%.1f s, %d kB)" % (elapsed, len(response.content)/1000) + if code in codes and code == "200": + self.doDiff(tuple, response) + except: + failed = True + print "Exception for URL '%s'" % url + traceback.print_exc() + self.assertEqual(failed, False) + + # Override this in subclasses if needed + def doCanonicalize(self, url, content): + return content + + def doDiff(self, tuple, response): + if not self.ref_prefix: + return + (codes, url, master) = tuple + if "skipdiff" in codes: + return + refurl = self.ref_prefix+url + print " Fetching "+refurl + refhtml = None + try: + mfile = urllib.urlopen(refurl) + refhtml = mfile.read() + mfile.close() + except Exception, e: + print " Error retrieving %s: %s" % (refurl, e) + return + testhtml = self.doCanonicalize(url, response.content) + refhtml = self.doCanonicalize(url, refhtml) + #print "REFERENCE:\n----------------------\n"+refhtml+"\n-------------\n" + #print "TEST:\n----------------------\n"+testhtml+"\n-------------\n" + + list0 = refhtml.split("\n") + list1 = testhtml.split("\n") + diff = "\n".join(unified_diff(list0, list1, refurl, url, "", "", 0, lineterm="")) + if diff: + print " Differences found:" + print diff + else: + print " No differences found" + +def canonicalize_feed(s): + # Django 0.96 handled time zone different -- ignore it for now + s = re.sub(r"(\d\d\d\d-\d\d-\d\dT)\d\d(:\d\d:\d\d)(Z|-08:00)()",r"\g<1>00\g<2>Z\g<4>", s) + # Insert newline before tags to make diff easier to read + s = re.sub("\n*\s*(<[a-zA-Z])", "\n\g<1>", s) + return s + +def canonicalize_sitemap(s): + s = re.sub("> <", "><", s) + # Insert newline before tags to make diff easier to read + s = re.sub("\n*\s*(<[a-zA-Z])", "\n\g<1>", s) + return s + +def login_testing_unauthorized(tc, remote_user, url): + r = tc.client.get(url) + tc.assertEquals(r.status_code, 302) + tc.assertTrue("/accounts/login" in r['Location']) + + 
tc.client.login(remote_user=remote_user) + diff --git a/ietf/utils/testurl.list b/ietf/utils/testurl.list deleted file mode 100644 index f78c5ad48..000000000 --- a/ietf/utils/testurl.list +++ /dev/null @@ -1,4 +0,0 @@ -200 /review/ -200 /review/0/ -200 /review/top/17/ -200 /review/all/ diff --git a/ietf/utils/views.py b/ietf/utils/views.py deleted file mode 100644 index 1b02943a6..000000000 --- a/ietf/utils/views.py +++ /dev/null @@ -1,61 +0,0 @@ -# Copyright The IETF Trust 2007, All Rights Reserved - -from django.shortcuts import render_to_response as render - -testurls = [] -urlcount = 0 -hash2url = {} -num2hash = {} -hash2num = {} -host = "merlot.tools.ietf.org:31415" - -def get_info(page): - global testurls - global hash2url - global num2hash - global hash2num - global urlcount - if not testurls: - from ietf.tests import get_testurls - testurls = [ tuple for tuple in get_testurls() if tuple[2] and "200" in tuple[0] ] - urlcount = len(testurls) - num2hash = dict([ (i, "%x"% (testurls[i][1].__hash__() +0x80000000)) for i in range(urlcount)]) - hash2url = dict([ (num2hash[i], testurls[i][1]) for i in range(urlcount)]) - hash2num = dict([ (num2hash[num], num) for num in num2hash ]) - - info = {} - try: - page = int(page) - except: - pass - if page in num2hash: - page = num2hash[page] - if not page in hash2url: - page = num2hash[0] - hash = page - assert(hash not in num2hash) - num = hash2num[hash] - info["next"] = num2hash[ (num + 1) % urlcount ] - info["this"] = hash - info["prev"] = num2hash[ (num - 1 + urlcount) % urlcount ] - info["new"] = "http://%s/%s" % (host, testurls[num][1][1:]) - info["old"] = testurls[num][2] - return info - -def review(request, page=0, panes=None): - return render("utils/frame2.html", {"info": get_info(page) }) - -def top(request, page=0): - return render("utils/review.html", {"info": get_info(page) }) - -def all(request): - get_info(0) # prime the list - info = [] - for i in range(urlcount): - item = {} - item["num"] = num2hash[i] - item["new"] = testurls[i][1] - item["old"] = testurls[i][2] - info.append(item) - - return render("utils/all.html", {"info": info, "count": len(info) }) \ No newline at end of file diff --git a/ietf/views.py b/ietf/views.py deleted file mode 100644 index 1c2805fee..000000000 --- a/ietf/views.py +++ /dev/null @@ -1,15 +0,0 @@ -# Copyright The IETF Trust 2007, All Rights Reserved - -from django.shortcuts import render_to_response as render -import urls -import re - -def apps(request): - paths = [] - for pattern in urls.urlpatterns: - path = pattern.regex.pattern.split("/")[0][1:] - if not re.search("[^a-z]", path) and not path in ["my", "feeds"]: - paths.append(path) - apps = list(set(paths)) - apps.sort() - return render("apps.html", {"apps": apps }) \ No newline at end of file diff --git a/ietf/wginfo/.gitignore b/ietf/wginfo/.gitignore new file mode 100644 index 000000000..a74b07aee --- /dev/null +++ b/ietf/wginfo/.gitignore @@ -0,0 +1 @@ +/*.pyc diff --git a/ietf/wginfo/__init__.py b/ietf/wginfo/__init__.py new file mode 100644 index 000000000..5ed40b97d --- /dev/null +++ b/ietf/wginfo/__init__.py @@ -0,0 +1,2 @@ +# Copyright The IETF Trust 2008, All Rights Reserved + diff --git a/ietf/wginfo/models.py b/ietf/wginfo/models.py new file mode 100644 index 000000000..5ed40b97d --- /dev/null +++ b/ietf/wginfo/models.py @@ -0,0 +1,2 @@ +# Copyright The IETF Trust 2008, All Rights Reserved + diff --git a/ietf/wginfo/tests.py b/ietf/wginfo/tests.py new file mode 100644 index 000000000..0742d5eed --- /dev/null +++ b/ietf/wginfo/tests.py 
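As an illustration of how the login_testing_unauthorized() helper from ietf/utils/test_utils.py above is meant to be called; the URL and REMOTE_USER value are placeholders:

    from django.test import TestCase
    from ietf.utils.test_utils import login_testing_unauthorized

    class EditStateTestCase(TestCase):                     # hypothetical test case
        def test_edit_requires_login(self):
            url = "/doc/draft-example-00/edit/state/"      # placeholder URL
            # first asserts that anonymous access redirects to /accounts/login,
            # then logs the test client in via REMOTE_USER
            login_testing_unauthorized(self, "ad.name", url)
            r = self.client.get(url)
            self.assertEquals(r.status_code, 200)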
@@ -0,0 +1,54 @@ +# Portions Copyright (C) 2009 Nokia Corporation and/or its subsidiary(-ies). +# All rights reserved. Contact: Pasi Eronen +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following +# disclaimer in the documentation and/or other materials provided +# with the distribution. +# +# * Neither the name of the Nokia Corporation and/or its +# subsidiary(-ies) nor the names of its contributors may be used +# to endorse or promote products derived from this software +# without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +import os +import unittest +from django.conf import settings +from ietf.utils.test_utils import SimpleUrlTestCase + +class WgInfoUrlTestCase(SimpleUrlTestCase): + def testUrls(self): + self.doTestUrls(__file__) + +class WgFileTestCase(unittest.TestCase): + def testFileExistence(self): + print " Testing if WG charter files exist locally" + fpath = os.path.join(settings.IETFWG_DESCRIPTIONS_PATH, "tls.desc.txt") + if not os.path.exists(fpath): + print "\nERROR: charter files not found in "+settings.IETFWG_DESCRIPTIONS_PATH + print "They are needed for testing WG charter pages." 
+ print "Download them to a local directory with:" + print "wget -nd -nc -np -r http://www.ietf.org/wg-descriptions/" + print "And set IETFWG_DESCRIPTIONS_PATH in settings_local.py\n" + else: + print "OK (they seem to exist)" + diff --git a/ietf/wginfo/testurl.list b/ietf/wginfo/testurl.list new file mode 100644 index 000000000..2cf38b30a --- /dev/null +++ b/ietf/wginfo/testurl.list @@ -0,0 +1,19 @@ +200 /wg/ +404 /wg/nosuchgroup/ +200 /wg/tls/ +200 /wg/tls/charter/ +200 /wg/mobike/ # concluded +200 /wg/mobike/charter/ +200 /wg/catnip/ # concluded very long time ago +200 /wg/catnip/charter/ # concluded very long time ago +404 /wg/saag/ # not a WG +404 /wg/saag/charter/ # not a WG + +200 /wg/1wg-summary.txt +200 /wg/1wg-summary-by-acronym.txt +301 /wg/summary.txt +301 /wg/summary-by-area.txt +301 /wg/summary-by-acronym.txt +200,heavy /wg/1wg-charters.txt +200,heavy /wg/1wg-charters-by-acronym.txt + diff --git a/ietf/wginfo/urls.py b/ietf/wginfo/urls.py new file mode 100644 index 000000000..00475e7d9 --- /dev/null +++ b/ietf/wginfo/urls.py @@ -0,0 +1,18 @@ +# Copyright The IETF Trust 2008, All Rights Reserved + +from django.conf.urls.defaults import patterns +from ietf.wginfo import views +from django.views.generic.simple import redirect_to + +urlpatterns = patterns('', + (r'^$', views.wg_dir), + (r'^summary.txt', redirect_to, { 'url':'/wg/1wg-summary.txt' }), + (r'^summary-by-area.txt', redirect_to, { 'url':'/wg/1wg-summary.txt' }), + (r'^summary-by-acronym.txt', redirect_to, { 'url':'/wg/1wg-summary-by-acronym.txt' }), + (r'^1wg-summary.txt', views.wg_summary_area), + (r'^1wg-summary-by-acronym.txt', views.wg_summary_acronym), + (r'^1wg-charters.txt', views.wg_charters), + (r'^1wg-charters-by-acronym.txt', views.wg_charters_by_acronym), + (r'^(?P[^/]+)/$', views.wg_documents), + (r'^(?P[^/]+)/charter/$', views.wg_charter), +) diff --git a/ietf/wginfo/views.py b/ietf/wginfo/views.py new file mode 100644 index 000000000..fbda64aad --- /dev/null +++ b/ietf/wginfo/views.py @@ -0,0 +1,75 @@ +# Copyright The IETF Trust 2008, All Rights Reserved + +# Portion Copyright (C) 2010 Nokia Corporation and/or its subsidiary(-ies). +# All rights reserved. Contact: Pasi Eronen +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following +# disclaimer in the documentation and/or other materials provided +# with the distribution. +# +# * Neither the name of the Nokia Corporation and/or its +# subsidiary(-ies) nor the names of its contributors may be used +# to endorse or promote products derived from this software +# without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +from ietf.idtracker.models import Area, IETFWG +from django.shortcuts import get_object_or_404, render_to_response +from django.template import RequestContext, loader +from django.http import HttpResponse +from ietf.idrfc.views_search import SearchForm, search_query + +def wg_summary_acronym(request): + areas = Area.active_areas() + wgs = IETFWG.objects.filter(status=IETFWG.ACTIVE) + return HttpResponse(loader.render_to_string('wginfo/1wg-summary-by-acronym.txt', {'area_list': areas, 'wg_list': wgs}),mimetype='text/plain; charset=UTF-8') + +def wg_summary_area(request): + wgs = IETFWG.objects.filter(status='1',start_date__isnull=False) + return HttpResponse(loader.render_to_string('wginfo/1wg-summary.txt', {'wg_list': wgs}),mimetype='text/plain; charset=UTF-8') + +def wg_charters(request): + wgs = IETFWG.objects.filter(status='1',start_date__isnull=False) + return HttpResponse(loader.render_to_string('wginfo/1wg-charters.txt', {'wg_list': wgs}),mimetype='text/plain; charset=UTF-8') + +def wg_charters_by_acronym(request): + wgs = IETFWG.objects.filter(status='1',start_date__isnull=False) + return HttpResponse(loader.render_to_string('wginfo/1wg-charters-by-acronym.txt', {'wg_list': wgs}),mimetype='text/plain; charset=UTF-8') + +def wg_dir(request): + areas = Area.active_areas() + return render_to_response('wginfo/wg-dir.html', {'areas':areas}, RequestContext(request)) + +def wg_documents(request, acronym): + wg = get_object_or_404(IETFWG, group_acronym__acronym=acronym, group_type=1) + concluded = (wg.status_id != 1) + form = SearchForm({'by':'group', 'group':str(wg.group_acronym.acronym), + 'rfcs':'on', 'activeDrafts':'on'}) + if not form.is_valid(): + raise ValueError("form did not validate") + (docs,meta) = search_query(form.cleaned_data) + return render_to_response('wginfo/wg_documents.html', {'wg': wg, 'concluded':concluded, 'selected':'documents', 'docs':docs, 'meta':meta}, RequestContext(request)) + +def wg_charter(request, acronym): + wg = get_object_or_404(IETFWG, group_acronym__acronym=acronym, group_type=1) + concluded = (wg.status_id != 1) + return render_to_response('wginfo/wg_charter.html', {'wg': wg, 'concluded':concluded, 'selected':'charter'}, RequestContext(request)) diff --git a/south/.gitignore b/south/.gitignore new file mode 100644 index 000000000..a74b07aee --- /dev/null +++ b/south/.gitignore @@ -0,0 +1 @@ +/*.pyc diff --git a/south/__init__.py b/south/__init__.py new file mode 100644 index 000000000..4eca0015c --- /dev/null +++ b/south/__init__.py @@ -0,0 +1,6 @@ +""" +South - Useable migrations for Django apps +""" + +__version__ = "0.6.2" +__authors__ = ["Andrew Godwin ", "Andy McCurdy "] diff --git a/south/db/.gitignore b/south/db/.gitignore new file mode 100644 index 000000000..a74b07aee --- /dev/null +++ b/south/db/.gitignore @@ -0,0 +1 @@ +/*.pyc diff --git a/south/db/__init__.py b/south/db/__init__.py new file mode 100644 index 000000000..ef70c5bed --- /dev/null +++ b/south/db/__init__.py @@ -0,0 +1,17 @@ + +# Establish the common 
DatabaseOperations instance, which we call 'db'. +# This code somewhat lifted from django evolution +from django.conf import settings +import sys +if hasattr(settings, "SOUTH_DATABASE_ADAPTER"): + module_name = settings.SOUTH_DATABASE_ADAPTER +else: + module_name = '.'.join(['south.db', settings.DATABASE_ENGINE]) + +try: + module = __import__(module_name,{},{},['']) +except ImportError: + sys.stderr.write("There is no South database module for the engine '%s' (tried with %s). Please either choose a supported one, or check for SOUTH_DATABASE_ADAPTER settings, or remove South from INSTALLED_APPS.\n" + % (settings.DATABASE_ENGINE, module_name)) + sys.exit(1) +db = module.DatabaseOperations() diff --git a/south/db/generic.py b/south/db/generic.py new file mode 100644 index 000000000..c9b056233 --- /dev/null +++ b/south/db/generic.py @@ -0,0 +1,770 @@ + +import datetime +import string +import random +import re +import sys + +from django.core.management.color import no_style +from django.db import connection, transaction, models +from django.db.backends.util import truncate_name +from django.db.models.fields import NOT_PROVIDED +from django.dispatch import dispatcher +from django.conf import settings +from django.utils.datastructures import SortedDict + +from south.logger import get_logger + +def alias(attrname): + """ + Returns a function which calls 'attrname' - for function aliasing. + We can't just use foo = bar, as this breaks subclassing. + """ + def func(self, *args, **kwds): + return getattr(self, attrname)(*args, **kwds) + return func + + +class DatabaseOperations(object): + + """ + Generic SQL implementation of the DatabaseOperations. + Some of this code comes from Django Evolution. + """ + + # We assume the generic DB can handle DDL transactions. MySQL wil change this. + has_ddl_transactions = True + + alter_string_set_type = 'ALTER COLUMN %(column)s TYPE %(type)s' + alter_string_set_null = 'ALTER COLUMN %(column)s DROP NOT NULL' + alter_string_drop_null = 'ALTER COLUMN %(column)s SET NOT NULL' + has_check_constraints = True + delete_check_sql = 'ALTER TABLE %(table)s DROP CONSTRAINT %(constraint)s' + allows_combined_alters = True + add_column_string = 'ALTER TABLE %s ADD COLUMN %s;' + delete_unique_sql = "ALTER TABLE %s DROP CONSTRAINT %s" + delete_foreign_key_sql = 'ALTER TABLE %s DROP CONSTRAINT %s' + supports_foreign_keys = True + max_index_name_length = 63 + drop_index_string = 'DROP INDEX %(index_name)s' + delete_column_string = 'ALTER TABLE %s DROP COLUMN %s CASCADE;' + create_primary_key_string = "ALTER TABLE %(table)s ADD CONSTRAINT %(constraint)s PRIMARY KEY (%(columns)s)" + drop_primary_key_string = "ALTER TABLE %(table)s DROP CONSTRAINT %(constraint)s" + backend_name = None + + def __init__(self): + self.debug = False + self.deferred_sql = [] + self.dry_run = False + self.pending_transactions = 0 + self.pending_create_signals = [] + + + def connection_init(self): + """ + Run before any SQL to let database-specific config be sent as a command, + e.g. which storage engine (MySQL) or transaction serialisability level. + """ + pass + + + def execute(self, sql, params=[]): + """ + Executes the given SQL statement, with optional parameters. + If the instance's debug attribute is True, prints out what it executes. 
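For orientation, a small sketch of calling this method directly; the table and column names are hypothetical, and normally the higher-level helpers below are used instead:

    from south.db import db

    db.debug = True        # echoes each statement as " = <sql> <params>"
    rows = db.execute("SELECT name FROM example_table WHERE id = %s", [42])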
+ """ + self.connection_init() + cursor = connection.cursor() + if self.debug: + print " = %s" % sql, params + + get_logger().debug('south execute "%s" with params "%s"' % (sql, params)) + + if self.dry_run: + return [] + + cursor.execute(sql, params) + try: + return cursor.fetchall() + except: + return [] + + + def execute_many(self, sql, regex=r"(?mx) ([^';]* (?:'[^']*'[^';]*)*)", comment_regex=r"(?mx) (?:^\s*$)|(?:--.*$)"): + """ + Takes a SQL file and executes it as many separate statements. + (Some backends, such as Postgres, don't work otherwise.) + """ + # Be warned: This function is full of dark magic. Make sure you really + # know regexes before trying to edit it. + # First, strip comments + sql = "\n".join([x.strip().replace("%", "%%") for x in re.split(comment_regex, sql) if x.strip()]) + # Now execute each statement + for st in re.split(regex, sql)[1:][::2]: + self.execute(st) + + + def add_deferred_sql(self, sql): + """ + Add a SQL statement to the deferred list, that won't be executed until + this instance's execute_deferred_sql method is run. + """ + self.deferred_sql.append(sql) + + + def execute_deferred_sql(self): + """ + Executes all deferred SQL, resetting the deferred_sql list + """ + for sql in self.deferred_sql: + self.execute(sql) + + self.deferred_sql = [] + + + def clear_deferred_sql(self): + """ + Resets the deferred_sql list to empty. + """ + self.deferred_sql = [] + + + def clear_run_data(self, pending_creates = None): + """ + Resets variables to how they should be before a run. Used for dry runs. + If you want, pass in an old panding_creates to reset to. + """ + self.clear_deferred_sql() + self.pending_create_signals = pending_creates or [] + + + def get_pending_creates(self): + return self.pending_create_signals + + + def create_table(self, table_name, fields): + """ + Creates the table 'table_name'. 'fields' is a tuple of fields, + each repsented by a 2-part tuple of field name and a + django.db.models.fields.Field object + """ + qn = connection.ops.quote_name + + # allow fields to be a dictionary + # removed for now - philosophical reasons (this is almost certainly not what you want) + #try: + # fields = fields.items() + #except AttributeError: + # pass + + if len(table_name) > 63: + print " ! WARNING: You have a table name longer than 63 characters; this will not fully work on PostgreSQL or MySQL." + + columns = [ + self.column_sql(table_name, field_name, field) + for field_name, field in fields + ] + + self.execute('CREATE TABLE %s (%s);' % (qn(table_name), ', '.join([col for col in columns if col]))) + + add_table = alias('create_table') # Alias for consistency's sake + + + def rename_table(self, old_table_name, table_name): + """ + Renames the table 'old_table_name' to 'table_name'. + """ + if old_table_name == table_name: + # No Operation + return + qn = connection.ops.quote_name + params = (qn(old_table_name), qn(table_name)) + self.execute('ALTER TABLE %s RENAME TO %s;' % params) + + + def delete_table(self, table_name, cascade=True): + """ + Deletes the table 'table_name'. + """ + qn = connection.ops.quote_name + params = (qn(table_name), ) + if cascade: + self.execute('DROP TABLE %s CASCADE;' % params) + else: + self.execute('DROP TABLE %s;' % params) + + drop_table = alias('delete_table') + + + def clear_table(self, table_name): + """ + Deletes all rows from 'table_name'. 
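Putting the table-level operations above together, a minimal sketch of a hand-written South migration; the app, table and column names are hypothetical:

    from south.db import db
    from django.db import models

    class Migration:
        def forwards(self, orm):
            # create a table, then add one more column to it
            db.create_table("exampleapp_note", [
                ("id", models.AutoField(primary_key=True)),
                ("text", models.TextField(default="")),
            ])
            db.add_column("exampleapp_note", "slug",
                          models.CharField(max_length=32, default=""))

        def backwards(self, orm):
            db.delete_table("exampleapp_note")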
+ """ + qn = connection.ops.quote_name + params = (qn(table_name), ) + self.execute('DELETE FROM %s;' % params) + + + + def add_column(self, table_name, name, field, keep_default=True): + """ + Adds the column 'name' to the table 'table_name'. + Uses the 'field' paramater, a django.db.models.fields.Field instance, + to generate the necessary sql + + @param table_name: The name of the table to add the column to + @param name: The name of the column to add + @param field: The field to use + """ + qn = connection.ops.quote_name + sql = self.column_sql(table_name, name, field) + if sql: + params = ( + qn(table_name), + sql, + ) + sql = self.add_column_string % params + self.execute(sql) + + # Now, drop the default if we need to + if not keep_default and field.default is not None: + field.default = NOT_PROVIDED + self.alter_column(table_name, name, field, explicit_name=False) + + + def _db_type_for_alter_column(self, field): + """ + Returns a field's type suitable for ALTER COLUMN. + By default it just returns field.db_type(). + To be overriden by backend specific subclasses + @param field: The field to generate type for + """ + return field.db_type() + + def alter_column(self, table_name, name, field, explicit_name=True): + """ + Alters the given column name so it will match the given field. + Note that conversion between the two by the database must be possible. + Will not automatically add _id by default; to have this behavour, pass + explicit_name=False. + + @param table_name: The name of the table to add the column to + @param name: The name of the column to alter + @param field: The new field definition to use + """ + + # hook for the field to do any resolution prior to it's attributes being queried + if hasattr(field, 'south_init'): + field.south_init() + + qn = connection.ops.quote_name + + # Add _id or whatever if we need to + field.set_attributes_from_name(name) + if not explicit_name: + name = field.column + + # Drop all check constraints. TODO: Add the right ones back. + if self.has_check_constraints: + check_constraints = self._constraints_affecting_columns(table_name, [name], "CHECK") + for constraint in check_constraints: + self.execute(self.delete_check_sql % {'table': qn(table_name), 'constraint': qn(constraint)}) + + # First, change the type + params = { + "column": qn(name), + "type": self._db_type_for_alter_column(field) + } + + # SQLs is a list of (SQL, values) pairs. + sqls = [(self.alter_string_set_type % params, [])] + + # Next, set any default + if not field.null and field.has_default(): + default = field.get_default() + sqls.append(('ALTER COLUMN %s SET DEFAULT %%s ' % (qn(name),), [default])) + else: + sqls.append(('ALTER COLUMN %s DROP DEFAULT' % (qn(name),), [])) + + + # Next, nullity + params = { + "column": qn(name), + "type": field.db_type(), + } + if field.null: + sqls.append((self.alter_string_set_null % params, [])) + else: + sqls.append((self.alter_string_drop_null % params, [])) + + # TODO: Unique + + if self.allows_combined_alters: + sqls, values = zip(*sqls) + self.execute( + "ALTER TABLE %s %s;" % (qn(table_name), ", ".join(sqls)), + flatten(values), + ) + else: + # Databases like e.g. MySQL don't like more than one alter at once. + for sql, values in sqls: + self.execute("ALTER TABLE %s %s;" % (qn(table_name), sql), values) + + + def _constraints_affecting_columns(self, table_name, columns, type="UNIQUE"): + """ + Gets the names of the constraints affecting the given columns. 
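This lookup is what allows constraints to be dropped by column list rather than by name; schematically, with hypothetical table and column names:

    from south.db import db

    # adds "ALTER TABLE ... ADD CONSTRAINT ... UNIQUE (owner_id, slug)"
    db.create_unique("exampleapp_note", ["owner_id", "slug"])
    # later: the constraint name is rediscovered from information_schema and dropped
    db.delete_unique("exampleapp_note", ["owner_id", "slug"])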
+ """ + + if self.dry_run: + raise ValueError("Cannot get constraints for columns during a dry run.") + + columns = set(columns) + + if type == "CHECK": + ifsc_table = "constraint_column_usage" + else: + ifsc_table = "key_column_usage" + + # First, load all constraint->col mappings for this table. + rows = self.execute(""" + SELECT kc.constraint_name, kc.column_name + FROM information_schema.%s AS kc + JOIN information_schema.table_constraints AS c ON + kc.table_schema = c.table_schema AND + kc.table_name = c.table_name AND + kc.constraint_name = c.constraint_name + WHERE + kc.table_schema = %%s AND + kc.table_name = %%s AND + c.constraint_type = %%s + """ % ifsc_table, ['public', table_name, type]) + # Load into a dict + mapping = {} + for constraint, column in rows: + mapping.setdefault(constraint, set()) + mapping[constraint].add(column) + # Find ones affecting these columns + for constraint, itscols in mapping.items(): + if itscols == columns: + yield constraint + + + def create_unique(self, table_name, columns): + """ + Creates a UNIQUE constraint on the columns on the given table. + """ + qn = connection.ops.quote_name + + if not isinstance(columns, (list, tuple)): + columns = [columns] + + name = self.create_index_name(table_name, columns, suffix="_uniq") + + cols = ", ".join(map(qn, columns)) + self.execute("ALTER TABLE %s ADD CONSTRAINT %s UNIQUE (%s)" % (qn(table_name), qn(name), cols)) + return name + + + + def delete_unique(self, table_name, columns): + """ + Deletes a UNIQUE constraint on precisely the columns on the given table. + """ + qn = connection.ops.quote_name + + if not isinstance(columns, (list, tuple)): + columns = [columns] + + # Dry runs mean we can't do anything. + if self.dry_run: + return + + constraints = list(self._constraints_affecting_columns(table_name, columns)) + if not constraints: + raise ValueError("Cannot find a UNIQUE constraint on table %s, columns %r" % (table_name, columns)) + for constraint in constraints: + self.execute(self.delete_unique_sql % (qn(table_name), qn(constraint))) + + + def column_sql(self, table_name, field_name, field, tablespace=''): + """ + Creates the SQL snippet for a column. Used by add_column and add_table. + """ + qn = connection.ops.quote_name + + field.set_attributes_from_name(field_name) + + # hook for the field to do any resolution prior to it's attributes being queried + if hasattr(field, 'south_init'): + field.south_init() + + # Possible hook to fiddle with the fields (e.g. defaults & TEXT on MySQL) + field = self._field_sanity(field) + + sql = field.db_type() + if sql: + field_output = [qn(field.column), sql] + field_output.append('%sNULL' % (not field.null and 'NOT ' or '')) + if field.primary_key: + field_output.append('PRIMARY KEY') + elif field.unique: + # Just use UNIQUE (no indexes any more, we have delete_unique) + field_output.append('UNIQUE') + + tablespace = field.db_tablespace or tablespace + if tablespace and connection.features.supports_tablespaces and field.unique: + # We must specify the index tablespace inline, because we + # won't be generating a CREATE INDEX statement for this field. 
+ field_output.append(connection.ops.tablespace_sql(tablespace, inline=True)) + + sql = ' '.join(field_output) + sqlparams = () + # if the field is "NOT NULL" and a default value is provided, create the column with it + # this allows the addition of a NOT NULL field to a table with existing rows + if not field.null and not getattr(field, '_suppress_default', False) and field.has_default(): + default = field.get_default() + # If the default is actually None, don't add a default term + if default is not None: + # If the default is a callable, then call it! + if callable(default): + default = default() + # Now do some very cheap quoting. TODO: Redesign return values to avoid this. + if isinstance(default, basestring): + default = "'%s'" % default.replace("'", "''") + elif isinstance(default, (datetime.date, datetime.time, datetime.datetime)): + default = "'%s'" % default + sql += " DEFAULT %s" + sqlparams = (default) + elif (not field.null and field.blank) or ((field.get_default() == '') and (not getattr(field, '_suppress_default', False))): + if field.empty_strings_allowed and connection.features.interprets_empty_strings_as_nulls: + sql += " DEFAULT ''" + # Error here would be nice, but doesn't seem to play fair. + #else: + # raise ValueError("Attempting to add a non null column that isn't character based without an explicit default value.") + + if field.rel and self.supports_foreign_keys: + self.add_deferred_sql( + self.foreign_key_sql( + table_name, + field.column, + field.rel.to._meta.db_table, + field.rel.to._meta.get_field(field.rel.field_name).column + ) + ) + + if field.db_index and not field.unique: + self.add_deferred_sql(self.create_index_sql(table_name, [field.column])) + + if hasattr(field, 'post_create_sql'): + style = no_style() + for stmt in field.post_create_sql(style, table_name): + self.add_deferred_sql(stmt) + + if sql: + return sql % sqlparams + else: + return None + + + def _field_sanity(self, field): + """ + Placeholder for DBMS-specific field alterations (some combos aren't valid, + e.g. 
DEFAULT and TEXT on MySQL) + """ + return field + + + def foreign_key_sql(self, from_table_name, from_column_name, to_table_name, to_column_name): + """ + Generates a full SQL statement to add a foreign key constraint + """ + qn = connection.ops.quote_name + constraint_name = '%s_refs_%s_%x' % (from_column_name, to_column_name, abs(hash((from_table_name, to_table_name)))) + return 'ALTER TABLE %s ADD CONSTRAINT %s FOREIGN KEY (%s) REFERENCES %s (%s)%s;' % ( + qn(from_table_name), + qn(truncate_name(constraint_name, connection.ops.max_name_length())), + qn(from_column_name), + qn(to_table_name), + qn(to_column_name), + connection.ops.deferrable_sql() # Django knows this + ) + + + def delete_foreign_key(self, table_name, column): + "Drop a foreign key constraint" + qn = connection.ops.quote_name + if self.dry_run: + return # We can't look at the DB to get the constraints + constraints = list(self._constraints_affecting_columns(table_name, [column], "FOREIGN KEY")) + if not constraints: + raise ValueError("Cannot find a FOREIGN KEY constraint on table %s, column %s" % (table_name, column)) + for constraint_name in constraints: + self.execute(self.delete_foreign_key_sql % (qn(table_name), qn(constraint_name))) + + drop_foreign_key = alias('delete_foreign_key') + + + def create_index_name(self, table_name, column_names, suffix=""): + """ + Generate a unique name for the index + """ + index_unique_name = '' + + if len(column_names) > 1: + index_unique_name = '_%x' % abs(hash((table_name, ','.join(column_names)))) + + # If the index name is too long, truncate it + index_name = ('%s_%s%s%s' % (table_name, column_names[0], index_unique_name, suffix)) + if len(index_name) > self.max_index_name_length: + part = ('_%s%s%s' % (column_names[0], index_unique_name, suffix)) + index_name = '%s%s' % (table_name[:(self.max_index_name_length-len(part))], part) + + return index_name + + + def create_index_sql(self, table_name, column_names, unique=False, db_tablespace=''): + """ + Generates a create index statement on 'table_name' for a list of 'column_names' + """ + qn = connection.ops.quote_name + if not column_names: + print "No column names supplied on which to create an index" + return '' + + if db_tablespace and connection.features.supports_tablespaces: + tablespace_sql = ' ' + connection.ops.tablespace_sql(db_tablespace) + else: + tablespace_sql = '' + + index_name = self.create_index_name(table_name, column_names) + qn = connection.ops.quote_name + return 'CREATE %sINDEX %s ON %s (%s)%s;' % ( + unique and 'UNIQUE ' or '', + qn(index_name), + qn(table_name), + ','.join([qn(field) for field in column_names]), + tablespace_sql + ) + + def create_index(self, table_name, column_names, unique=False, db_tablespace=''): + """ Executes a create index statement """ + sql = self.create_index_sql(table_name, column_names, unique, db_tablespace) + self.execute(sql) + + + def delete_index(self, table_name, column_names, db_tablespace=''): + """ + Deletes an index created with create_index. + This is possible using only columns due to the deterministic + index naming function which relies on column names. 
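A short sketch of why deletion by column list works here: both calls derive the same index name from the table and column names (the names below are hypothetical):

    from south.db import db

    db.create_index("exampleapp_note", ["slug"])
    # ...possibly in a later migration...
    db.delete_index("exampleapp_note", ["slug"])   # recomputes the name "exampleapp_note_slug"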
+ """ + if isinstance(column_names, (str, unicode)): + column_names = [column_names] + name = self.create_index_name(table_name, column_names) + qn = connection.ops.quote_name + sql = self.drop_index_string % {"index_name": qn(name), "table_name": qn(table_name)} + self.execute(sql) + + drop_index = alias('delete_index') + + + def delete_column(self, table_name, name): + """ + Deletes the column 'column_name' from the table 'table_name'. + """ + qn = connection.ops.quote_name + params = (qn(table_name), qn(name)) + self.execute(self.delete_column_string % params, []) + + drop_column = alias('delete_column') + + + def rename_column(self, table_name, old, new): + """ + Renames the column 'old' from the table 'table_name' to 'new'. + """ + raise NotImplementedError("rename_column has no generic SQL syntax") + + + def drop_primary_key(self, table_name): + """ + Drops the old primary key. + """ + qn = connection.ops.quote_name + self.execute(self.drop_primary_key_string % { + "table": qn(table_name), + "constraint": qn(table_name+"_pkey"), + }) + + delete_primary_key = alias('drop_primary_key') + + + def create_primary_key(self, table_name, columns): + """ + Creates a new primary key on the specified columns. + """ + if not isinstance(columns, (list, tuple)): + columns = [columns] + qn = connection.ops.quote_name + self.execute(self.create_primary_key_string % { + "table": qn(table_name), + "constraint": qn(table_name+"_pkey"), + "columns": ", ".join(map(qn, columns)), + }) + + + def start_transaction(self): + """ + Makes sure the following commands are inside a transaction. + Must be followed by a (commit|rollback)_transaction call. + """ + if self.dry_run: + self.pending_transactions += 1 + transaction.commit_unless_managed() + transaction.enter_transaction_management() + transaction.managed(True) + + + def commit_transaction(self): + """ + Commits the current transaction. + Must be preceded by a start_transaction call. + """ + if self.dry_run: + return + transaction.commit() + transaction.leave_transaction_management() + + + def rollback_transaction(self): + """ + Rolls back the current transaction. + Must be preceded by a start_transaction call. + """ + if self.dry_run: + self.pending_transactions -= 1 + transaction.rollback() + transaction.leave_transaction_management() + + def rollback_transactions_dry_run(self): + """ + Rolls back all pending_transactions during this dry run. + """ + if not self.dry_run: + return + while self.pending_transactions > 0: + self.rollback_transaction() + if transaction.is_dirty(): + # Force an exception, if we're still in a dirty transaction. + # This means we are missing a COMMIT/ROLLBACK. + transaction.leave_transaction_management() + + + def send_create_signal(self, app_label, model_names): + self.pending_create_signals.append((app_label, model_names)) + + + def send_pending_create_signals(self): + # Group app_labels together + signals = SortedDict() + for (app_label, model_names) in self.pending_create_signals: + try: + signals[app_label].extend(model_names) + except KeyError: + signals[app_label] = list(model_names) + # Send only one signal per app. + for (app_label, model_names) in signals.iteritems(): + self.really_send_create_signal(app_label, list(set(model_names))) + self.pending_create_signals = [] + + + def really_send_create_signal(self, app_label, model_names): + """ + Sends a post_syncdb signal for the model specified. + + If the model is not found (perhaps it's been deleted?), + no signal is sent. 
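In a migration it is the queued variant that gets called; the runner later flushes the queue via send_pending_create_signals(). A schematic, with hypothetical app and model names:

    from south.db import db
    from django.db import models

    db.create_table("exampleapp_note",
                    [("id", models.AutoField(primary_key=True))])
    # queue a post_syncdb notification for the new model; signals are
    # grouped per app and sent once by send_pending_create_signals()
    db.send_create_signal("exampleapp", ["Note"])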
+ + TODO: The behavior of django.contrib.* apps seems flawed in that + they don't respect created_models. Rather, they blindly execute + over all models within the app sending the signal. This is a + patch we should push Django to make For now, this should work. + """ + if self.debug: + print " - Sending post_syncdb signal for %s: %s" % (app_label, model_names) + app = models.get_app(app_label) + if not app: + return + + created_models = [] + for model_name in model_names: + model = models.get_model(app_label, model_name) + if model: + created_models.append(model) + + if created_models: + # syncdb defaults -- perhaps take these as options? + verbosity = 1 + interactive = True + + if hasattr(dispatcher, "send"): + dispatcher.send(signal=models.signals.post_syncdb, sender=app, + app=app, created_models=created_models, + verbosity=verbosity, interactive=interactive) + else: + models.signals.post_syncdb.send(sender=app, + app=app, created_models=created_models, + verbosity=verbosity, interactive=interactive) + + + def mock_model(self, model_name, db_table, db_tablespace='', + pk_field_name='id', pk_field_type=models.AutoField, + pk_field_args=[], pk_field_kwargs={}): + """ + Generates a MockModel class that provides enough information + to be used by a foreign key/many-to-many relationship. + + Migrations should prefer to use these rather than actual models + as models could get deleted over time, but these can remain in + migration files forever. + + Depreciated. + """ + class MockOptions(object): + def __init__(self): + self.db_table = db_table + self.db_tablespace = db_tablespace or settings.DEFAULT_TABLESPACE + self.object_name = model_name + self.module_name = model_name.lower() + + if pk_field_type == models.AutoField: + pk_field_kwargs['primary_key'] = True + + self.pk = pk_field_type(*pk_field_args, **pk_field_kwargs) + self.pk.set_attributes_from_name(pk_field_name) + self.abstract = False + + def get_field_by_name(self, field_name): + # we only care about the pk field + return (self.pk, self.model, True, False) + + def get_field(self, name): + # we only care about the pk field + return self.pk + + class MockModel(object): + _meta = None + + # We need to return an actual class object here, not an instance + MockModel._meta = MockOptions() + MockModel._meta.model = MockModel + return MockModel + + +# Single-level flattening of lists +def flatten(ls): + nl = [] + for l in ls: + nl += l + return nl diff --git a/south/db/mysql.py b/south/db/mysql.py new file mode 100644 index 000000000..edf567578 --- /dev/null +++ b/south/db/mysql.py @@ -0,0 +1,147 @@ + +from django.db import connection +from django.conf import settings +from south.db import generic + +class DatabaseOperations(generic.DatabaseOperations): + + """ + MySQL implementation of database operations. + """ + + backend_name = "mysql" + alter_string_set_type = '' + alter_string_set_null = 'MODIFY %(column)s %(type)s NULL;' + alter_string_drop_null = 'MODIFY %(column)s %(type)s NOT NULL;' + drop_index_string = 'DROP INDEX %(index_name)s ON %(table_name)s' + drop_primary_key_string = "ALTER TABLE %(table)s DROP PRIMARY KEY" + allows_combined_alters = False + has_ddl_transactions = False + has_check_constraints = False + delete_unique_sql = "ALTER TABLE %s DROP INDEX %s" + + + def connection_init(self): + """ + Run before any SQL to let database-specific config be sent as a command, + e.g. which storage engine (MySQL) or transaction serialisability level. 
+ """ + if hasattr(settings, "DATABASE_STORAGE_ENGINE") and \ + settings.DATABASE_STORAGE_ENGINE: + cursor = connection.cursor() + cursor.execute("SET storage_engine=%s;" % settings.DATABASE_STORAGE_ENGINE) + + + def rename_column(self, table_name, old, new): + if old == new or self.dry_run: + return [] + + qn = connection.ops.quote_name + + rows = [x for x in self.execute('DESCRIBE %s' % (qn(table_name),)) if x[0] == old] + + if not rows: + raise ValueError("No column '%s' in '%s'." % (old, table_name)) + + params = ( + qn(table_name), + qn(old), + qn(new), + rows[0][1], + rows[0][2] == "YES" and "NULL" or "NOT NULL", + rows[0][4] and "DEFAULT " or "", + rows[0][4] and "%s" or "", + rows[0][5] or "", + ) + + sql = 'ALTER TABLE %s CHANGE COLUMN %s %s %s %s %s %s %s;' % params + + if rows[0][4]: + self.execute(sql, (rows[0][4],)) + else: + self.execute(sql) + + + def delete_column(self, table_name, name): + qn = connection.ops.quote_name + db_name = settings.DATABASE_NAME + + # See if there is a foreign key on this column + cursor = connection.cursor() + get_fkeyname_query = "SELECT tc.constraint_name FROM \ + information_schema.table_constraints tc, \ + information_schema.key_column_usage kcu \ + WHERE tc.table_name=kcu.table_name \ + AND tc.table_schema=kcu.table_schema \ + AND tc.constraint_name=kcu.constraint_name \ + AND tc.constraint_type='FOREIGN KEY' \ + AND tc.table_schema='%s' \ + AND tc.table_name='%s' \ + AND kcu.column_name='%s'" + + result = cursor.execute(get_fkeyname_query % (db_name, table_name, name)) + + # if a foreign key exists, we need to delete it first + if result > 0: + assert result == 1 #we should only have one result + fkey_name = cursor.fetchone()[0] + drop_query = "ALTER TABLE %s DROP FOREIGN KEY %s" + cursor.execute(drop_query % (qn(table_name), qn(fkey_name))) + + super(DatabaseOperations, self).delete_column(table_name, name) + + + def rename_table(self, old_table_name, table_name): + """ + Renames the table 'old_table_name' to 'table_name'. + """ + if old_table_name == table_name: + # No Operation + return + qn = connection.ops.quote_name + params = (qn(old_table_name), qn(table_name)) + self.execute('RENAME TABLE %s TO %s;' % params) + + + def _constraints_affecting_columns(self, table_name, columns, type="UNIQUE"): + """ + Gets the names of the constraints affecting the given columns. + """ + + if self.dry_run: + raise ValueError("Cannot get constraints for columns during a dry run.") + + columns = set(columns) + db_name = settings.DATABASE_NAME + # First, load all constraint->col mappings for this table. + rows = self.execute(""" + SELECT kc.constraint_name, kc.column_name + FROM information_schema.key_column_usage AS kc + JOIN information_schema.table_constraints AS c ON + kc.table_schema = c.table_schema AND + kc.table_name = c.table_name AND + kc.constraint_name = c.constraint_name + WHERE + kc.table_schema = %s AND + kc.table_catalog IS NULL AND + kc.table_name = %s AND + c.constraint_type = %s + """, [db_name, table_name, type]) + # Load into a dict + mapping = {} + for constraint, column in rows: + mapping.setdefault(constraint, set()) + mapping[constraint].add(column) + # Find ones affecting these columns + for constraint, itscols in mapping.items(): + if itscols == columns: + yield constraint + + + def _field_sanity(self, field): + """ + This particular override stops us sending DEFAULTs for BLOB/TEXT columns. 
+ """ + if field.db_type().upper() in ["BLOB", "TEXT", "LONGTEXT"]: + field._suppress_default = True + return field diff --git a/south/db/postgresql_psycopg2.py b/south/db/postgresql_psycopg2.py new file mode 100644 index 000000000..5c1d76354 --- /dev/null +++ b/south/db/postgresql_psycopg2.py @@ -0,0 +1,64 @@ + +from django.db import connection, models +from south.db import generic + +class DatabaseOperations(generic.DatabaseOperations): + + """ + PsycoPG2 implementation of database operations. + """ + + backend_name = "postgres" + + def rename_column(self, table_name, old, new): + if old == new: + return [] + qn = connection.ops.quote_name + params = (qn(table_name), qn(old), qn(new)) + self.execute('ALTER TABLE %s RENAME COLUMN %s TO %s;' % params) + + def rename_table(self, old_table_name, table_name): + "will rename the table and an associated ID sequence and primary key index" + # First, rename the table + generic.DatabaseOperations.rename_table(self, old_table_name, table_name) + # Then, try renaming the ID sequence + # (if you're using other AutoFields... your problem, unfortunately) + self.commit_transaction() + self.start_transaction() + try: + generic.DatabaseOperations.rename_table(self, old_table_name+"_id_seq", table_name+"_id_seq") + except: + if self.debug: + print " ~ No such sequence (ignoring error)" + self.rollback_transaction() + else: + self.commit_transaction() + self.start_transaction() + + # Rename primary key index, will not rename other indices on + # the table that are used by django (e.g. foreign keys). Until + # figure out how, you need to do this yourself. + try: + generic.DatabaseOperations.rename_table(self, old_table_name+"_pkey", table_name+ "_pkey") + except: + if self.debug: + print " ~ No such primary key (ignoring error)" + self.rollback_transaction() + else: + self.commit_transaction() + self.start_transaction() + + + def rename_index(self, old_index_name, index_name): + "Rename an index individually" + generic.DatabaseOperations.rename_table(self, old_index_name, index_name) + + def _db_type_for_alter_column(self, field): + """ + Returns a field's type suitable for ALTER COLUMN. + Strips CHECKs from PositiveSmallIntegerField) and PositiveIntegerField + @param field: The field to generate type for + """ + if isinstance(field, models.PositiveSmallIntegerField) or isinstance(field, models.PositiveIntegerField): + return field.db_type().split(" ")[0] + return super(DatabaseOperations, self)._db_type_for_alter_column(field) diff --git a/south/db/sql_server/.gitignore b/south/db/sql_server/.gitignore new file mode 100644 index 000000000..a74b07aee --- /dev/null +++ b/south/db/sql_server/.gitignore @@ -0,0 +1 @@ +/*.pyc diff --git a/south/db/sql_server/__init__.py b/south/db/sql_server/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/south/db/sql_server/pyodbc.py b/south/db/sql_server/pyodbc.py new file mode 100644 index 000000000..117b9688a --- /dev/null +++ b/south/db/sql_server/pyodbc.py @@ -0,0 +1,148 @@ +from django.db import connection +from django.db.models.fields import * +from south.db import generic + +class DatabaseOperations(generic.DatabaseOperations): + """ + django-pyodbc (sql_server.pyodbc) implementation of database operations. 
+ """ + + backend_name = "pyodbc" + + add_column_string = 'ALTER TABLE %s ADD %s;' + alter_string_set_type = 'ALTER COLUMN %(column)s %(type)s' + alter_string_drop_null = 'ALTER COLUMN %(column)s %(type)s NOT NULL' + allows_combined_alters = False + + drop_index_string = 'DROP INDEX %(index_name)s ON %(table_name)s' + drop_constraint_string = 'ALTER TABLE %(table_name)s DROP CONSTRAINT %(constraint_name)s' + delete_column_string = 'ALTER TABLE %s DROP COLUMN %s' + + + def delete_column(self, table_name, name): + qn = connection.ops.quote_name + q_table_name, q_name = (qn(table_name), qn(name)) + + # Zap the indexes + for ind in self._find_indexes_for_column(table_name,name): + params = {'table_name':q_table_name, 'index_name': ind} + sql = self.drop_index_string % params + self.execute(sql, []) + + # Zap the constraints + for const in self._find_constraints_for_column(table_name,name): + params = {'table_name':q_table_name, 'constraint_name': const} + sql = self.drop_constraint_string % params + self.execute(sql, []) + + # Finally zap the column itself + self.execute(self.delete_column_string % (q_table_name, q_name), []) + + def _find_indexes_for_column(self, table_name, name): + "Find the indexes that apply to a column, needed when deleting" + qn = connection.ops.quote_name + q_table_name, q_name = (qn(table_name), qn(name)) + + sql = """ + SELECT si.name, si.id, sik.colid, sc.name + FROM dbo.sysindexes SI WITH (NOLOCK) + INNER JOIN dbo.sysindexkeys SIK WITH (NOLOCK) + ON SIK.id = Si.id + AND SIK.indid = SI.indid + INNER JOIN dbo.syscolumns SC WITH (NOLOCK) + ON SI.id = SC.id + AND SIK.colid = SC.colid + WHERE SI.indid !=0 + AND Si.id = OBJECT_ID('%s') + AND SC.name = '%s' + """ + idx = self.execute(sql % (table_name, name), []) + return [i[0] for i in idx] + + def _find_constraints_for_column(self, table_name, name): + "Find the constraints that apply to a column, needed when deleting" + qn = connection.ops.quote_name + q_table_name, q_name = (qn(table_name), qn(name)) + + sql = """ + SELECT + Cons.xtype, + Cons.id, + Cons.[name] + FROM dbo.sysobjects AS Cons WITH(NOLOCK) + INNER JOIN ( + SELECT [id], colid, name + FROM dbo.syscolumns WITH(NOLOCK) + WHERE id = OBJECT_ID('%s') + AND name = '%s' + ) AS Cols + ON Cons.parent_obj = Cols.id + WHERE Cons.parent_obj = OBJECT_ID('%s') + AND ( + (OBJECTPROPERTY(Cons.[id],'IsConstraint') = 1 + AND Cons.info = Cols.colid) + OR (OBJECTPROPERTY(Cons.[id],'IsForeignKey') = 1 + AND LEFT(Cons.name,%d) = '%s') + ) + """ + cons = self.execute(sql % (table_name, name, table_name, len(name), name), []) + return [c[2] for c in cons] + + + def drop_column_default_sql(self, table_name, name, q_name): + "MSSQL specific drop default, which is a pain" + + sql = """ + SELECT object_name(cdefault) + FROM syscolumns + WHERE id = object_id('%s') + AND name = '%s' + """ + cons = self.execute(sql % (table_name, name), []) + if cons and cons[0] and cons[0][0]: + return "DROP CONSTRAINT %s" % cons[0][0] + return None + + def _fix_field_definition(self, field): + if isinstance(field, BooleanField): + if field.default == True: + field.default = 1 + if field.default == False: + field.default = 0 + + def add_column(self, table_name, name, field, keep_default=True): + self._fix_field_definition(field) + generic.DatabaseOperations.add_column(self, table_name, name, field, keep_default) + + def create_table(self, table_name, fields): + # Tweak stuff as needed + for name,f in fields: + self._fix_field_definition(f) + + # Run + generic.DatabaseOperations.create_table(self, 
table_name, fields) + + def rename_column(self, table_name, old, new): + """ + Renames the column of 'table_name' from 'old' to 'new'. + WARNING - This isn't transactional on MSSQL! + """ + if old == new: + # No Operation + return + # Examples on the MS site show the table name not being quoted... + qn = connection.ops.quote_name + params = (table_name,qn(old), qn(new)) + self.execute("EXEC sp_rename '%s.%s', %s, 'COLUMN'" % params) + + def rename_table(self, old_table_name, table_name): + """ + Renames the table 'old_table_name' to 'table_name'. + WARNING - This isn't transactional on MSSQL! + """ + if old_table_name == table_name: + # No Operation + return + qn = connection.ops.quote_name + params = (qn(old_table_name), qn(table_name)) + self.execute('EXEC sp_rename %s, %s' % params) diff --git a/south/db/sqlite3.py b/south/db/sqlite3.py new file mode 100644 index 000000000..bbba99941 --- /dev/null +++ b/south/db/sqlite3.py @@ -0,0 +1,225 @@ +import inspect +import re + +from django.db import connection +from django.db.models import ForeignKey + +from south.db import generic + +# from how .schema works as shown on http://www.sqlite.org/sqlite.html +GET_TABLE_DEF_SQL = """ +SELECT sql FROM + (SELECT * FROM sqlite_master UNION ALL + SELECT * FROM sqlite_temp_master) + WHERE tbl_name LIKE '%s' + AND type!='meta' AND sql NOT NULL AND name NOT LIKE 'sqlite_%%%%' + ORDER BY substr(type,2,1), name;""" + +class DatabaseOperations(generic.DatabaseOperations): + + """ + SQLite3 implementation of database operations. + """ + + backend_name = "sqlite3" + + # SQLite ignores foreign key constraints. I wish I could. + supports_foreign_keys = False + defered_alters = {} + def __init__(self): + super(DatabaseOperations, self).__init__() + # holds fields defintions gotten from the sql schema. the key is the table name and then + # it's a list of 2 item lists. the two items in the list are fieldname, sql definition + self._fields = {} + + def _populate_current_structure(self, table_name, force=False): + # get if we don't have it already or are being forced to refresh it + if force or not table_name in self._fields.keys(): + cursor = connection.cursor() + cursor.execute(GET_TABLE_DEF_SQL % table_name) + create_table = cursor.fetchall()[0][0] + first = create_table.find('(') + last = create_table.rfind(')') + # rip out the CREATE TABLE xxx ( ) and only get the field definitions plus + # add the trailing comma to make the next part easier + fields_part = create_table[first+1: last] + ',' + # pull out the field name and definition for each field + self._fields[table_name] = re.findall(r'"(\S+?)"(.*?),', fields_part, re.DOTALL) + + def _rebuild_table(self, table_name, new_fields): + """ + rebuilds the table using the new definitions. 
only one change + can be made per call and it must be either a rename, alter or + delete + """ + self._populate_current_structure(table_name) + + current_fields = self._fields[table_name] + temp_table_name = '%s_temp' % table_name + operation = None + changed_field = None + + if len(current_fields) != len(new_fields): + if len(current_fields) - len(new_fields) != 1: + raise ValueError('only one field can be deleted at a time, found %s missing fields' % str(len(current_fields) - len(new_fields))) + operation = 'delete' + current_field_names = [f[0] for f in current_fields] + new_field_names = [f[0] for f in new_fields] + # find the deleted field + for f in current_field_names: + if not f in new_field_names: + changed_field = f + break + else: + found = False + for current, new in zip(current_fields, new_fields): + if current[0] != new[0]: + if found: + raise ValueError('can only handle one change per call, found more than one') + operation = 'rename' + changed_field = (current[0], new[0]) + found = True + elif current[1] != new[1]: + if found: + raise ValueError('can only handle one change per call, found more than one') + operation = 'alter' + changed_field = current[0] + found = True + if not found: + raise ValueError('no changed found') + # create new table as temp + create = 'CREATE TABLE "%s" ( %s )' + fields_sql = ','.join(['"%s" %s' % (f[0], f[1]) for f in new_fields]) + sql = create % (temp_table_name, fields_sql) + + cursor = connection.cursor() + cursor.execute(sql) + + # copy over data + # rename, redef or delete? + if operation in ['rename', 'alter']: + sql = 'insert into %s select * from %s' % (temp_table_name, table_name) + elif operation == 'delete': + new_field_names = ','.join(['"%s"' % f[0] for f in new_fields]) + sql = 'insert into %s select %s from %s' % (temp_table_name, new_field_names, table_name) + cursor.execute(sql) + + # remove existing table + self.delete_table(table_name) + + # rename new table + self.rename_table(temp_table_name, table_name) + + # repopulate field info + self._populate_current_structure(table_name, force=True) + + def _defer_alter_sqlite_table(self, table_name, field_renames={}): + table_renames = self.defered_alters.get(table_name, {}) + table_renames.update(field_renames) + self.defered_alters[table_name] = table_renames + + # You can't add UNIQUE columns with an ALTER TABLE. + def add_column(self, table_name, name, field, *args, **kwds): + # Run ALTER TABLE with no unique column + unique, field._unique, field.db_index = field.unique, False, False + # If it's not nullable, and has no default, raise an error (SQLite is picky) + if (not field.null and + (not field.has_default() or field.get_default() is None) and + not field.empty_strings_allowed): + raise ValueError("You cannot add a null=False column without a default value.") + # Don't try and drop the default, it'll fail + kwds['keep_default'] = True + generic.DatabaseOperations.add_column(self, table_name, name, field, *args, **kwds) + # If it _was_ unique, make an index on it. + if unique: + self.create_index(table_name, [field.column], unique=True) + + def _alter_sqlite_table(self, table_name, field_renames={}): + + # Detect the model for the given table name + model = None + for omodel in self.current_orm: + if omodel._meta.db_table == table_name: + model = omodel + if model is None: + raise ValueError("Cannot find ORM model for '%s'." 
% table_name) + + temp_name = table_name + "_temporary_for_schema_change" + self.rename_table(table_name, temp_name) + fields = [(fld.name, fld) for fld in model._meta.fields] + self.create_table(table_name, fields) + + columns = [fld.column for name, fld in fields] + self.copy_data(temp_name, table_name, columns, field_renames) + self.delete_table(temp_name, cascade=False) + + def alter_column(self, table_name, name, field, explicit_name=True): + self._populate_current_structure(table_name) + new_fields = [] + for field_name, field_def in self._fields[table_name]: + if field_name == name: + if isinstance(field, ForeignKey): + field_name = name[:-3] # exclude the _id when calling column_sql + else: + field_name = name + new_fields.append((name, self.column_sql(table_name, field_name, field))) + else: + new_fields.append((field_name, field_def)) + self._rebuild_table(table_name, new_fields) + + + def delete_column(self, table_name, column_name): + self._populate_current_structure(table_name) + new_fields = [] + for field_name, field_def in self._fields[table_name]: + if field_name != column_name: + new_fields.append((field_name, field_def)) + self._rebuild_table(table_name, new_fields) + + def rename_column(self, table_name, old, new): + self._populate_current_structure(table_name) + new_fields = [] + for field_name, field_def in self._fields[table_name]: + if field_name == old: + new_fields.append((new, field_def)) + else: + new_fields.append((field_name, field_def)) + self._rebuild_table(table_name, new_fields) + + # Nor unique creation + def create_unique(self, table_name, columns): + """ + Not supported under SQLite. + """ + print " ! WARNING: SQLite does not support adding unique constraints. Ignored." + + # Nor unique deletion + def delete_unique(self, table_name, columns): + """ + Not supported under SQLite. + """ + print " ! WARNING: SQLite does not support removing unique constraints. Ignored." + + # No cascades on deletes + def delete_table(self, table_name, cascade=True): + generic.DatabaseOperations.delete_table(self, table_name, False) + + def copy_data(self, src, dst, fields, field_renames={}): + qn = connection.ops.quote_name + q_fields = [field for field in fields] + for old, new in field_renames.items(): + q_fields[q_fields.index(new)] = "%s AS %s" % (old, qn(new)) + sql = "INSERT INTO %s SELECT %s FROM %s;" % (qn(dst), ', '.join(q_fields), qn(src)) + self.execute(sql) + + def execute_deferred_sql(self): + """ + Executes all deferred SQL, resetting the deferred_sql list + """ + for table_name, params in self.defered_alters.items(): + self._alter_sqlite_table(table_name, params) + self.defered_alters = {} + + generic.DatabaseOperations.execute_deferred_sql(self) + + diff --git a/south/hacks/.gitignore b/south/hacks/.gitignore new file mode 100644 index 000000000..a74b07aee --- /dev/null +++ b/south/hacks/.gitignore @@ -0,0 +1 @@ +/*.pyc diff --git a/south/hacks/__init__.py b/south/hacks/__init__.py new file mode 100644 index 000000000..8f28503ed --- /dev/null +++ b/south/hacks/__init__.py @@ -0,0 +1,10 @@ +""" +The hacks module encapsulates all the horrible things that play with Django +internals in one, evil place. +This top file will automagically expose the correct Hacks class. +""" + +# Currently, these work for 1.0 and 1.1. 
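The SQLite backend above works around SQLite's lack of ALTER/DROP COLUMN by rebuilding the table: create a temporary table with the new definition, copy the rows across, drop the original and rename. A minimal standalone sketch of that pattern, using only the standard sqlite3 module (the person table and its columns are invented for illustration, not taken from the datatracker schema):

    import sqlite3

    conn = sqlite3.connect(":memory:")
    cur = conn.cursor()
    cur.execute('CREATE TABLE "person" ("id" integer PRIMARY KEY, "name" text, "age" integer)')
    cur.execute('INSERT INTO "person" ("name", "age") VALUES (?, ?)', ("Ada", 36))

    # Drop the "age" column by rebuilding, in the same spirit as south.db.sqlite3:
    cur.execute('CREATE TABLE "person_temp" ("id" integer PRIMARY KEY, "name" text)')
    cur.execute('INSERT INTO "person_temp" SELECT "id", "name" FROM "person"')
    cur.execute('DROP TABLE "person"')
    cur.execute('ALTER TABLE "person_temp" RENAME TO "person"')
    conn.commit()

    print(cur.execute('SELECT * FROM "person"').fetchall())  # [(1, u'Ada')] on Python 2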
+from south.hacks.django_1_0 import Hacks + +hacks = Hacks() \ No newline at end of file diff --git a/south/hacks/django_1_0.py b/south/hacks/django_1_0.py new file mode 100644 index 000000000..8acde3232 --- /dev/null +++ b/south/hacks/django_1_0.py @@ -0,0 +1,71 @@ +""" +Hacks for the Django 1.0/1.0.2 releases. +""" + +from django.conf import settings +from django.db import models +from django.db.models.loading import AppCache, cache + +class Hacks: + + def set_installed_apps(self, apps): + """ + Sets Django's INSTALLED_APPS setting to be effectively the list passed in. + """ + + # Make sure it's a list. + apps = list(apps) + + # This function will be monkeypatched into place. + def new_get_apps(): + return apps + + # Monkeypatch in! + models.get_apps_old, models.get_apps = models.get_apps, new_get_apps + settings.INSTALLED_APPS, settings.OLD_INSTALLED_APPS = ( + apps, + settings.INSTALLED_APPS, + ) + self._redo_app_cache() + + + def reset_installed_apps(self): + """ + Undoes the effect of set_installed_apps. + """ + models.get_apps = models.get_apps_old + settings.INSTALLED_APPS = settings.OLD_INSTALLED_APPS + self._redo_app_cache() + + + def _redo_app_cache(self): + """ + Used to repopulate AppCache after fiddling with INSTALLED_APPS. + """ + a = AppCache() + a.loaded = False + a._populate() + + + def clear_app_cache(self): + """ + Clears the contents of AppCache to a blank state, so new models + from the ORM can be added. + """ + self.old_app_models = cache.app_models + cache.app_models = {} + + + def unclear_app_cache(self): + """ + Reversed the effects of clear_app_cache. + """ + cache.app_models = self.old_app_models + + + def repopulate_app_cache(self): + """ + Rebuilds AppCache with the real model definitions. + """ + cache._populate() + \ No newline at end of file diff --git a/south/introspection_plugins/.gitignore b/south/introspection_plugins/.gitignore new file mode 100644 index 000000000..a74b07aee --- /dev/null +++ b/south/introspection_plugins/.gitignore @@ -0,0 +1 @@ +/*.pyc diff --git a/south/introspection_plugins/__init__.py b/south/introspection_plugins/__init__.py new file mode 100644 index 000000000..75ef06346 --- /dev/null +++ b/south/introspection_plugins/__init__.py @@ -0,0 +1,6 @@ +# This module contains built-in introspector plugins for various common +# Django apps. 
+ +# These imports trigger the lower-down files +import south.introspection_plugins.geodjango +import south.introspection_plugins.django_tagging diff --git a/south/introspection_plugins/django_tagging.py b/south/introspection_plugins/django_tagging.py new file mode 100644 index 000000000..065c35db2 --- /dev/null +++ b/south/introspection_plugins/django_tagging.py @@ -0,0 +1,19 @@ +from south.modelsinspector import add_introspection_rules + +try: + from tagging.fields import TagField +except ImportError: + pass +else: + rules = [ + ( + (TagField, ), + [], + { + "blank": ["blank", {"default": True}], + "max_length": ["max_length", {"default": 255}], + }, + ), + ] + + add_introspection_rules(rules, ["^tagging\.fields",]) diff --git a/south/introspection_plugins/geodjango.py b/south/introspection_plugins/geodjango.py new file mode 100644 index 000000000..cacad0ea2 --- /dev/null +++ b/south/introspection_plugins/geodjango.py @@ -0,0 +1,44 @@ +""" +GeoDjango introspection rules +""" + +import django +from django.conf import settings + +from south.modelsinspector import add_introspection_rules + +has_gis = "django.contrib.gis" in settings.INSTALLED_APPS + +if has_gis: + # Alright,import the field + from django.contrib.gis.db.models.fields import GeometryField + + # Make some introspection rules + if django.VERSION[0] == 1 and django.VERSION[1] >= 1: + # Django 1.1's gis module renamed these. + rules = [ + ( + (GeometryField, ), + [], + { + "srid": ["srid", {"default": 4326}], + "spatial_index": ["spatial_index", {"default": True}], + "dim": ["dim", {"default": 2}], + }, + ), + ] + else: + rules = [ + ( + (GeometryField, ), + [], + { + "srid": ["_srid", {"default": 4326}], + "spatial_index": ["_index", {"default": True}], + "dim": ["_dim", {"default": 2}], + }, + ), + ] + + # Install them + add_introspection_rules(rules, ["^django\.contrib\.gis"]) \ No newline at end of file diff --git a/south/logger.py b/south/logger.py new file mode 100644 index 000000000..052236aa1 --- /dev/null +++ b/south/logger.py @@ -0,0 +1,26 @@ +import sys +import logging +from django.conf import settings + +class NullHandler(logging.Handler): + def emit(self, record): + pass + +h = NullHandler() + +_logger = logging.getLogger("south") +_logger.addHandler(h) +_logger.setLevel(logging.DEBUG) +# TODO: Add a log formatter? + +def get_logger(): + debug_on = getattr(settings, "SOUTH_LOGGING_ON", False) + logging_file = getattr(settings, "SOUTH_LOGGING_FILE", False) + + if debug_on: + if logging_file: + _logger.addHandler( logging.FileHandler(logging_file) ) + _logger.setLevel(logging.DEBUG) + else: + raise IOError, "SOUTH_LOGGING_ON is True. You also need a SOUTH_LOGGING_FILE setting." 
+ return _logger diff --git a/south/management/.gitignore b/south/management/.gitignore new file mode 100644 index 000000000..a74b07aee --- /dev/null +++ b/south/management/.gitignore @@ -0,0 +1 @@ +/*.pyc diff --git a/south/management/__init__.py b/south/management/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/south/management/commands/.gitignore b/south/management/commands/.gitignore new file mode 100644 index 000000000..a74b07aee --- /dev/null +++ b/south/management/commands/.gitignore @@ -0,0 +1 @@ +/*.pyc diff --git a/south/management/commands/__init__.py b/south/management/commands/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/south/management/commands/convert_to_south.py b/south/management/commands/convert_to_south.py new file mode 100644 index 000000000..5dcbffbdc --- /dev/null +++ b/south/management/commands/convert_to_south.py @@ -0,0 +1,65 @@ +from django.core.management.base import BaseCommand +from django.core.management.color import no_style +from django.conf import settings +from django.db import models +from django.core import management +from optparse import make_option +from django.core.exceptions import ImproperlyConfigured +from south.migration import get_app +from south.hacks import hacks +import sys + +class Command(BaseCommand): + + option_list = BaseCommand.option_list + if '--verbosity' not in [opt.get_opt_string() for opt in BaseCommand.option_list]: + option_list += ( + make_option('--verbosity', action='store', dest='verbosity', default='1', + type='choice', choices=['0', '1', '2'], + help='Verbosity level; 0=minimal output, 1=normal output, 2=all output'), + ) + + help = "Quickly converts the named application to use South if it is currently using syncdb." + + def handle(self, app=None, *args, **options): + + # Make sure we have an app + if not app: + print "Please specify an app to convert." + return + + # See if the app exists + app = app.split(".")[-1] + try: + app_module = models.get_app(app) + except ImproperlyConfigured: + print "There is no enabled application matching '%s'." % app + return + + # Try to get its list of models + model_list = models.get_models(app_module) + if not model_list: + print "This application has no models; this command is for applications that already have models syncdb'd." + print "Make some models, and then use ./manage.py startmigration %s --initial instead." % app + return + + # Ask South if it thinks it's already got migrations + if get_app(app_module): + print "This application is already managed by South." + return + + # Finally! It seems we've got a candidate, so do the two-command trick + verbosity = int(options.get('verbosity', 0)) + management.call_command("startmigration", app, initial=True, verbosity=verbosity) + + # Now, we need to re-clean and sanitise appcache + hacks.clear_app_cache() + hacks.repopulate_app_cache() + + # Now, migrate + management.call_command("migrate", app, "0001", fake=True, verbosity=verbosity) + + print + print "App '%s' converted. Note that South assumed the application's models matched the database" % app + print "(i.e. you haven't changed it since last syncdb); if you have, you should delete the %s/migrations" % app + print "directory, revert models.py so it matches the database, and try again." diff --git a/south/management/commands/migrate.py b/south/management/commands/migrate.py new file mode 100644 index 000000000..038aea476 --- /dev/null +++ b/south/management/commands/migrate.py @@ -0,0 +1,120 @@ +""" +Migrate management command. 
+""" + +import sys +from optparse import make_option + +from django.core.management.base import BaseCommand +from django.core.management.color import no_style +from django.conf import settings +from django.db import models + +from south import migration + +class Command(BaseCommand): + option_list = BaseCommand.option_list + ( + make_option('--all', action='store_true', dest='all_apps', default=False, + help='Run the specified migration for all apps.'), + make_option('--list', action='store_true', dest='list', default=False, + help='List migrations noting those that have been applied'), + make_option('--skip', action='store_true', dest='skip', default=False, + help='Will skip over out-of-order missing migrations'), + make_option('--merge', action='store_true', dest='merge', default=False, + help='Will run out-of-order missing migrations as they are - no rollbacks.'), + make_option('--no-initial-data', action='store_true', dest='no_initial_data', default=False, + help='Skips loading initial data if specified.'), + make_option('--fake', action='store_true', dest='fake', default=False, + help="Pretends to do the migrations, but doesn't actually execute them."), + make_option('--db-dry-run', action='store_true', dest='db_dry_run', default=False, + help="Doesn't execute the SQL generated by the db methods, and doesn't store a record that the migration(s) occurred. Useful to test migrations before applying them."), + ) + if '--verbosity' not in [opt.get_opt_string() for opt in BaseCommand.option_list]: + option_list += ( + make_option('--verbosity', action='store', dest='verbosity', default='1', + type='choice', choices=['0', '1', '2'], + help='Verbosity level; 0=minimal output, 1=normal output, 2=all output'), + ) + help = "Runs migrations for all apps." + args = "[appname] [migrationname|zero] [--all] [--list] [--skip] [--merge] [--no-initial-data] [--fake] [--db-dry-run]" + + def handle(self, app=None, target=None, skip=False, merge=False, backwards=False, fake=False, db_dry_run=False, list=False, **options): + + # Work out what the resolve mode is + resolve_mode = merge and "merge" or (skip and "skip" or None) + + # NOTE: THIS IS DUPLICATED FROM django.core.management.commands.syncdb + # This code imports any module named 'management' in INSTALLED_APPS. + # The 'management' module is the preferred way of listening to post_syncdb + # signals, and since we're sending those out with create_table migrations, + # we need apps to behave correctly. + for app_name in settings.INSTALLED_APPS: + try: + __import__(app_name + '.management', {}, {}, ['']) + except ImportError, exc: + msg = exc.args[0] + if not msg.startswith('No module named') or 'management' not in msg: + raise + # END DJANGO DUPE CODE + + # if all_apps flag is set, shift app over to target + if options.get('all_apps', False): + target = app + app = None + + # Migrate each app + if app: + apps = [migration.get_app(app.split(".")[-1])] + if apps == [None]: + print "The app '%s' does not appear to use migrations." 
% app + print "./manage.py migrate " + self.args + return + else: + apps = migration.get_migrated_apps() + + if list and apps: + list_migrations(apps) + + if not list: + tree = migration.dependency_tree() + + for app in apps: + result = migration.migrate_app( + app, + tree, + resolve_mode = resolve_mode, + target_name = target, + fake = fake, + db_dry_run = db_dry_run, + verbosity = int(options.get('verbosity', 0)), + load_inital_data = not options.get('no_initial_data', False), + skip = skip, + ) + if result is False: + return + + +def list_migrations(apps): + from south.models import MigrationHistory + apps = list(apps) + names = [migration.get_app_name(app) for app in apps] + applied_migrations = MigrationHistory.objects.filter(app_name__in=names) + applied_migrations = ['%s.%s' % (mi.app_name,mi.migration) for mi in applied_migrations] + + print + for app in apps: + print migration.get_app_name(app) + all_migrations = migration.get_migration_names(app) + for migration_name in all_migrations: + long_form = '%s.%s' % (migration.get_app_name(app),migration_name) + if long_form in applied_migrations: + print format_migration_list_item(migration_name) + else: + print format_migration_list_item(migration_name, applied=False) + print + + +def format_migration_list_item(name, applied=True): + if applied: + return ' * %s' % name + return ' %s' % name diff --git a/south/management/commands/startmigration.py b/south/management/commands/startmigration.py new file mode 100644 index 000000000..d2639db3e --- /dev/null +++ b/south/management/commands/startmigration.py @@ -0,0 +1,1132 @@ +""" +Startmigration command, version 2. +""" + +import sys +import os +import re +import string +import random +import inspect +import parser +from optparse import make_option + +from django.core.management.base import BaseCommand +from django.core.management.color import no_style +from django.db import models +from django.db.models.fields.related import RECURSIVE_RELATIONSHIP_CONSTANT +from django.contrib.contenttypes.generic import GenericRelation +from django.db.models.fields import FieldDoesNotExist +from django.conf import settings + +try: + set +except NameError: + from sets import Set as set + +from south import migration, modelsinspector + + +class Command(BaseCommand): + option_list = BaseCommand.option_list + ( + make_option('--model', action='append', dest='added_model_list', type='string', + help='Generate a Create Table migration for the specified model. Add multiple models to this migration with subsequent --model parameters.'), + make_option('--add-field', action='append', dest='added_field_list', type='string', + help='Generate an Add Column migration for the specified modelname.fieldname - you can use this multiple times to add more than one column.'), + make_option('--add-index', action='append', dest='added_index_list', type='string', + help='Generate an Add Index migration for the specified modelname.fieldname - you can use this multiple times to add more than one column.'), + make_option('--initial', action='store_true', dest='initial', default=False, + help='Generate the initial schema for the app.'), + make_option('--auto', action='store_true', dest='auto', default=False, + help='Attempt to automatically detect differences from the last migration.'), + make_option('--freeze', action='append', dest='freeze_list', type='string', + help='Freeze the specified model(s). 
Pass in either an app name (to freeze the whole app) or a single model, as appname.modelname.'), + make_option('--stdout', action='store_true', dest='stdout', default=False, + help='Print the migration to stdout instead of writing it to a file.'), + ) + help = "Creates a new template migration for the given app" + usage_str = "Usage: ./manage.py startmigration appname migrationname [--initial] [--auto] [--model ModelName] [--add-field ModelName.field_name] [--freeze] [--stdout]" + + def handle(self, app=None, name="", added_model_list=None, added_field_list=None, initial=False, freeze_list=None, auto=False, stdout=False, added_index_list=None, **options): + + # Any supposed lists that are None become empty lists + added_model_list = added_model_list or [] + added_field_list = added_field_list or [] + added_index_list = added_index_list or [] + + # --stdout means name = - + if stdout: + name = "-" + + # Make sure options are compatable + if initial and (added_model_list or added_field_list or auto): + print "You cannot use --initial and other options together" + print self.usage_str + return + if auto and (added_model_list or added_field_list or initial): + print "You cannot use --auto and other options together" + print self.usage_str + return + + # specify the default name 'initial' if a name wasn't specified and we're + # doing a migration for an entire app + if not name and initial: + name = 'initial' + + # if not name, there's an error + if not name: + print "You must name this migration" + print self.usage_str + return + + if not app: + print "Please provide an app in which to create the migration." + print self.usage_str + return + + # Make sure the app is short form + app = app.split(".")[-1] + + # See if the app exists + app_models_module = models.get_app(app) + if not app_models_module: + print "App '%s' doesn't seem to exist, isn't in INSTALLED_APPS, or has no models." % app + print self.usage_str + return + + # If they've set SOUTH_AUTO_FREEZE_APP = True (or not set it - defaults to True) + if not hasattr(settings, 'SOUTH_AUTO_FREEZE_APP') or settings.SOUTH_AUTO_FREEZE_APP: + if freeze_list and app not in freeze_list: + freeze_list += [app] + else: + freeze_list = [app] + + # Make the migrations directory if it's not there + app_module_path = app_models_module.__name__.split('.')[0:-1] + try: + app_module = __import__('.'.join(app_module_path), {}, {}, ['']) + except ImportError: + print "Couldn't find path to App '%s'." % app + print self.usage_str + return + + migrations_dir = os.path.join( + os.path.dirname(app_module.__file__), + "migrations", + ) + + # Make sure there's a migrations directory and __init__.py + if not os.path.isdir(migrations_dir): + print "Creating migrations directory at '%s'..." % migrations_dir + os.mkdir(migrations_dir) + init_path = os.path.join(migrations_dir, "__init__.py") + if not os.path.isfile(init_path): + # Touch the init py file + print "Creating __init__.py in '%s'..." % migrations_dir + open(init_path, "w").close() + + # See what filename is next in line. We assume they use numbers. 
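For reference, the "two-command trick" used by convert_to_south above (write an initial migration, then fake-apply it) can also be driven from Python via call_command; a sketch with a hypothetical app label 'blog':

    from django.core import management

    # 1. Write an initial migration describing the models as they stand today.
    management.call_command("startmigration", "blog", initial=True, verbosity=1)
    # 2. Record that migration as applied without touching the existing tables.
    management.call_command("migrate", "blog", "0001", fake=True, verbosity=1)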
+ migrations = migration.get_migration_names(migration.get_app(app)) + highest_number = 0 + for migration_name in migrations: + try: + number = int(migration_name.split("_")[0]) + highest_number = max(highest_number, number) + except ValueError: + pass + + # Make the new filename + new_filename = "%04i%s_%s.py" % ( + highest_number + 1, + "".join([random.choice(string.letters.lower()) for i in range(0)]), # Possible random stuff insertion + name, + ) + + # Find the source file encoding, using PEP 0263's method + encoding = None + first_two_lines = inspect.getsourcelines(app_models_module)[0][:2] + for line in first_two_lines: + if re.search("coding[:=]\s*([-\w.]+)", line): + encoding = line + + # Initialise forwards, backwards and models to blank things + forwards = "" + backwards = "" + frozen_models = {} # Frozen models, used by the Fake ORM + complete_apps = set() # Apps that are completely frozen - useable for diffing. + + # Sets of actions + added_models = set() + deleted_models = [] # Special: contains instances _not_ string keys + added_fields = set() + deleted_fields = [] # Similar to deleted_models + changed_fields = [] # (mkey, fname, old_def, new_def) + added_uniques = set() # (mkey, field_names) + deleted_uniques = set() # (mkey, field_names) + + added_indexes = set() + deleted_indexes = [] + + + # --initial means 'add all models in this app'. + if initial: + for model in models.get_models(app_models_module): + added_models.add("%s.%s" % (app, model._meta.object_name)) + + # Added models might be 'model' or 'app.model'. + for modelname in added_model_list: + if "." in modelname: + added_models.add(modelname) + else: + added_models.add("%s.%s" % (app, modelname)) + + # Fields need translating from "model.field" to (app.model, field) + for fielddef in added_field_list: + try: + modelname, fieldname = fielddef.split(".", 1) + except ValueError: + print "The field specification '%s' is not in modelname.fieldname format." % fielddef + else: + added_fields.add(("%s.%s" % (app, modelname), fieldname)) + + # same thing as above, but for indexes + for fielddef in added_index_list: + try: + modelname, fieldname = fielddef.split(".", 1) + except ValueError: + print "The field specification '%s' is not in modelname.fieldname format." % fielddef + else: + added_indexes.add(("%s.%s" % (app, modelname), fieldname)) + + # Add anything frozen (I almost called the dict Iceland...) + if freeze_list: + for item in freeze_list: + if "." in item: + # It's a specific model + app_name, model_name = item.split(".", 1) + model = models.get_model(app_name, model_name) + if model is None: + print "Cannot find the model '%s' to freeze it." % item + print self.usage_str + return + frozen_models[model] = None + else: + # Get everything in an app! + frozen_models.update(dict([(x, None) for x in models.get_models(models.get_app(item))])) + complete_apps.add(item.split(".")[-1]) + # For every model in the freeze list, add in frozen dependencies + for model in list(frozen_models): + frozen_models.update(model_dependencies(model)) + + + ### Automatic Detection ### + if auto: + # Get the last migration for this app + last_models = None + app_module = migration.get_app(app) + if app_module is None: + print "You cannot use automatic detection on the first migration of an app. Try --initial instead." + else: + migrations = list(migration.get_migration_classes(app_module)) + if not migrations: + print "You cannot use automatic detection on the first migration of an app. Try --initial instead." 
+ else: + if hasattr(migrations[-1], "complete_apps") and \ + app in migrations[-1].complete_apps: + last_models = migrations[-1].models + last_orm = migrations[-1].orm + else: + print "You cannot use automatic detection, since the previous migration does not have this whole app frozen.\nEither make migrations using '--freeze %s' or set 'SOUTH_AUTO_FREEZE_APP = True' in your settings.py." % app + + # Right, did we manage to get the last set of models? + if last_models is None: + print self.usage_str + return + + new = dict([ + (model_key(model), prep_for_freeze(model)) + for model in models.get_models(app_models_module) + if ( + not getattr(model._meta, "proxy", False) and \ + getattr(model._meta, "managed", True) and \ + not getattr(model._meta, "abstract", False) + ) + ]) + # And filter other apps out of the old + old = dict([ + (key, fields) + for key, fields in last_models.items() + if key.split(".", 1)[0] == app + ]) + am, dm, cm, af, df, cf, afu, dfu = models_diff(old, new) + + # For models that were there before and after, do a meta diff + was_meta_change = False + for mkey in cm: + au, du = meta_diff(old[mkey].get("Meta", {}), new[mkey].get("Meta", {})) + for entry in au: + added_uniques.add((mkey, entry)) + was_meta_change = True + for entry in du: + deleted_uniques.add((mkey, entry, last_orm[mkey])) + was_meta_change = True + + if not (am or dm or af or df or cf or afu or dfu or was_meta_change): + print "Nothing seems to have changed." + return + + # Add items to the todo lists + added_models.update(am) + added_fields.update(af) + changed_fields.extend([(m, fn, ot, nt, last_orm) for m, fn, ot, nt in cf]) + + # Deleted models are from the past, and so we use instances instead. + for mkey in dm: + model = last_orm[mkey] + fields = last_models[mkey] + if "Meta" in fields: + del fields['Meta'] + deleted_models.append((model, fields, last_models)) + + # For deleted fields, we tag the instance on the end too + for mkey, fname in df: + deleted_fields.append(( + mkey, + fname, + last_orm[mkey]._meta.get_field_by_name(fname)[0], + last_models[mkey][fname], + last_models, + )) + + # Uniques need merging + added_uniques = added_uniques.union(afu) + + for mkey, entry in dfu: + deleted_uniques.add((mkey, entry, last_orm[mkey])) + + + ### Added model ### + for mkey in added_models: + + print " + Added model '%s'" % (mkey,) + + model = model_unkey(mkey) + + # Add the model's dependencies to the frozens + frozen_models.update(model_dependencies(model)) + # Get the field definitions + fields = modelsinspector.get_model_fields(model) + # Turn the (class, args, kwargs) format into a string + fields = triples_to_defs(app, model, fields) + # Make the code + forwards += CREATE_TABLE_SNIPPET % ( + model._meta.object_name, + model._meta.db_table, + "\n ".join(["('%s', orm[%r])," % (fname, mkey + ":" + fname) for fname, fdef in fields.items()]), + model._meta.app_label, + model._meta.object_name, + ) + # And the backwards code + backwards += DELETE_TABLE_SNIPPET % ( + model._meta.object_name, + model._meta.db_table + ) + # Now add M2M fields to be done + for field in model._meta.local_many_to_many: + added_fields.add((mkey, field.attname)) + # And unique_togethers to be added + for ut in model._meta.unique_together: + added_uniques.add((mkey, tuple(ut))) + + + ### Added fields ### + for mkey, field_name in added_fields: + + # Get the model + model = model_unkey(mkey) + # Get the field + try: + field = model._meta.get_field(field_name) + except FieldDoesNotExist: + print "Model '%s' doesn't have a field 
'%s'" % (mkey, field_name) + return + + # ManyToMany fields need special attention. + if isinstance(field, models.ManyToManyField): + if not field.rel.through: # Bug #120 + # Add a frozen model for each side + frozen_models[model] = None + frozen_models[field.rel.to] = None + # And a field defn, that's actually a table creation + forwards += CREATE_M2MFIELD_SNIPPET % ( + model._meta.object_name, + field.name, + field.m2m_db_table(), + field.m2m_column_name()[:-3], # strip off the '_id' at the end + poss_ormise(app, model, model._meta.object_name), + field.m2m_reverse_name()[:-3], # strip off the '_id' at the ned + poss_ormise(app, field.rel.to, field.rel.to._meta.object_name) + ) + backwards += DELETE_M2MFIELD_SNIPPET % ( + model._meta.object_name, + field.name, + field.m2m_db_table() + ) + print " + Added M2M '%s.%s'" % (mkey, field_name) + continue + + # GenericRelations need ignoring + if isinstance(field, GenericRelation): + continue + + print " + Added field '%s.%s'" % (mkey, field_name) + + # Add any dependencies + frozen_models.update(field_dependencies(field)) + + # Work out the definition + triple = remove_useless_attributes( + modelsinspector.get_model_fields(model)[field_name]) + + field_definition = make_field_constructor(app, field, triple) + + forwards += CREATE_FIELD_SNIPPET % ( + model._meta.object_name, + field.name, + model._meta.db_table, + field.name, + "orm[%r]" % (mkey + ":" + field.name), + ) + backwards += DELETE_FIELD_SNIPPET % ( + model._meta.object_name, + field.name, + model._meta.db_table, + field.column, + ) + + + ### Deleted fields ### + for mkey, field_name, field, triple, last_models in deleted_fields: + + print " - Deleted field '%s.%s'" % (mkey, field_name) + + # Get the model + model = model_unkey(mkey) + + # ManyToMany fields need special attention. + if isinstance(field, models.ManyToManyField): + # And a field defn, that's actually a table deletion + forwards += DELETE_M2MFIELD_SNIPPET % ( + model._meta.object_name, + field.name, + field.m2m_db_table() + ) + backwards += CREATE_M2MFIELD_SNIPPET % ( + model._meta.object_name, + field.name, + field.m2m_db_table(), + field.m2m_column_name()[:-3], # strip off the '_id' at the end + poss_ormise(app, model, model._meta.object_name), + field.m2m_reverse_name()[:-3], # strip off the '_id' at the ned + poss_ormise(app, field.rel.to, field.rel.to._meta.object_name) + ) + continue + + # Work out the definition + triple = remove_useless_attributes(triple) + field_definition = make_field_constructor(app, field, triple) + + forwards += DELETE_FIELD_SNIPPET % ( + model._meta.object_name, + field.name, + model._meta.db_table, + field.column, + ) + backwards += CREATE_FIELD_SNIPPET % ( + model._meta.object_name, + field.name, + model._meta.db_table, + field.name, + "orm[%r]" % (mkey + ":" + field.name), + ) + + + ### Deleted model ### + for model, fields, last_models in deleted_models: + + print " - Deleted model '%s.%s'" % (model._meta.app_label,model._meta.object_name) + + # Turn the (class, args, kwargs) format into a string + fields = triples_to_defs(app, model, fields) + + # Make the code + forwards += DELETE_TABLE_SNIPPET % ( + model._meta.object_name, + model._meta.db_table + ) + # And the backwards code + backwards += CREATE_TABLE_SNIPPET % ( + model._meta.object_name, + model._meta.db_table, + "\n ".join(["('%s', orm[%r])," % (fname, mkey + ":" + fname) for fname, fdef in fields.items()]), + model._meta.app_label, + model._meta.object_name, + ) + + ### Added indexes. 
going here, since it might add to added_uniques ### + for mkey, field_name in added_indexes: + # Get the model + model = model_unkey(mkey) + # Get the field + try: + field = model._meta.get_field(field_name) + except FieldDoesNotExist: + print "Model '%s' doesn't have a field '%s'" % (mkey, field_name) + return + + if field.unique: + ut = (mkey, (field.name,)) + added_uniques.add(ut) + + elif field.db_index: + # Create migrations + forwards += CREATE_INDEX_SNIPPET % ( + model._meta.object_name, + field.name, + model._meta.db_table, + field.name, + ) + + backwards += DELETE_INDEX_SNIPPET % ( + model._meta.object_name, + field.name, + model._meta.db_table, + field.column, + ) + print " + Added index for '%s.%s'" % (mkey, field_name) + + else: + print "Field '%s.%s' does not have db_index or unique set to True" % (mkey, field_name) + return + + ### Changed fields ### + for mkey, field_name, old_triple, new_triple, last_orm in changed_fields: + + model = model_unkey(mkey) + + old_def = triples_to_defs(app, model, { + field_name: old_triple, + })[field_name] + new_def = triples_to_defs(app, model, { + field_name: new_triple, + })[field_name] + + # We need to create the fields, to see if it needs _id, or if it's an M2M + field = model._meta.get_field_by_name(field_name)[0] + old_field = last_orm[mkey + ":" + field_name] + + if field.column != old_field.column: + forwards += RENAME_COLUMN_SNIPPET % { + "field_name": field_name, + "old_column": old_field.column, + "new_column": field.column, + } + + if hasattr(field, "m2m_db_table"): + # See if anything has ACTUALLY changed + if old_triple[1] != new_triple[1]: + print " ! Detected change to the target model of M2M field '%s.%s'. South can't handle this; leaving this change out." % (mkey, field_name) + continue + + print " ~ Changed field '%s.%s'." % (mkey, field_name) + + forwards += CHANGE_FIELD_SNIPPET % ( + model._meta.object_name, + field_name, + new_def, + model._meta.db_table, + field.get_attname(), + "orm[%r]" % (mkey + ":" + field.name), + ) + + backwards += CHANGE_FIELD_SNIPPET % ( + model._meta.object_name, + field_name, + old_def, + model._meta.db_table, + field.get_attname(), + "orm[%r]" % (mkey + ":" + field.name), + ) + + if field.column != old_field.column: + backwards += RENAME_COLUMN_SNIPPET % { + "field_name": field_name, + "old_column": field.column, + "new_column": old_field.column, + } + + + ### Added unique_togethers ### + for mkey, ut in added_uniques: + + model = model_unkey(mkey) + if len(ut) == 1: + print " + Added unique for %s on %s." % (", ".join(ut), model._meta.object_name) + else: + print " + Added unique_together for [%s] on %s." % (", ".join(ut), model._meta.object_name) + + cols = [get_field_column(model, f) for f in ut] + + forwards += CREATE_UNIQUE_SNIPPET % ( + ", ".join(ut), + model._meta.object_name, + model._meta.db_table, + cols, + ) + + backwards = DELETE_UNIQUE_SNIPPET % ( + ", ".join(ut), + model._meta.object_name, + model._meta.db_table, + cols, + ) + backwards + + + ### Deleted unique_togethers ### + for mkey, ut, model in deleted_uniques: + + if len(ut) == 1: + print " - Deleted unique for %s on %s." % (", ".join(ut), model._meta.object_name) + else: + print " - Deleted unique_together for [%s] on %s." 
% (", ".join(ut), model._meta.object_name) + + cols = [get_field_column(model, f) for f in ut] + + forwards = DELETE_UNIQUE_SNIPPET % ( + ", ".join(ut), + model._meta.object_name, + model._meta.db_table, + cols, + ) + forwards + + backwards += CREATE_UNIQUE_SNIPPET % ( + ", ".join(ut), + model._meta.object_name, + model._meta.db_table, + cols, + ) + + + # Default values for forwards/backwards + if (not forwards) and (not backwards): + forwards = '"Write your forwards migration here"' + backwards = '"Write your backwards migration here"' + + all_models = {} + + # Fill out frozen model definitions + for model, last_models in frozen_models.items(): + if hasattr(model._meta, "proxy") and model._meta.proxy: + model = model._meta.proxy_for_model + all_models[model_key(model)] = prep_for_freeze(model, last_models) + + # Do some model cleanup, and warnings + for modelname, model in all_models.items(): + for fieldname, fielddef in model.items(): + # Remove empty-after-cleaning Metas. + if fieldname == "Meta" and not fielddef: + del model['Meta'] + # Warn about undefined fields + elif fielddef is None: + print "WARNING: Cannot get definition for '%s' on '%s'. Please edit the migration manually to define it, or add the south_field_triple method to it." % ( + fieldname, + modelname, + ) + model[fieldname] = FIELD_NEEDS_DEF_SNIPPET + + # So, what's in this file, then? + file_contents = MIGRATION_SNIPPET % ( + encoding or "", '.'.join(app_module_path), + forwards, + backwards, + pprint_frozen_models(all_models), + complete_apps and "complete_apps = [%s]" % (", ".join(map(repr, complete_apps))) or "" + ) + # - is a special name which means 'print to stdout' + if name == "-": + print file_contents + # Write the migration file if the name isn't - + else: + fp = open(os.path.join(migrations_dir, new_filename), "w") + fp.write(file_contents) + fp.close() + print "Created %s." % new_filename + + +### Cleaning functions for freezing + + +def ormise_triple(field, triple): + "Given a 'triple' definition, runs poss_ormise on each arg." + + # If it's a string defn, return it plain. + if not isinstance(triple, (list, tuple)): + return triple + + # For each arg, if it's a related type, try ORMising it. + args = [] + for arg in triple[1]: + if hasattr(field, "rel") and hasattr(field.rel, "to") and field.rel.to: + args.append(poss_ormise(None, field.rel.to, arg)) + else: + args.append(arg) + + return (triple[0], args, triple[2]) + + +def prep_for_freeze(model, last_models=None): + # If we have a set of models to use, use them. + if last_models: + fields = last_models[model_key(model)] + else: + fields = modelsinspector.get_model_fields(model, m2m=True) + # Remove _stub if it stuck in + if "_stub" in fields: + del fields["_stub"] + # Remove useless attributes (like 'choices') + for name, field in fields.items(): + if name == "Meta": + continue + real_field = model._meta.get_field_by_name(name)[0] + fields[name] = ormise_triple(real_field, remove_useless_attributes(field)) + # See if there's a Meta + if last_models: + meta = last_models[model_key(model)].get("Meta", {}) + else: + meta = modelsinspector.get_model_meta(model) + if meta: + fields['Meta'] = remove_useless_meta(meta) + return fields + + +### Module handling functions + +def model_key(model): + "For a given model, return 'appname.modelname'." + return "%s.%s" % (model._meta.app_label, model._meta.object_name.lower()) + +def model_unkey(key): + "For 'appname.modelname', return the model." 
+ app, modelname = key.split(".", 1) + model = models.get_model(app, modelname) + if not model: + print "Couldn't find model '%s' in app '%s'" % (modelname, app) + sys.exit(1) + return model + +### Dependency resolvers + +def model_dependencies(model, last_models=None, checked_models=None): + """ + Returns a set of models this one depends on to be defined; things like + OneToOneFields as ID, ForeignKeys everywhere, etc. + """ + depends = {} + checked_models = checked_models or set() + # Get deps for each field + for field in model._meta.fields + model._meta.many_to_many: + depends.update(field_dependencies(field, last_models)) + # Now recurse + new_to_check = set(depends.keys()) - checked_models + while new_to_check: + checked_model = new_to_check.pop() + if checked_model == model or checked_model in checked_models: + continue + checked_models.add(checked_model) + deps = model_dependencies(checked_model, last_models, checked_models) + # Loop through dependencies... + for dep, value in deps.items(): + # If the new dep is not already checked, add to the queue + if (dep not in depends) and (dep not in new_to_check) and (dep not in checked_models): + new_to_check.add(dep) + depends[dep] = value + return depends + + +def field_dependencies(field, last_models=None, checked_models=None): + checked_models = checked_models or set() + depends = {} + if isinstance(field, (models.OneToOneField, models.ForeignKey, models.ManyToManyField, GenericRelation)): + if field.rel.to in checked_models: + return depends + checked_models.add(field.rel.to) + depends[field.rel.to] = last_models + depends.update(field_dependencies(field.rel.to._meta.pk, last_models, checked_models)) + return depends + + + +### Prettyprinters + +def pprint_frozen_models(models): + return "{\n %s\n }" % ",\n ".join([ + "%r: %s" % (name, pprint_fields(fields)) + for name, fields in sorted(models.items()) + ]) + +def pprint_fields(fields): + return "{\n %s\n }" % ",\n ".join([ + "%r: %r" % (name, defn) + for name, defn in sorted(fields.items()) + ]) + + +### Output sanitisers + + +USELESS_KEYWORDS = ["choices", "help_text", "upload_to", "verbose_name"] +USELESS_DB_KEYWORDS = ["related_name", "default"] # Important for ORM, not for DB. + +def remove_useless_attributes(field, db=False): + "Removes useless (for database) attributes from the field's defn." + keywords = db and USELESS_DB_KEYWORDS or USELESS_KEYWORDS + if field: + for name in keywords: + if name in field[2]: + del field[2][name] + return field + +USELESS_META = ["verbose_name", "verbose_name_plural"] +def remove_useless_meta(meta): + "Removes useless (for database) attributes from the table's meta." + if meta: + for name in USELESS_META: + if name in meta: + del meta[name] + return meta + + +### Turns (class, args, kwargs) triples into function defs. + +def make_field_constructor(default_app, field, triple): + """ + Given the defualt app, the field class, + and the defn triple (or string), make the definition string. + """ + # It might be None; return a placeholder + if triple is None: + return FIELD_NEEDS_DEF_SNIPPET + # It might be a defn string already... 
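+    # A triple is (field class path, positional args, keyword args), e.g.
+    # ('django.db.models.fields.CharField', [], {'max_length': '255'}) -- illustrative values.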
+ if isinstance(triple, (str, unicode)): + return triple + # OK, do it the hard way + if hasattr(field, "rel") and hasattr(field.rel, "to") and field.rel.to: + rel_to = field.rel.to + else: + rel_to = None + args = [poss_ormise(default_app, rel_to, arg) for arg in triple[1]] + kwds = ["%s=%s" % (k, poss_ormise(default_app, rel_to, v)) for k,v in triple[2].items()] + return "%s(%s)" % (triple[0], ", ".join(args+kwds)) + +QUOTES = ['"""', "'''", '"', "'"] + +def poss_ormise(default_app, rel_to, arg): + """ + Given the name of something that needs orm. stuck on the front and + a python eval-able string, possibly add orm. to it. + """ + orig_arg = arg + # If it's not a relative field, short-circuit out + if not rel_to: + return arg + # Get the name of the other model + rel_name = rel_to._meta.object_name + # Is it in a different app? If so, use proper addressing. + if rel_to._meta.app_label != default_app: + real_name = "orm['%s.%s']" % (rel_to._meta.app_label, rel_name) + else: + real_name = "orm.%s" % rel_name + # If it's surrounded by quotes, get rid of those + for quote_type in QUOTES: + l = len(quote_type) + if arg[:l] == quote_type and arg[-l:] == quote_type: + arg = arg[l:-l] + break + # Now see if we can replace it. + if arg.lower() == rel_name.lower(): + return real_name + # Or perhaps it's app.model? + if arg.lower() == rel_to._meta.app_label.lower() + "." + rel_name.lower(): + return real_name + # Or perhaps it's 'self'? + if arg == RECURSIVE_RELATIONSHIP_CONSTANT: + return real_name + return orig_arg + + +### Diffing functions between sets of models + +def models_diff(old, new): + """ + Returns the difference between the old and new sets of models as a 5-tuple: + added_models, deleted_models, added_fields, deleted_fields, changed_fields + """ + + added_models = set() + deleted_models = set() + ignored_models = set() # Stubs for backwards + continued_models = set() # Models that existed before and after + added_fields = set() + deleted_fields = set() + changed_fields = [] + added_uniques = set() + deleted_uniques = set() + + # See if anything's vanished + for key in old: + if key not in new: + if "_stub" not in old[key]: + deleted_models.add(key) + else: + ignored_models.add(key) + + # Or appeared + for key in new: + if key not in old: + added_models.add(key) + + # Now, for every model that's stayed the same, check its fields. + for key in old: + if key not in deleted_models and key not in ignored_models: + continued_models.add(key) + still_there = set() + # Find fields that have vanished. 
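+            # ("Meta" entries are skipped here; only real field names are compared.)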
+ for fieldname in old[key]: + if fieldname != "Meta" and fieldname not in new[key]: + deleted_fields.add((key, fieldname)) + else: + still_there.add(fieldname) + # And ones that have appeared + for fieldname in new[key]: + if fieldname != "Meta" and fieldname not in old[key]: + added_fields.add((key, fieldname)) + # For the ones that exist in both models, see if they were changed + for fieldname in still_there: + if fieldname != "Meta": + if different_attributes( + remove_useless_attributes(old[key][fieldname], True), + remove_useless_attributes(new[key][fieldname], True)): + changed_fields.append((key, fieldname, old[key][fieldname], new[key][fieldname])) + # See if their uniques have changed + old_triple = old[key][fieldname] + new_triple = new[key][fieldname] + if is_triple(old_triple) and is_triple(new_triple): + if old_triple[2].get("unique", "False") != new_triple[2].get("unique", "False"): + # Make sure we look at the one explicitly given to see what happened + if "unique" in old_triple[2]: + if old_triple[2]['unique'] == "False": + added_uniques.add((key, (fieldname,))) + else: + deleted_uniques.add((key, (fieldname,))) + else: + if new_triple[2]['unique'] == "False": + deleted_uniques.add((key, (fieldname,))) + else: + added_uniques.add((key, (fieldname,))) + + return added_models, deleted_models, continued_models, added_fields, deleted_fields, changed_fields, added_uniques, deleted_uniques + + +def is_triple(triple): + "Returns whether the argument is a triple." + return isinstance(triple, (list, tuple)) and len(triple) == 3 and \ + isinstance(triple[0], (str, unicode)) and \ + isinstance(triple[1], (list, tuple)) and \ + isinstance(triple[2], dict) + + +def different_attributes(old, new): + """ + Backwards-compat comparison that ignores orm. on the RHS and not the left + and which knows django.db.models.fields.CharField = models.CharField. + Has a whole load of tests in tests/autodetectoion.py. + """ + + # If they're not triples, just do normal comparison + if not is_triple(old) or not is_triple(new): + return old != new + + # Expand them out into parts + old_field, old_pos, old_kwd = old + new_field, new_pos, new_kwd = new + + # Copy the positional and keyword arguments so we can compare them and pop off things + old_pos, new_pos = old_pos[:], new_pos[:] + old_kwd = dict(old_kwd.items()) + new_kwd = dict(new_kwd.items()) + + # Remove comparison of the existence of 'unique', that's done elsewhere. + # TODO: Make this work for custom fields where unique= means something else? + if "unique" in old_kwd: + del old_kwd['unique'] + if "unique" in new_kwd: + del new_kwd['unique'] + + # If the first bit is different, check it's not by dj.db.models... + if old_field != new_field: + if old_field.startswith("models.") and (new_field.startswith("django.db.models") \ + or new_field.startswith("django.contrib.gis")): + if old_field.split(".")[-1] != new_field.split(".")[-1]: + return True + else: + # Remove those fields from the final comparison + old_field = new_field = "" + + # If there's a positional argument in the first, and a 'to' in the second, + # see if they're actually comparable. 
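+    # e.g. an old positional relation argument compared against a new
+    # to=orm['app.Model'] keyword (illustrative of the case handled below).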
+ if (old_pos and "to" in new_kwd) and ("orm" in new_kwd['to'] and "orm" not in old_pos[0]): + # Do special comparison to fix #153 + try: + if old_pos[0] != new_kwd['to'].split("'")[1].split(".")[1]: + return True + except IndexError: + pass # Fall back to next comparison + # Remove those attrs from the final comparison + old_pos = old_pos[1:] + del new_kwd['to'] + + return old_field != new_field or old_pos != new_pos or old_kwd != new_kwd + + + + +def meta_diff(old, new): + """ + Diffs the two provided Meta definitions (dicts). + """ + + # First, diff unique_together + old_unique_together = eval(old.get('unique_together', "[]")) + new_unique_together = eval(new.get('unique_together', "[]")) + + added_uniques = set() + removed_uniques = set() + + for entry in old_unique_together: + if entry not in new_unique_together: + removed_uniques.add(tuple(entry)) + + for entry in new_unique_together: + if entry not in old_unique_together: + added_uniques.add(tuple(entry)) + + return added_uniques, removed_uniques + + +### Used to work out what columns any fields affect ### + +def get_field_column(model, field_name): + return model._meta.get_field_by_name(field_name)[0].column + + +### Creates SQL snippets for various common operations + + +def triples_to_defs(app, model, fields): + # Turn the (class, args, kwargs) format into a string + for field, triple in fields.items(): + triple = remove_useless_attributes(triple) + if triple is None: + print "WARNING: Cannot get definition for '%s' on '%s'. Please edit the migration manually." % ( + field, + model_key(model), + ) + fields[field] = FIELD_NEEDS_DEF_SNIPPET + else: + fields[field] = make_field_constructor( + app, + model._meta.get_field_by_name(field)[0], + triple, + ) + return fields + + +### Various code snippets we need to use + +MIGRATION_SNIPPET = """%s +from south.db import db +from django.db import models +from %s.models import * + +class Migration: + + def forwards(self, orm): + %s + + + def backwards(self, orm): + %s + + + models = %s + + %s +""" +CREATE_TABLE_SNIPPET = ''' + # Adding model '%s' + db.create_table(%r, ( + %s + )) + db.send_create_signal(%r, [%r]) + ''' +DELETE_TABLE_SNIPPET = ''' + # Deleting model '%s' + db.delete_table(%r) + ''' +CREATE_FIELD_SNIPPET = ''' + # Adding field '%s.%s' + db.add_column(%r, %r, %s) + ''' +DELETE_FIELD_SNIPPET = ''' + # Deleting field '%s.%s' + db.delete_column(%r, %r) + ''' +CHANGE_FIELD_SNIPPET = ''' + # Changing field '%s.%s' + # (to signature: %s) + db.alter_column(%r, %r, %s) + ''' +CREATE_M2MFIELD_SNIPPET = ''' + # Adding ManyToManyField '%s.%s' + db.create_table('%s', ( + ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)), + ('%s', models.ForeignKey(%s, null=False)), + ('%s', models.ForeignKey(%s, null=False)) + )) + ''' +DELETE_M2MFIELD_SNIPPET = ''' + # Dropping ManyToManyField '%s.%s' + db.delete_table('%s') + ''' +CREATE_UNIQUE_SNIPPET = ''' + # Creating unique_together for [%s] on %s. + db.create_unique(%r, %r) + ''' +DELETE_UNIQUE_SNIPPET = ''' + # Deleting unique_together for [%s] on %s. + db.delete_unique(%r, %r) + ''' +RENAME_COLUMN_SNIPPET = ''' + # Renaming column for field '%(field_name)s'. 
+ db.rename_column(%(old_column)r, %(new_column)r) + ''' +FIELD_NEEDS_DEF_SNIPPET = "<< PUT FIELD DEFINITION HERE >>" + +CREATE_INDEX_SNIPPET = ''' + # Adding index on '%s.%s' + db.create_index(%r, [%r]) + ''' +DELETE_INDEX_SNIPPET = ''' + # Deleting index on '%s.%s' + db.delete_index(%r, [%r]) + ''' \ No newline at end of file diff --git a/south/management/commands/syncdb.py b/south/management/commands/syncdb.py new file mode 100644 index 000000000..5e0c1fc8c --- /dev/null +++ b/south/management/commands/syncdb.py @@ -0,0 +1,83 @@ +from django.core.management.base import NoArgsCommand, BaseCommand +from django.core.management.color import no_style +from django.utils.datastructures import SortedDict +from optparse import make_option +from south import migration +from south.db import db +from django.core.management.commands import syncdb +from django.conf import settings +from django.db import models +from django.db.models.loading import cache +from django.core import management +import sys + +def get_app_name(app): + return '.'.join( app.__name__.split('.')[0:-1] ) + +class Command(NoArgsCommand): + option_list = NoArgsCommand.option_list + ( + make_option('--noinput', action='store_false', dest='interactive', default=True, + help='Tells Django to NOT prompt the user for input of any kind.'), + make_option('--migrate', action='store_true', dest='migrate', default=False, + help='Tells South to also perform migrations after the sync. Default for during testing, and other internal calls.'), + make_option('--all', action='store_true', dest='migrate_all', default=False, + help='Makes syncdb work on all apps, even migrated ones. Be careful!'), + ) + if '--verbosity' not in [opt.get_opt_string() for opt in BaseCommand.option_list]: + option_list += ( + make_option('--verbosity', action='store', dest='verbosity', default='1', + type='choice', choices=['0', '1', '2'], + help='Verbosity level; 0=minimal output, 1=normal output, 2=all output'), + ) + help = "Create the database tables for all apps in INSTALLED_APPS whose tables haven't already been created, except those which use migrations." + + def handle_noargs(self, migrate_all=False, **options): + # Work out what uses migrations and so doesn't need syncing + apps_needing_sync = [] + apps_migrated = [] + for app in models.get_apps(): + app_name = get_app_name(app) + migrations = migration.get_app(app) + if migrations is None or migrate_all: + apps_needing_sync.append(app_name) + else: + # This is a migrated app, leave it + apps_migrated.append(app_name) + verbosity = int(options.get('verbosity', 0)) + + # Run syncdb on only the ones needed + if verbosity: + print "Syncing..." + + old_installed, settings.INSTALLED_APPS = settings.INSTALLED_APPS, apps_needing_sync + old_app_store, cache.app_store = cache.app_store, SortedDict([ + (k, v) for (k, v) in cache.app_store.items() + if get_app_name(k) in apps_needing_sync + ]) + + # This will allow the setting of the MySQL storage engine, for example. + db.connection_init() + + # OK, run the actual syncdb + syncdb.Command().execute(**options) + + settings.INSTALLED_APPS = old_installed + cache.app_store = old_app_store + + # Migrate if needed + if options.get('migrate', True): + if verbosity: + print "Migrating..." 
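+            # Hand off to South's own 'migrate' management command for the
+            # apps that were skipped by the sync above.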
+ management.call_command('migrate', **options) + + # Be obvious about what we did + if verbosity: + print "\nSynced:\n > %s" % "\n > ".join(apps_needing_sync) + + if options.get('migrate', True): + if verbosity: + print "\nMigrated:\n - %s" % "\n - ".join(apps_migrated) + else: + if verbosity: + print "\nNot synced (use migrations):\n - %s" % "\n - ".join(apps_migrated) + print "(use ./manage.py migrate to migrate these)" diff --git a/south/management/commands/test.py b/south/management/commands/test.py new file mode 100644 index 000000000..0607b53c8 --- /dev/null +++ b/south/management/commands/test.py @@ -0,0 +1,27 @@ +from django.core import management +from django.core.management.commands import test +from django.core.management.commands import syncdb +from django.conf import settings + +from syncdb import Command as SyncDbCommand + + +class MigrateAndSyncCommand(SyncDbCommand): + option_list = SyncDbCommand.option_list + for opt in option_list: + if "--migrate" == opt.get_opt_string(): + opt.default = True + break + + +class Command(test.Command): + + def handle(self, *args, **kwargs): + management.get_commands() + if not hasattr(settings, "SOUTH_TESTS_MIGRATE") or not settings.SOUTH_TESTS_MIGRATE: + # point at the core syncdb command when creating tests + # tests should always be up to date with the most recent model structure + management._commands['syncdb'] = 'django.core' + else: + management._commands['syncdb'] = MigrateAndSyncCommand() + super(Command, self).handle(*args, **kwargs) \ No newline at end of file diff --git a/south/management/commands/testserver.py b/south/management/commands/testserver.py new file mode 100644 index 000000000..385955014 --- /dev/null +++ b/south/management/commands/testserver.py @@ -0,0 +1,27 @@ +from django.core import management +from django.core.management.commands import testserver +from django.core.management.commands import syncdb +from django.conf import settings + +from syncdb import Command as SyncDbCommand + + +class MigrateAndSyncCommand(SyncDbCommand): + option_list = SyncDbCommand.option_list + for opt in option_list: + if "--migrate" == opt.get_opt_string(): + opt.default = True + break + + +class Command(testserver.Command): + + def handle(self, *args, **kwargs): + management.get_commands() + if not hasattr(settings, "SOUTH_TESTS_MIGRATE") or not settings.SOUTH_TESTS_MIGRATE: + # point at the core syncdb command when creating tests + # tests should always be up to date with the most recent model structure + management._commands['syncdb'] = 'django.core' + else: + management._commands['syncdb'] = MigrateAndSyncCommand() + super(Command, self).handle(*args, **kwargs) \ No newline at end of file diff --git a/south/migration.py b/south/migration.py new file mode 100644 index 000000000..7a731329a --- /dev/null +++ b/south/migration.py @@ -0,0 +1,612 @@ +""" +Main migration logic. +""" + +import datetime +import os +import sys +import traceback +import inspect + +from django.conf import settings +from django.db import models +from django.core.exceptions import ImproperlyConfigured +from django.core.management import call_command + +from south.models import MigrationHistory +from south.db import db +from south.orm import LazyFakeORM, FakeORM +from south.signals import * + +def get_app(app): + """ + Returns the migrations module for the given app model name/module, or None + if it does not use migrations. 
+ """ + if isinstance(app, (str, unicode)): + # If it's a string, use the models module + app = models.get_app(app) + mod = __import__(app.__name__[:-7], {}, {}, ['migrations']) + if hasattr(mod, 'migrations'): + return getattr(mod, 'migrations') + + +def get_migrated_apps(): + """ + Returns all apps with migrations. + """ + for mapp in models.get_apps(): + app = get_app(mapp) + if app: + yield app + + +def get_app_name(app): + """ + Returns the _internal_ app name for the given app module. + i.e. for will return 'auth' + """ + return app.__name__.split('.')[-2] + + +def get_app_fullname(app): + """ + Returns the full python name of an app - e.g. django.contrib.auth + """ + return app.__name__[:-11] + + +def short_from_long(app_name): + return app_name.split(".")[-1] + + +def get_migration_names(app): + """ + Returns a list of migration file names for the given app. + """ + if getattr(settings, "SOUTH_USE_PYC", False): + allowed_extensions = (".pyc", ".py") + ignored_files = ("__init__.pyc", "__init__.py") + else: + allowed_extensions = (".py",) + ignored_files = ("__init__.py",) + + return sorted(set([ + os.path.splitext(filename)[0] + for filename in os.listdir(os.path.dirname(app.__file__)) + if os.path.splitext(filename)[1] in allowed_extensions and filename not in ignored_files and not filename.startswith(".") + ])) + + +def get_migration_classes(app): + """ + Returns a list of migration classes (one for each migration) for the app. + """ + for name in get_migration_names(app): + yield get_migration(app, name) + + +def get_migration(app, name): + """ + Returns the migration class implied by 'name'. + """ + try: + module = __import__(app.__name__ + "." + name, '', '', ['Migration']) + migclass = module.Migration + migclass.orm = LazyFakeORM(migclass, get_app_name(app)) + module._ = lambda x: x # Fake i18n + module.datetime = datetime + return migclass + except ImportError: + print " ! Migration %s:%s probably doesn't exist." % (get_app_name(app), name) + print " - Traceback:" + raise + except Exception: + print "While loading migration '%s.%s':" % (get_app_name(app), name) + raise + + +def all_migrations(): + return dict([ + (app, dict([(name, get_migration(app, name)) for name in get_migration_names(app)])) + for app in get_migrated_apps() + ]) + + +def dependency_tree(): + tree = all_migrations() + + # Annotate tree with 'backwards edges' + for app, classes in tree.items(): + for name, cls in classes.items(): + if not hasattr(cls, "_dependency_parents"): + cls._dependency_parents = [] + if not hasattr(cls, "_dependency_children"): + cls._dependency_children = [] + # Get forwards dependencies + if hasattr(cls, "depends_on"): + for dapp, dname in cls.depends_on: + dapp = get_app(dapp) + if dapp not in tree: + print "Migration %s in app %s depends on unmigrated app %s." % ( + name, + get_app_name(app), + dapp, + ) + sys.exit(1) + if dname not in tree[dapp]: + print "Migration %s in app %s depends on nonexistent migration %s in app %s." % ( + name, + get_app_name(app), + dname, + get_app_name(dapp), + ) + sys.exit(1) + cls._dependency_parents.append((dapp, dname)) + if not hasattr(tree[dapp][dname], "_dependency_children"): + tree[dapp][dname]._dependency_children = [] + tree[dapp][dname]._dependency_children.append((app, name)) + # Get backwards dependencies + if hasattr(cls, "needed_by"): + for dapp, dname in cls.needed_by: + dapp = get_app(dapp) + if dapp not in tree: + print "Migration %s in app %s claims to be needed by unmigrated app %s." 
% ( + name, + get_app_name(app), + dapp, + ) + sys.exit(1) + if dname not in tree[dapp]: + print "Migration %s in app %s claims to be needed by nonexistent migration %s in app %s." % ( + name, + get_app_name(app), + dname, + get_app_name(dapp), + ) + sys.exit(1) + cls._dependency_children.append((dapp, dname)) + if not hasattr(tree[dapp][dname], "_dependency_parents"): + tree[dapp][dname]._dependency_parents = [] + tree[dapp][dname]._dependency_parents.append((app, name)) + + # Sanity check whole tree + for app, classes in tree.items(): + for name, cls in classes.items(): + cls.dependencies = dependencies(tree, app, name) + + return tree + + +def nice_trace(trace): + return " -> ".join([str((get_app_name(a), n)) for a, n in trace]) + + +def dependencies(tree, app, name, trace=[]): + # Copy trace to stop pass-by-ref problems + trace = trace[:] + # Sanity check + for papp, pname in trace: + if app == papp: + if pname == name: + print "Found circular dependency: %s" % nice_trace(trace + [(app,name)]) + sys.exit(1) + else: + # See if they depend in the same app the wrong way + migrations = get_migration_names(app) + if migrations.index(name) > migrations.index(pname): + print "Found a lower migration (%s) depending on a higher migration (%s) in the same app (%s)." % (pname, name, get_app_name(app)) + print "Path: %s" % nice_trace(trace + [(app,name)]) + sys.exit(1) + # Get the dependencies of a migration + deps = [] + migration = tree[app][name] + for dapp, dname in migration._dependency_parents: + deps.extend( + dependencies(tree, dapp, dname, trace+[(app,name)]) + ) + return deps + + +def remove_duplicates(l): + m = [] + for x in l: + if x not in m: + m.append(x) + return m + + +def needed_before_forwards(tree, app, name, sameapp=True): + """ + Returns a list of migrations that must be applied before (app, name), + in the order they should be applied. + Used to make sure a migration can be applied (and to help apply up to it). + """ + app_migrations = get_migration_names(app) + needed = [] + if sameapp: + for aname in app_migrations[:app_migrations.index(name)]: + needed += needed_before_forwards(tree, app, aname, False) + needed += [(app, aname)] + for dapp, dname in tree[app][name]._dependency_parents: + needed += needed_before_forwards(tree, dapp, dname) + needed += [(dapp, dname)] + return remove_duplicates(needed) + + +def needed_before_backwards(tree, app, name, sameapp=True): + """ + Returns a list of migrations that must be unapplied before (app, name) is, + in the order they should be unapplied. + Used to make sure a migration can be unapplied (and to help unapply up to it). + """ + app_migrations = get_migration_names(app) + needed = [] + if sameapp: + for aname in reversed(app_migrations[app_migrations.index(name)+1:]): + needed += needed_before_backwards(tree, app, aname, False) + needed += [(app, aname)] + for dapp, dname in tree[app][name]._dependency_children: + needed += needed_before_backwards(tree, dapp, dname) + needed += [(dapp, dname)] + return remove_duplicates(needed) + + +def run_migrations(toprint, torun, recorder, app, migrations, fake=False, db_dry_run=False, verbosity=0): + """ + Runs the specified migrations forwards/backwards, in order. + """ + for migration in migrations: + app_name = get_app_name(app) + if verbosity: + print toprint % (app_name, migration) + + # Get migration class + klass = get_migration(app, migration) + # Find its predecessor, and attach the ORM from that as prev_orm. 
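+        # prev_orm is the frozen model state of the preceding migration; it is
+        # what backwards() runs against.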
+ all_names = get_migration_names(app) + idx = all_names.index(migration) + # First migration? The 'previous ORM' is empty. + if idx == 0: + klass.prev_orm = FakeORM(None, app) + else: + klass.prev_orm = get_migration(app, all_names[idx-1]).orm + + # If this is a 'fake' migration, do nothing. + if fake: + if verbosity: + print " (faked)" + + # OK, we should probably do something then. + else: + runfunc = getattr(klass(), torun) + args = inspect.getargspec(runfunc) + + # Get the correct ORM. + if torun == "forwards": + orm = klass.orm + else: + orm = klass.prev_orm + + db.current_orm = orm + + # If the database doesn't support running DDL inside a transaction + # *cough*MySQL*cough* then do a dry run first. + if not db.has_ddl_transactions or db_dry_run: + if not (hasattr(klass, "no_dry_run") and klass.no_dry_run): + db.dry_run = True + # Only hide SQL if this is an automatic dry run. + if not db.has_ddl_transactions: + db.debug, old_debug = False, db.debug + pending_creates = db.get_pending_creates() + db.start_transaction() + try: + if len(args[0]) == 1: # They don't want an ORM param + runfunc() + else: + runfunc(orm) + db.rollback_transactions_dry_run() + except: + traceback.print_exc() + print " ! Error found during dry run of migration! Aborting." + return False + if not db.has_ddl_transactions: + db.debug = old_debug + db.clear_run_data(pending_creates) + db.dry_run = False + elif db_dry_run: + print " - Migration '%s' is marked for no-dry-run." % migration + # If they really wanted to dry-run, then quit! + if db_dry_run: + return + + if db.has_ddl_transactions: + db.start_transaction() + try: + if len(args[0]) == 1: # They don't want an ORM param + runfunc() + else: + runfunc(orm) + db.execute_deferred_sql() + except: + if db.has_ddl_transactions: + db.rollback_transaction() + raise + else: + traceback.print_exc() + print " ! Error found during real run of migration! Aborting." + print + print " ! Since you have a database that does not support running" + print " ! schema-altering statements in transactions, we have had to" + print " ! leave it in an interim state between migrations." + if torun == "forwards": + print + print " ! You *might* be able to recover with:" + db.debug = db.dry_run = True + if len(args[0]) == 1: + klass().backwards() + else: + klass().backwards(klass.prev_orm) + print + print " ! The South developers regret this has happened, and would" + print " ! like to gently persuade you to consider a slightly" + print " ! easier-to-deal-with DBMS." + return False + else: + if db.has_ddl_transactions: + db.commit_transaction() + + if not db_dry_run: + # Record us as having done this + recorder(app_name, migration) + if not fake: + # Send a signal saying it ran + # Actually, don't - we're implementing this properly in 0.7 + #ran_migration.send(None, app=app_name, migration=migration, method=torun) + pass + + +def run_forwards(app, migrations, fake=False, db_dry_run=False, verbosity=0): + """ + Runs the specified migrations forwards, in order. 
+ """ + + def record(app_name, migration): + # Record us as having done this + record = MigrationHistory.for_migration(app_name, migration) + record.applied = datetime.datetime.utcnow() + record.save() + + return run_migrations( + toprint = " > %s: %s", + torun = "forwards", + recorder = record, + app = app, + migrations = migrations, + fake = fake, + db_dry_run = db_dry_run, + verbosity = verbosity, + ) + + +def run_backwards(app, migrations, ignore=[], fake=False, db_dry_run=False, verbosity=0): + """ + Runs the specified migrations backwards, in order, skipping those + migrations in 'ignore'. + """ + + def record(app_name, migration): + # Record us as having not done this + record = MigrationHistory.for_migration(app_name, migration) + record.delete() + + return run_migrations( + toprint = " < %s: %s", + torun = "backwards", + recorder = record, + app = app, + migrations = [x for x in migrations if x not in ignore], + fake = fake, + db_dry_run = db_dry_run, + verbosity = verbosity, + ) + + +def right_side_of(x, y): + return left_side_of(reversed(x), reversed(y)) + + +def left_side_of(x, y): + return list(y)[:len(x)] == list(x) + + +def forwards_problems(tree, forwards, done, verbosity=0): + problems = [] + for app, name in forwards: + if (app, name) not in done: + for dapp, dname in needed_before_backwards(tree, app, name): + if (dapp, dname) in done: + print " ! Migration (%s, %s) should not have been applied before (%s, %s) but was." % (get_app_name(dapp), dname, get_app_name(app), name) + problems.append(((app, name), (dapp, dname))) + return problems + + + +def backwards_problems(tree, backwards, done, verbosity=0): + problems = [] + for app, name in backwards: + if (app, name) in done: + for dapp, dname in needed_before_forwards(tree, app, name): + if (dapp, dname) not in done: + print " ! Migration (%s, %s) should have been applied before (%s, %s) but wasn't." % (get_app_name(dapp), dname, get_app_name(app), name) + problems.append(((app, name), (dapp, dname))) + return problems + + +def migrate_app(app, tree, target_name=None, resolve_mode=None, fake=False, db_dry_run=False, yes=False, verbosity=0, load_inital_data=False, skip=False): + + app_name = get_app_name(app) + verbosity = int(verbosity) + db.debug = (verbosity > 1) + + # Fire off the pre-migrate signal + pre_migrate.send(None, app=app_name) + + # Find out what delightful migrations we have + migrations = get_migration_names(app) + + # If there aren't any, quit quizically + if not migrations: + print "? You have no migrations for the '%s' app. You might want some." % app_name + return + + if target_name not in migrations and target_name not in ["zero", None]: + matches = [x for x in migrations if x.startswith(target_name)] + if len(matches) == 1: + target = migrations.index(matches[0]) + 1 + if verbosity: + print " - Soft matched migration %s to %s." % ( + target_name, + matches[0] + ) + target_name = matches[0] + elif len(matches) > 1: + if verbosity: + print " - Prefix %s matches more than one migration:" % target_name + print " " + "\n ".join(matches) + return + else: + print " ! '%s' is not a migration." % target_name + return + + # Check there's no strange ones in the database + ghost_migrations = [] + for m in MigrationHistory.objects.filter(applied__isnull = False): + try: + if get_app(m.app_name) not in tree or m.migration not in tree[get_app(m.app_name)]: + ghost_migrations.append(m) + except ImproperlyConfigured: + pass + + if ghost_migrations: + print " ! 
These migrations are in the database but not on disk:" + print " - " + "\n - ".join(["%s: %s" % (x.app_name, x.migration) for x in ghost_migrations]) + print " ! I'm not trusting myself; fix this yourself by fiddling" + print " ! with the south_migrationhistory table." + return + + # Say what we're doing + if verbosity: + print "Running migrations for %s:" % app_name + + # Get the forwards and reverse dependencies for this target + if target_name == None: + target_name = migrations[-1] + if target_name == "zero": + forwards = [] + backwards = needed_before_backwards(tree, app, migrations[0]) + [(app, migrations[0])] + else: + forwards = needed_before_forwards(tree, app, target_name) + [(app, target_name)] + # When migrating backwards we want to remove up to and including + # the next migration up in this app (not the next one, that includes other apps) + try: + migration_before_here = migrations[migrations.index(target_name)+1] + backwards = needed_before_backwards(tree, app, migration_before_here) + [(app, migration_before_here)] + except IndexError: + backwards = [] + + # Get the list of currently applied migrations from the db + current_migrations = [] + for m in MigrationHistory.objects.filter(applied__isnull = False): + try: + current_migrations.append((get_app(m.app_name), m.migration)) + except ImproperlyConfigured: + pass + + direction = None + bad = False + + # Work out the direction + applied_for_this_app = list(MigrationHistory.objects.filter(app_name=app_name, applied__isnull=False).order_by("migration")) + if target_name == "zero": + direction = -1 + elif not applied_for_this_app: + direction = 1 + elif migrations.index(target_name) > migrations.index(applied_for_this_app[-1].migration): + direction = 1 + elif migrations.index(target_name) < migrations.index(applied_for_this_app[-1].migration): + direction = -1 + else: + direction = None + + # Is the whole forward branch applied? + missing = [step for step in forwards if step not in current_migrations] + # If they're all applied, we only know it's not backwards + if not missing: + direction = None + # If the remaining migrations are strictly a right segment of the forwards + # trace, we just need to go forwards to our target (and check for badness) + else: + problems = forwards_problems(tree, forwards, current_migrations, verbosity=verbosity) + if problems: + bad = True + direction = 1 + + # What about the whole backward trace then? + if not bad: + missing = [step for step in backwards if step not in current_migrations] + # If they're all missing, stick with the forwards decision + if missing == backwards: + pass + # If what's missing is a strict left segment of backwards (i.e. + # all the higher migrations) then we need to go backwards + else: + problems = backwards_problems(tree, backwards, current_migrations, verbosity=verbosity) + if problems: + bad = True + direction = -1 + + if bad and resolve_mode not in ['merge'] and not skip: + print " ! Inconsistent migration history" + print " ! The following options are available:" + print " --merge: will just attempt the migration ignoring any potential dependency conflicts." + sys.exit(1) + + if direction == 1: + if verbosity: + print " - Migrating forwards to %s." % target_name + try: + for mapp, mname in forwards: + if (mapp, mname) not in current_migrations: + result = run_forwards(mapp, [mname], fake=fake, db_dry_run=db_dry_run, verbosity=verbosity) + if result is False: # The migrations errored, but nicely. 
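+                        # Abort; run_migrations has already reported the error.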
+ return False + finally: + # Call any pending post_syncdb signals + db.send_pending_create_signals() + # Now load initial data, only if we're really doing things and ended up at current + if not fake and not db_dry_run and load_inital_data and target_name == migrations[-1]: + if verbosity: + print " - Loading initial data for %s." % app_name + # Override Django's get_apps call temporarily to only load from the + # current app + old_get_apps, models.get_apps = ( + models.get_apps, + lambda: [models.get_app(get_app_name(app))], + ) + # Load the initial fixture + call_command('loaddata', 'initial_data', verbosity=verbosity) + # Un-override + models.get_apps = old_get_apps + elif direction == -1: + if verbosity: + print " - Migrating backwards to just after %s." % target_name + for mapp, mname in backwards: + if (mapp, mname) in current_migrations: + run_backwards(mapp, [mname], fake=fake, db_dry_run=db_dry_run, verbosity=verbosity) + else: + if verbosity: + print "- Nothing to migrate." + + # Finally, fire off the post-migrate signal + post_migrate.send(None, app=app_name) diff --git a/south/models.py b/south/models.py new file mode 100644 index 000000000..e95c79af7 --- /dev/null +++ b/south/models.py @@ -0,0 +1,19 @@ +from django.db import models + +class MigrationHistory(models.Model): + app_name = models.CharField(max_length=255) + migration = models.CharField(max_length=255) + applied = models.DateTimeField(blank=True, null=True) + + @classmethod + def for_migration(cls, app_name, migration): + try: + return cls.objects.get( + app_name = app_name, + migration = migration, + ) + except cls.DoesNotExist: + return cls( + app_name = app_name, + migration = migration, + ) \ No newline at end of file diff --git a/south/modelsinspector.py b/south/modelsinspector.py new file mode 100644 index 000000000..a63254733 --- /dev/null +++ b/south/modelsinspector.py @@ -0,0 +1,322 @@ +""" +Like south.modelsparser, but using introspection where possible +rather than direct inspection of models.py. +""" + +import datetime +import re + +import modelsparser +from south.utils import get_attribute + +from django.db import models +from django.db.models.base import ModelBase, Model +from django.db.models.fields import NOT_PROVIDED +from django.conf import settings +from django.utils.functional import Promise +from django.contrib.contenttypes import generic +from django.utils.datastructures import SortedDict + +NOISY = True + +# Gives information about how to introspect certain fields. +# This is a list of triples; the first item is a list of fields it applies to, +# (note that isinstance is used, so superclasses are perfectly valid here) +# the second is a list of positional argument descriptors, and the third +# is a list of keyword argument descriptors. +# Descriptors are of the form: +# [attrname, options] +# Where attrname is the attribute on the field to get the value from, and options +# is an optional dict. +# +# The introspector uses the combination of all matching entries, in order. 
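+# e.g. the (models.Field,) entry below maps the "null" keyword to the field's
+# `null` attribute, with a default of False that the introspector omits when unchanged.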
+introspection_details = [ + ( + (models.Field, ), + [], + { + "null": ["null", {"default": False}], + "blank": ["blank", {"default": False, "ignore_if":"primary_key"}], + "primary_key": ["primary_key", {"default": False}], + "max_length": ["max_length", {"default": None}], + "unique": ["_unique", {"default": False}], + "db_index": ["db_index", {"default": False}], + "default": ["default", {"default": NOT_PROVIDED}], + "db_column": ["db_column", {"default": None}], + "db_tablespace": ["db_tablespace", {"default": settings.DEFAULT_INDEX_TABLESPACE}], + }, + ), + ( + (models.ForeignKey, models.OneToOneField), + [], + { + "to": ["rel.to", {}], + "to_field": ["rel.field_name", {"default_attr": "rel.to._meta.pk.name"}], + "related_name": ["rel.related_name", {"default": None}], + "db_index": ["db_index", {"default": True}], + }, + ), + ( + (models.ManyToManyField,), + [], + { + "to": ["rel.to", {}], + "symmetrical": ["rel.symmetrical", {"default": True}], + }, + ), + ( + (models.DateField, models.TimeField), + [], + { + "auto_now": ["auto_now", {"default": False}], + "auto_now_add": ["auto_now_add", {"default": False}], + }, + ), + ( + (models.DecimalField, ), + [], + { + "max_digits": ["max_digits", {"default": None}], + "decimal_places": ["decimal_places", {"default": None}], + }, + ), + ( + (models.BooleanField, ), + [], + { + "default": ["default", {"default": NOT_PROVIDED, "converter": bool}], + }, + ), + ( + (models.FilePathField, ), + [], + { + "path": ["path", {"default": ''}], + "match": ["match", {"default": None}], + "recursive": ["recursive", {"default": False}], + }, + ), + ( + (generic.GenericRelation, ), + [], + { + "to": ["rel.to", {}], + "symmetrical": ["rel.symmetrical", {"default": True}], + "object_id_field": ["object_id_field_name", {"default": "object_id"}], + "content_type_field": ["content_type_field_name", {"default": "content_type"}], + "blank": ["blank", {"default": True}], + }, + ), +] + +# Regexes of allowed field full paths +allowed_fields = [ + "^django\.db", + "^django\.contrib\.contenttypes\.generic", + "^django\.contrib\.localflavor", +] + +# Similar, but for Meta, so just the inner level (kwds). +meta_details = { + "db_table": ["db_table", {"default_attr_concat": ["%s_%s", "app_label", "module_name"]}], + "db_tablespace": ["db_tablespace", {"default": settings.DEFAULT_TABLESPACE}], + "unique_together": ["unique_together", {"default": []}], +} + +# 2.4 compatability +any = lambda x: reduce(lambda y, z: y or z, x, False) + + +def add_introspection_rules(rules=[], patterns=[]): + "Allows you to add some introspection rules at runtime, e.g. for 3rd party apps." + assert isinstance(rules, (list, tuple)) + assert isinstance(patterns, (list, tuple)) + allowed_fields.extend(patterns) + introspection_details.extend(rules) + + +def can_introspect(field): + """ + Returns True if we are allowed to introspect this field, False otherwise. + ('allowed' means 'in core'. Custom fields can declare they are introspectable + by the default South rules by adding the attribute _south_introspects = True.) + """ + # Check for special attribute + if hasattr(field, "_south_introspects") and field._south_introspects: + return True + # Check it's an introspectable field + full_name = "%s.%s" % (field.__class__.__module__, field.__class__.__name__) + for regex in allowed_fields: + if re.match(regex, full_name): + return True + return False + + +def matching_details(field): + """ + Returns the union of all matching entries in introspection_details for the field. 
+ """ + our_args = [] + our_kwargs = {} + for classes, args, kwargs in introspection_details: + if any([isinstance(field, x) for x in classes]): + our_args.extend(args) + our_kwargs.update(kwargs) + return our_args, our_kwargs + + +class IsDefault(Exception): + """ + Exception for when a field contains its default value. + """ + + +def get_value(field, descriptor): + """ + Gets an attribute value from a Field instance and formats it. + """ + attrname, options = descriptor + value = get_attribute(field, attrname) + # Lazy-eval functions get eval'd. + if isinstance(value, Promise): + value = unicode(value) + # If the value is the same as the default, omit it for clarity + if "default" in options and value == options['default']: + raise IsDefault + # If there's an ignore_if, use it + if "ignore_if" in options: + if get_attribute(field, options['ignore_if']): + raise IsDefault + # Some default values need to be gotten from an attribute too. + if "default_attr" in options: + default_value = get_attribute(field, options['default_attr']) + if value == default_value: + raise IsDefault + # Some are made from a formatting string and several attrs (e.g. db_table) + if "default_attr_concat" in options: + format, attrs = options['default_attr_concat'][0], options['default_attr_concat'][1:] + default_value = format % tuple(map(lambda x: get_attribute(field, x), attrs)) + if value == default_value: + raise IsDefault + # Callables get called. + if callable(value) and not isinstance(value, ModelBase): + # Datetime.datetime.now is special, as we can access it from the eval + # context (and because it changes all the time; people will file bugs otherwise). + if value == datetime.datetime.now: + return "datetime.datetime.now" + if value == datetime.datetime.utcnow: + return "datetime.datetime.utcnow" + if value == datetime.date.today: + return "datetime.date.today" + # All other callables get called. + value = value() + # Models get their own special repr() + if isinstance(value, ModelBase): + # If it's a proxy model, follow it back to its non-proxy parent + if getattr(value._meta, "proxy", False): + value = value._meta.proxy_for_model + return "orm['%s.%s']" % (value._meta.app_label, value._meta.object_name) + # As do model instances + if isinstance(value, Model): + return "orm['%s.%s'].objects.get(pk=%r)" % (value.__class__._meta.app_label, value.__class__._meta.object_name, value.pk) + # Now, apply the converter func if there is one + if "converter" in options: + value = options['converter'](value) + # Return the final value + return repr(value) + + +def introspector(field): + """ + Given a field, introspects its definition triple. + """ + arg_defs, kwarg_defs = matching_details(field) + args = [] + kwargs = {} + # For each argument, use the descriptor to get the real value. + for defn in arg_defs: + try: + args.append(get_value(field, defn)) + except IsDefault: + pass + for kwd, defn in kwarg_defs.items(): + try: + kwargs[kwd] = get_value(field, defn) + except IsDefault: + pass + return args, kwargs + + +def get_model_fields(model, m2m=False): + """ + Given a model class, returns a dict of {field_name: field_triple} defs. + """ + + field_defs = SortedDict() + inherited_fields = {} + + # Go through all bases (that are themselves models, but not Model) + for base in model.__bases__: + if base != models.Model and issubclass(base, models.Model): + if not base._meta.abstract: + # Looks like we need their fields, Ma. 
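+                    # Collect field definitions from concrete parent models as well.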
+ inherited_fields.update(get_model_fields(base)) + + # Now, ask the parser to have a look at this model too. + try: + parser_fields = modelsparser.get_model_fields(model, m2m) or {} + except (TypeError, IndentationError): # Almost certainly a not-real module + parser_fields = {} + + # Now, go through all the fields and try to get their definition + source = model._meta.local_fields[:] + if m2m: + source += model._meta.local_many_to_many + + for field in source: + # Does it define a south_field_triple method? + if hasattr(field, "south_field_triple"): + if NOISY: + print " ( Nativing field: %s" % field.name + field_defs[field.name] = field.south_field_triple() + # Can we introspect it? + elif can_introspect(field): + #if NOISY: + # print "Introspecting field: %s" % field.name + # Get the full field class path. + field_class = field.__class__.__module__ + "." + field.__class__.__name__ + # Run this field through the introspector + args, kwargs = introspector(field) + # That's our definition! + field_defs[field.name] = (field_class, args, kwargs) + # Hmph. Is it parseable? + elif parser_fields.get(field.name, None): + if NOISY: + print " ( Parsing field: %s" % field.name + field_defs[field.name] = parser_fields[field.name] + # Shucks, no definition! + else: + if NOISY: + print " ( Nodefing field: %s" % field.name + field_defs[field.name] = None + + return field_defs + + +def get_model_meta(model): + """ + Given a model class, will return the dict representing the Meta class. + """ + + # Get the introspected attributes + meta_def = {} + for kwd, defn in meta_details.items(): + try: + meta_def[kwd] = get_value(model._meta, defn) + except IsDefault: + pass + + return meta_def + +# Now, load the built-in South introspection plugins +import south.introspection_plugins diff --git a/south/modelsparser.py b/south/modelsparser.py new file mode 100644 index 000000000..24b0863f9 --- /dev/null +++ b/south/modelsparser.py @@ -0,0 +1,428 @@ +""" +Parsing module for models.py files. Extracts information in a more reliable +way than inspect + regexes. +Now only used as a fallback when introspection and the South custom hook both fail. +""" + +import re +import inspect +import parser +import symbol +import token +import keyword +import datetime + +from django.db import models +from django.contrib.contenttypes import generic +from django.utils.datastructures import SortedDict +from django.core.exceptions import ImproperlyConfigured + + +def name_that_thing(thing): + "Turns a symbol/token int into its name." + for name in dir(symbol): + if getattr(symbol, name) == thing: + return "symbol.%s" % name + for name in dir(token): + if getattr(token, name) == thing: + return "token.%s" % name + return str(thing) + + +def thing_that_name(name): + "Turns a name of a symbol/token into its integer value." + if name in dir(symbol): + return getattr(symbol, name) + if name in dir(token): + return getattr(token, name) + raise ValueError("Cannot convert '%s'" % name) + + +def prettyprint(tree, indent=0, omit_singles=False): + "Prettyprints the tree, with symbol/token names. For debugging." + if omit_singles and isinstance(tree, tuple) and len(tree) == 2: + return prettyprint(tree[1], indent, omit_singles) + if isinstance(tree, tuple): + return " (\n%s\n" % "".join([prettyprint(x, indent+1) for x in tree]) + \ + (" " * indent) + ")" + elif isinstance(tree, int): + return (" " * indent) + name_that_thing(tree) + else: + return " " + repr(tree) + + +def isclass(obj): + "Simple test to see if something is a class." 
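+    # True when obj's type is (a subclass of) type, i.e. obj is itself a class.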
+ return issubclass(type(obj), type) + + +def aliased_models(module): + """ + Given a models module, returns a dict mapping all alias imports of models + (e.g. import Foo as Bar) back to their original names. Bug #134. + """ + aliases = {} + for name, obj in module.__dict__.items(): + if isclass(obj) and issubclass(obj, models.Model) and obj is not models.Model: + # Test to see if this has a different name to what it should + if name != obj._meta.object_name: + aliases[name] = obj._meta.object_name + return aliases + + + +class STTree(object): + + "A syntax tree wrapper class." + + def __init__(self, tree): + self.tree = tree + + + def __eq__(self, other): + return other.tree == self.tree + + + def __hash__(self): + return hash(self.tree) + + + @property + def root(self): + return self.tree[0] + + + @property + def value(self): + return self.tree + + + def walk(self, recursive=True): + """ + Yields (symbol, subtree) for the entire subtree. + Comes out with node 1, node 1's children, node 2, etc. + """ + stack = [self.tree] + done_outer = False + while stack: + atree = stack.pop() + if isinstance(atree, tuple): + if done_outer: + yield atree[0], STTree(atree) + if recursive or not done_outer: + for bit in reversed(atree[1:]): + stack.append(bit) + done_outer = True + + + def flatten(self): + "Yields the tokens/symbols in the tree only, in order." + bits = [] + for sym, subtree in self.walk(): + if sym in token_map: + bits.append(sym) + elif sym == token.NAME: + bits.append(subtree.value) + elif sym == token.STRING: + bits.append(subtree.value) + elif sym == token.NUMBER: + bits.append(subtree.value) + return bits + + + def reform(self): + "Prints how the tree's input probably looked." + return reform(self.flatten()) + + + def findAllType(self, ntype, recursive=True): + "Returns all nodes with the given type in the tree." + for symbol, subtree in self.walk(recursive=recursive): + if symbol == ntype: + yield subtree + + + def find(self, selector): + """ + Searches the syntax tree with a CSS-like selector syntax. + You can use things like 'suite simple_stmt', 'suite, simple_stmt' + or 'suite > simple_stmt'. Not guaranteed to return in order. 
+ """ + # Split up the overall parts + patterns = [x.strip() for x in selector.split(",")] + results = [] + for pattern in patterns: + # Split up the parts + parts = re.split(r'(?:[\s]|(>))+', pattern) + # Take the first part, use it for results + if parts[0] == "^": + subresults = [self] + else: + subresults = list(self.findAllType(thing_that_name(parts[0]))) + recursive = True + # For each remaining part, do something + for part in parts[1:]: + if not subresults: + break + if part == ">": + recursive = False + elif not part: + pass + else: + thing = thing_that_name(part) + newresults = [ + list(tree.findAllType(thing, recursive)) + for tree in subresults + ] + subresults = [] + for stuff in newresults: + subresults.extend(stuff) + recursive = True + results.extend(subresults) + return results + + + def __str__(self): + return prettyprint(self.tree) + __repr__ = __str__ + + +def get_model_tree(model): + # Get the source of the model's file + try: + source = inspect.getsource(model).replace("\r\n", "\n").replace("\r","\n") + "\n" + except IOError: + return None + tree = STTree(parser.suite(source).totuple()) + # Now, we have to find it + for poss in tree.find("compound_stmt"): + if poss.value[1][0] == symbol.classdef and \ + poss.value[1][2][1].lower() == model.__name__.lower(): + # This is the tree + return poss + + +token_map = { + token.DOT: ".", + token.LPAR: "(", + token.RPAR: ")", + token.EQUAL: "=", + token.EQEQUAL: "==", + token.COMMA: ",", + token.LSQB: "[", + token.RSQB: "]", + token.AMPER: "&", + token.BACKQUOTE: "`", + token.CIRCUMFLEX: "^", + token.CIRCUMFLEXEQUAL: "^=", + token.COLON: ":", + token.DOUBLESLASH: "//", + token.DOUBLESLASHEQUAL: "//=", + token.DOUBLESTAR: "**", + token.DOUBLESLASHEQUAL: "**=", + token.GREATER: ">", + token.LESS: "<", + token.GREATEREQUAL: ">=", + token.LESSEQUAL: "<=", + token.LBRACE: "{", + token.RBRACE: "}", + token.SEMI: ";", + token.PLUS: "+", + token.MINUS: "-", + token.STAR: "*", + token.SLASH: "/", + token.VBAR: "|", + token.PERCENT: "%", + token.TILDE: "~", + token.AT: "@", + token.NOTEQUAL: "!=", + token.LEFTSHIFT: "<<", + token.RIGHTSHIFT: ">>", + token.LEFTSHIFTEQUAL: "<<=", + token.RIGHTSHIFTEQUAL: ">>=", + token.PLUSEQUAL: "+=", + token.MINEQUAL: "-=", + token.STAREQUAL: "*=", + token.SLASHEQUAL: "/=", + token.VBAREQUAL: "|=", + token.PERCENTEQUAL: "%=", + token.AMPEREQUAL: "&=", +} + + +def reform(bits): + "Returns the string that the list of tokens/symbols 'bits' represents" + output = "" + for bit in bits: + if bit in token_map: + output += token_map[bit] + elif bit[0] in [token.NAME, token.STRING, token.NUMBER]: + if keyword.iskeyword(bit[1]): + output += " %s " % bit[1] + else: + if bit[1] not in symbol.sym_name: + output += bit[1] + return output + + +def parse_arguments(argstr): + """ + Takes a string representing arguments and returns the positional and + keyword argument list and dict respectively. + All the entries in these are python source, except the dict keys. + """ + # Get the tree + tree = STTree(parser.suite(argstr).totuple()) + + # Initialise the lists + curr_kwd = None + args = [] + kwds = {} + + # Walk through, assigning things + testlists = tree.find("testlist") + for i, testlist in enumerate(testlists): + # BTW: A testlist is to the left or right of an =. 
+ items = list(testlist.walk(recursive=False)) + for j, item in enumerate(items): + if item[0] == symbol.test: + if curr_kwd: + kwds[curr_kwd] = item[1].reform() + curr_kwd = None + elif j == len(items)-1 and i != len(testlists)-1: + # Last item in a group must be a keyword, unless it's last overall + curr_kwd = item[1].reform() + else: + args.append(item[1].reform()) + return args, kwds + + +def extract_field(tree): + # Collapses the tree and tries to parse it as a field def + bits = tree.flatten() + ## Check it looks right: + # Second token should be equals + if len(bits) < 2 or bits[1] != token.EQUAL: + return + ## Split into meaningful sections + name = bits[0][1] + declaration = bits[2:] + # Find the first LPAR; stuff before that is the class. + try: + lpar_at = declaration.index(token.LPAR) + except ValueError: + return + clsname = reform(declaration[:lpar_at]) + # Now, inside that, find the last RPAR, and we'll take the stuff between + # them as the arguments + declaration.reverse() + rpar_at = (len(declaration) - 1) - declaration.index(token.RPAR) + declaration.reverse() + args = declaration[lpar_at+1:rpar_at] + # Now, extract the arguments as a list and dict + try: + args, kwargs = parse_arguments(reform(args)) + except SyntaxError: + return + # OK, extract and reform it + return name, clsname, args, kwargs + + + +def get_model_fields(model, m2m=False): + """ + Given a model class, will return the dict of name: field_constructor + mappings. + """ + tree = get_model_tree(model) + if tree is None: + return None + possible_field_defs = tree.find("^ > classdef > suite > stmt > simple_stmt > small_stmt > expr_stmt") + field_defs = {} + + # Get aliases, ready for alias fixing (#134) + try: + aliases = aliased_models(models.get_app(model._meta.app_label)) + except ImproperlyConfigured: + aliases = {} + + # Go through all the found defns, and try to parse them + for pfd in possible_field_defs: + field = extract_field(pfd) + if field: + field_defs[field[0]] = field[1:] + + inherited_fields = {} + # Go through all bases (that are themselves models, but not Model) + for base in model.__bases__: + if base != models.Model and issubclass(base, models.Model): + inherited_fields.update(get_model_fields(base, m2m)) + + # Now, go through all the fields and try to get their definition + source = model._meta.local_fields[:] + if m2m: + source += model._meta.local_many_to_many + fields = SortedDict() + for field in source: + # Get its name + fieldname = field.name + if isinstance(field, (models.related.RelatedObject, generic.GenericRel)): + continue + # Now, try to get the defn + if fieldname in field_defs: + fields[fieldname] = field_defs[fieldname] + # Try the South definition workaround? + elif hasattr(field, 'south_field_triple'): + fields[fieldname] = field.south_field_triple() + elif hasattr(field, 'south_field_definition'): + print "Your custom field %s provides the outdated south_field_definition method.\nPlease consider implementing south_field_triple too; it's more reliably evaluated." % field + fields[fieldname] = field.south_field_definition() + # Try a parent? + elif fieldname in inherited_fields: + fields[fieldname] = inherited_fields[fieldname] + # Is it a _ptr? + elif fieldname.endswith("_ptr"): + fields[fieldname] = ("models.OneToOneField", ["orm['%s.%s']" % (field.rel.to._meta.app_label, field.rel.to._meta.object_name)], {}) + # Try a default for 'id'. 
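+        # (An otherwise-unparsed 'id' field is assumed to be the implicit
+        # AutoField primary key.)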
+ elif fieldname == "id": + fields[fieldname] = ("models.AutoField", [], {"primary_key": "True"}) + else: + fields[fieldname] = None + + # Now, try seeing if we can resolve the values of defaults, and fix aliases. + for field, defn in fields.items(): + + if not isinstance(defn, (list, tuple)): + continue # We don't have a defn for this one, or it's a string + + # Fix aliases if we can (#134) + for i, arg in enumerate(defn[1]): + if arg in aliases: + defn[1][i] = aliases[arg] + + # Fix defaults if we can + for arg, val in defn[2].items(): + if arg in ['default']: + try: + # Evaluate it in a close-to-real fake model context + real_val = eval(val, __import__(model.__module__, {}, {}, ['']).__dict__, model.__dict__) + # If we can't resolve it, stick it in verbatim + except: + pass # TODO: Raise nice error here? + # Hm, OK, we got a value. Callables are not frozen (see #132, #135) + else: + if callable(real_val): + # HACK + # However, if it's datetime.now, etc., that's special + for datetime_key in datetime.datetime.__dict__.keys(): + # No, you can't use __dict__.values. It's different. + dtm = getattr(datetime.datetime, datetime_key) + if real_val == dtm: + if not val.startswith("datetime.datetime"): + defn[2][arg] = "datetime." + val + break + else: + defn[2][arg] = repr(real_val) + + + return fields diff --git a/south/orm.py b/south/orm.py new file mode 100644 index 000000000..b5ae63d8d --- /dev/null +++ b/south/orm.py @@ -0,0 +1,365 @@ +""" +South's fake ORM; lets you not have to write SQL inside migrations. +Roughly emulates the real Django ORM, to a point. +""" + +import inspect +import datetime + +from django.db import models +from django.db.models.loading import cache +from django.core.exceptions import ImproperlyConfigured + +from south.db import db +from south.utils import ask_for_it_by_name +from south.hacks import hacks + + +class ModelsLocals(object): + + """ + Custom dictionary-like class to be locals(); + falls back to lowercase search for items that don't exist + (because we store model names as lowercase). + """ + + def __init__(self, data): + self.data = data + + def __getitem__(self, key): + try: + return self.data[key] + except KeyError: + return self.data[key.lower()] + + +# Stores already-created ORMs. +_orm_cache = {} + +def FakeORM(*args): + """ + Creates a Fake Django ORM. + This is actually a memoised constructor; the real class is _FakeORM. + """ + if not args in _orm_cache: + _orm_cache[args] = _FakeORM(*args) + return _orm_cache[args] + + +class LazyFakeORM(object): + """ + In addition to memoising the ORM call, this function lazily generates them + for a Migration class. Assign the result of this to (for example) + .orm, and as soon as .orm is accessed the ORM will be created. + """ + + def __init__(self, *args): + self._args = args + self.orm = None + + def __get__(self, obj, type=None): + if not self.orm: + self.orm = FakeORM(*self._args) + return self.orm + + +class _FakeORM(object): + + """ + Simulates the Django ORM at some point in time, + using a frozen definition on the Migration class. + """ + + def __init__(self, cls, app): + self.default_app = app + self.cls = cls + # Try loading the models off the migration class; default to no models. 
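+ # Illustrative aside, added for clarity (not part of the original South code):
+ # cls.models is the frozen definition a migration carries, roughly of the form
+ #
+ #     models = {
+ #         "fakeapp.spam": {
+ #             "id": ("models.AutoField", [], {"primary_key": "True"}),
+ #             "name": ("models.CharField", [], {"max_length": "255"}),
+ #             "Meta": {"db_table": "'southtest_spam'"},
+ #         },
+ #     }
+ #
+ # everything is kept as evaluatable source strings, which make_model() below
+ # turns back into real field instances on a dynamically built model class.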
+ self.models = {} + try: + self.models_source = cls.models + except AttributeError: + return + + # Start a 'new' AppCache + hacks.clear_app_cache() + + # Now, make each model's data into a FakeModel + # We first make entries for each model that are just its name + # This allows us to have circular model dependency loops + model_names = [] + for name, data in self.models_source.items(): + # Make sure there's some kind of Meta + if "Meta" not in data: + data['Meta'] = {} + try: + app_name, model_name = name.split(".", 1) + except ValueError: + app_name = self.default_app + model_name = name + name = "%s.%s" % (app_name, model_name) + + name = name.lower() + self.models[name] = name + model_names.append((name, app_name, model_name, data)) + + for name, app_name, model_name, data in model_names: + self.models[name] = self.make_model(app_name, model_name, data) + + # And perform the second run to iron out any circular/backwards depends. + self.retry_failed_fields() + + # Force evaluation of relations on the models now + for model in self.models.values(): + model._meta.get_all_field_names() + + # Reset AppCache + hacks.unclear_app_cache() + + + def __iter__(self): + return iter(self.models.values()) + + + def __getattr__(self, key): + fullname = (self.default_app+"."+key).lower() + try: + return self.models[fullname] + except KeyError: + raise AttributeError("The model '%s' from the app '%s' is not available in this migration." % (key, self.default_app)) + + + def __getitem__(self, key): + # Detect if they asked for a field on a model or not. + if ":" in key: + key, fname = key.split(":") + else: + fname = None + # Now, try getting the model + key = key.lower() + try: + model = self.models[key] + except KeyError: + try: + app, model = key.split(".", 1) + except ValueError: + raise KeyError("The model '%s' is not in appname.modelname format." % key) + else: + raise KeyError("The model '%s' from the app '%s' is not available in this migration." % (model, app)) + # If they asked for a field, get it. + if fname: + return model._meta.get_field_by_name(fname)[0] + else: + return model + + + def eval_in_context(self, code, app, extra_imports={}): + "Evaluates the given code in the context of the migration file." + + # Drag in the migration module's locals (hopefully including models.py) + fake_locals = dict(inspect.getmodule(self.cls).__dict__) + + # Remove all models from that (i.e. from modern models.py), to stop pollution + for key, value in fake_locals.items(): + if isinstance(value, type) and issubclass(value, models.Model) and hasattr(value, "_meta"): + del fake_locals[key] + + # We add our models into the locals for the eval + fake_locals.update(dict([ + (name.split(".")[-1], model) + for name, model in self.models.items() + ])) + + # Make sure the ones for this app override. + fake_locals.update(dict([ + (name.split(".")[-1], model) + for name, model in self.models.items() + if name.split(".")[0] == app + ])) + + # Ourselves as orm, to allow non-fail cross-app referencing + fake_locals['orm'] = self + + # And a fake _ function + fake_locals['_'] = lambda x: x + + # Datetime; there should be no datetime direct accesses + fake_locals['datetime'] = datetime + + # Now, go through the requested imports and import them. + for name, value in extra_imports.items(): + # First, try getting it out of locals. 
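+ # Illustrative aside, added for clarity (not part of the original South code):
+ # for an entry such as {"SouthFieldClass": "models.CharField"} the code below
+ # first looks up fake_locals["models"] and then walks getattr() down the
+ # dotted path, so SouthFieldClass ends up bound to the CharField class; only
+ # if that lookup fails does it fall back to a real import further down.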
+ parts = value.split(".") + try: + obj = fake_locals[parts[0]] + for part in parts[1:]: + obj = getattr(obj, part) + except (KeyError, AttributeError): + pass + else: + fake_locals[name] = obj + continue + # OK, try to import it directly + try: + fake_locals[name] = ask_for_it_by_name(value) + except ImportError: + if name == "SouthFieldClass": + raise ValueError("Cannot import the required field '%s'" % value) + else: + print "WARNING: Cannot import '%s'" % value + + # Use ModelsLocals to make lookups work right for CapitalisedModels + fake_locals = ModelsLocals(fake_locals) + + return eval(code, globals(), fake_locals) + + + def make_meta(self, app, model, data, stub=False): + "Makes a Meta class out of a dict of eval-able arguments." + results = {'app_label': app} + for key, code in data.items(): + # Some things we never want to use. + if key in ["_bases"]: + continue + # Some things we don't want with stubs. + if stub and key in ["order_with_respect_to"]: + continue + # OK, add it. + try: + results[key] = self.eval_in_context(code, app) + except (NameError, AttributeError), e: + raise ValueError("Cannot successfully create meta field '%s' for model '%s.%s': %s." % ( + key, app, model, e + )) + return type("Meta", tuple(), results) + + + def make_model(self, app, name, data): + "Makes a Model class out of the given app name, model name and pickled data." + + # Extract any bases out of Meta + if "_bases" in data['Meta']: + bases = data['Meta']['_bases'] + else: + bases = ['django.db.models.Model'] + + # Turn the Meta dict into a basic class + meta = self.make_meta(app, name, data['Meta'], data.get("_stub", False)) + + failed_fields = {} + fields = {} + stub = False + + # Now, make some fields! + for fname, params in data.items(): + # If it's the stub marker, ignore it. + if fname == "_stub": + stub = bool(params) + continue + elif fname == "Meta": + continue + elif not params: + raise ValueError("Field '%s' on model '%s.%s' has no definition." % (fname, app, name)) + elif isinstance(params, (str, unicode)): + # It's a premade definition string! Let's hope it works... + code = params + extra_imports = {} + else: + # If there's only one parameter (backwards compat), make it 3. + if len(params) == 1: + params = (params[0], [], {}) + # There should be 3 parameters. Code is a tuple of (code, what-to-import) + if len(params) == 3: + code = "SouthFieldClass(%s)" % ", ".join( + params[1] + + ["%s=%s" % (n, v) for n, v in params[2].items()] + ) + extra_imports = {"SouthFieldClass": params[0]} + else: + raise ValueError("Field '%s' on model '%s.%s' has a weird definition length (should be 1 or 3 items)." % (fname, app, name)) + + try: + # Execute it in a probably-correct context. + field = self.eval_in_context(code, app, extra_imports) + except (NameError, AttributeError, AssertionError, KeyError): + # It might rely on other models being around. Add it to the + # model for the second pass. + failed_fields[fname] = (code, extra_imports) + else: + fields[fname] = field + + # Find the app in the Django core, and get its module + more_kwds = {} + try: + app_module = models.get_app(app) + more_kwds['__module__'] = app_module.__name__ + except ImproperlyConfigured: + # The app this belonged to has vanished, but thankfully we can still + # make a mock model, so ignore the error. 
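+ # Illustrative aside, added for clarity (not part of the original South code):
+ # the fake model class itself is then built dynamically with type(), much as
+ # if it had been written by hand, roughly
+ #
+ #     Spam = type("Spam", (models.Model,), {
+ #         "__module__": "fakeapp.models",
+ #         "Meta": meta,
+ #         "name": models.CharField(max_length=255),
+ #     })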
+ more_kwds['__module__'] = '_south_mock' + + more_kwds['Meta'] = meta + + # Make our model + fields.update(more_kwds) + + model = type( + str(name), + tuple(map(ask_for_it_by_name, bases)), + fields, + ) + + # If this is a stub model, change Objects to a whiny class + if stub: + model.objects = WhinyManager() + # Also, make sure they can't instantiate it + model.__init__ = whiny_method + else: + model.objects = NoDryRunManager(model.objects) + + if failed_fields: + model._failed_fields = failed_fields + + return model + + def retry_failed_fields(self): + "Tries to re-evaluate the _failed_fields for each model." + for modelkey, model in self.models.items(): + app, modelname = modelkey.split(".", 1) + if hasattr(model, "_failed_fields"): + for fname, (code, extra_imports) in model._failed_fields.items(): + try: + field = self.eval_in_context(code, app, extra_imports) + except (NameError, AttributeError, AssertionError, KeyError), e: + # It's failed again. Complain. + raise ValueError("Cannot successfully create field '%s' for model '%s': %s." % ( + fname, modelname, e + )) + else: + # Startup that field. + model.add_to_class(fname, field) + + +class WhinyManager(object): + "A fake manager that whines whenever you try to touch it. For stub models." + + def __getattr__(self, key): + raise AttributeError("You cannot use items from a stub model.") + + +class NoDryRunManager(object): + """ + A manager that always proxies through to the real manager, + unless a dry run is in progress. + """ + + def __init__(self, real): + self.real = real + + def __getattr__(self, name): + if db.dry_run: + raise AttributeError("You are in a dry run, and cannot access the ORM.\nWrap ORM sections in 'if not db.dry_run:', or if the whole migration is only a data migration, set no_dry_run = True on the Migration class.") + return getattr(self.real, name) + + +def whiny_method(*a, **kw): + raise ValueError("You cannot instantiate a stub model.") diff --git a/south/signals.py b/south/signals.py new file mode 100644 index 000000000..c7cc4ce4a --- /dev/null +++ b/south/signals.py @@ -0,0 +1,14 @@ +""" +South-specific signals +""" + +from django.dispatch import Signal + +# Sent at the start of the migration of an app +pre_migrate = Signal(providing_args=["app"]) + +# Sent after each successful migration of an app +post_migrate = Signal(providing_args=["app"]) + +# Sent after each run of a particular migration in a direction +ran_migration = Signal(providing_args=["app","migration","method"]) diff --git a/south/tests/.gitignore b/south/tests/.gitignore new file mode 100644 index 000000000..a74b07aee --- /dev/null +++ b/south/tests/.gitignore @@ -0,0 +1 @@ +/*.pyc diff --git a/south/tests/__init__.py b/south/tests/__init__.py new file mode 100644 index 000000000..83e42a414 --- /dev/null +++ b/south/tests/__init__.py @@ -0,0 +1,67 @@ + +import unittest +import os +import sys +from django.conf import settings +from south.hacks import hacks + +# Add the tests directory so fakeapp is on sys.path +test_root = os.path.dirname(__file__) +sys.path.append(test_root) + +# Note: the individual test files are imported below this. + +class Monkeypatcher(unittest.TestCase): + + """ + Base test class for tests that play with the INSTALLED_APPS setting at runtime. 
+ """ + + def create_fake_app(self, name): + + class Fake: + pass + + fake = Fake() + fake.__name__ = name + return fake + + + def create_test_app(self): + + class Fake: + pass + + fake = Fake() + fake.__name__ = "fakeapp.migrations" + fake.__file__ = os.path.join(test_root, "fakeapp", "migrations", "__init__.py") + return fake + + + def setUp(self): + """ + Changes the Django environment so we can run tests against our test apps. + """ + # Set the installed apps + hacks.set_installed_apps(["fakeapp", "otherfakeapp"]) + + + def tearDown(self): + """ + Undoes what setUp did. + """ + hacks.reset_installed_apps() + + +# Try importing all tests if asked for (then we can run 'em) +try: + skiptest = settings.SKIP_SOUTH_TESTS +except: + skiptest = False + +if not skiptest: + from south.tests.db import * + from south.tests.logic import * + from south.tests.autodetection import * + from south.tests.logger import * + from south.tests.inspector import * diff --git a/south/tests/autodetection.py b/south/tests/autodetection.py new file mode 100644 index 000000000..980cab679 --- /dev/null +++ b/south/tests/autodetection.py @@ -0,0 +1,233 @@ +import unittest + +from south.management.commands import startmigration + +class TestComparison(unittest.TestCase): + + """ + Tests the comparison methods of startmigration. + """ + + def test_no_change(self): + "Test with a completely unchanged definition." + + self.assertEqual( + startmigration.different_attributes( + ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['southdemo.Lizard']"}), + ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['southdemo.Lizard']"}), + ), + False, + ) + + self.assertEqual( + startmigration.different_attributes( + ('django.db.models.fields.related.ForeignKey', ['ohhai', 'there'], {'to': "somewhere", "from": "there"}), + ('django.db.models.fields.related.ForeignKey', ['ohhai', 'there'], {"from": "there", 'to': "somewhere"}), + ), + False, + ) + + + def test_pos_change(self): + "Test with a changed positional argument." 
+ + self.assertEqual( + startmigration.different_attributes( + ('django.db.models.fields.CharField', ['hi'], {'to': "foo"}), + ('django.db.models.fields.CharField', [], {'to': "foo"}), + ), + True, + ) + + self.assertEqual( + startmigration.different_attributes( + ('django.db.models.fields.CharField', [], {'to': "foo"}), + ('django.db.models.fields.CharField', ['bye'], {'to': "foo"}), + ), + True, + ) + + self.assertEqual( + startmigration.different_attributes( + ('django.db.models.fields.CharField', ['pi'], {'to': "foo"}), + ('django.db.models.fields.CharField', ['pi'], {'to': "foo"}), + ), + False, + ) + + self.assertEqual( + startmigration.different_attributes( + ('django.db.models.fields.CharField', ['pisdadad'], {'to': "foo"}), + ('django.db.models.fields.CharField', ['pi'], {'to': "foo"}), + ), + True, + ) + + self.assertEqual( + startmigration.different_attributes( + ('django.db.models.fields.CharField', ['hi'], {}), + ('django.db.models.fields.CharField', [], {}), + ), + True, + ) + + self.assertEqual( + startmigration.different_attributes( + ('django.db.models.fields.CharField', [], {}), + ('django.db.models.fields.CharField', ['bye'], {}), + ), + True, + ) + + self.assertEqual( + startmigration.different_attributes( + ('django.db.models.fields.CharField', ['pi'], {}), + ('django.db.models.fields.CharField', ['pi'], {}), + ), + False, + ) + + self.assertEqual( + startmigration.different_attributes( + ('django.db.models.fields.CharField', ['pi'], {}), + ('django.db.models.fields.CharField', ['45fdfdf'], {}), + ), + True, + ) + + + def test_kwd_change(self): + "Test a changed keyword argument" + + self.assertEqual( + startmigration.different_attributes( + ('django.db.models.fields.CharField', ['pi'], {'to': "foo"}), + ('django.db.models.fields.CharField', ['pi'], {'to': "blue"}), + ), + True, + ) + + self.assertEqual( + startmigration.different_attributes( + ('django.db.models.fields.CharField', [], {'to': "foo"}), + ('django.db.models.fields.CharField', [], {'to': "blue"}), + ), + True, + ) + + self.assertEqual( + startmigration.different_attributes( + ('django.db.models.fields.CharField', ['b'], {'to': "foo"}), + ('django.db.models.fields.CharField', ['b'], {'to': "blue"}), + ), + True, + ) + + self.assertEqual( + startmigration.different_attributes( + ('django.db.models.fields.CharField', [], {'to': "foo"}), + ('django.db.models.fields.CharField', [], {}), + ), + True, + ) + + self.assertEqual( + startmigration.different_attributes( + ('django.db.models.fields.CharField', ['a'], {'to': "foo"}), + ('django.db.models.fields.CharField', ['a'], {}), + ), + True, + ) + + self.assertEqual( + startmigration.different_attributes( + ('django.db.models.fields.CharField', [], {}), + ('django.db.models.fields.CharField', [], {'to': "foo"}), + ), + True, + ) + + self.assertEqual( + startmigration.different_attributes( + ('django.db.models.fields.CharField', ['a'], {}), + ('django.db.models.fields.CharField', ['a'], {'to': "foo"}), + ), + True, + ) + + + + def test_backcompat_nochange(self): + "Test that the backwards-compatable comparison is working" + + self.assertEqual( + startmigration.different_attributes( + ('models.CharField', [], {}), + ('django.db.models.fields.CharField', [], {}), + ), + False, + ) + + self.assertEqual( + startmigration.different_attributes( + ('models.CharField', ['ack'], {}), + ('django.db.models.fields.CharField', ['ack'], {}), + ), + False, + ) + + self.assertEqual( + startmigration.different_attributes( + ('models.CharField', [], {'to':'b'}), + 
('django.db.models.fields.CharField', [], {'to':'b'}), + ), + False, + ) + + self.assertEqual( + startmigration.different_attributes( + ('models.CharField', ['hah'], {'to':'you'}), + ('django.db.models.fields.CharField', ['hah'], {'to':'you'}), + ), + False, + ) + + self.assertEqual( + startmigration.different_attributes( + ('models.CharField', ['hah'], {'to':'you'}), + ('django.db.models.fields.CharField', ['hah'], {'to':'heh'}), + ), + True, + ) + + self.assertEqual( + startmigration.different_attributes( + ('models.CharField', ['hah'], {}), + ('django.db.models.fields.CharField', [], {'to':"orm['appname.hah']"}), + ), + False, + ) + + self.assertEqual( + startmigration.different_attributes( + ('models.CharField', ['hah'], {}), + ('django.db.models.fields.CharField', [], {'to':'hah'}), + ), + True, + ) + + self.assertEqual( + startmigration.different_attributes( + ('models.CharField', ['hah'], {}), + ('django.db.models.fields.CharField', [], {'to':'rrr'}), + ), + True, + ) + + self.assertEqual( + startmigration.different_attributes( + ('models.CharField', ['hah'], {}), + ('django.db.models.fields.IntField', [], {'to':'hah'}), + ), + True, + ) \ No newline at end of file diff --git a/south/tests/db.py b/south/tests/db.py new file mode 100644 index 000000000..145ca5290 --- /dev/null +++ b/south/tests/db.py @@ -0,0 +1,357 @@ +import unittest + +from south.db import db +from django.db import connection, models + +# Create a list of error classes from the various database libraries +errors = [] +try: + from psycopg2 import ProgrammingError + errors.append(ProgrammingError) +except ImportError: + pass +errors = tuple(errors) + +class TestOperations(unittest.TestCase): + + """ + Tests if the various DB abstraction calls work. + Can only test a limited amount due to DB differences. + """ + + def setUp(self): + db.debug = False + db.clear_deferred_sql() + + def test_create(self): + """ + Test creation and deletion of tables. + """ + cursor = connection.cursor() + # It needs to take at least 2 args + self.assertRaises(TypeError, db.create_table) + self.assertRaises(TypeError, db.create_table, "test1") + # Empty tables (i.e. 
no columns) are not fine, so make at least 1 + db.create_table("test1", [('email_confirmed', models.BooleanField(default=False))]) + db.start_transaction() + # And should exist + cursor.execute("SELECT * FROM test1") + # Make sure we can't do the same query on an empty table + try: + cursor.execute("SELECT * FROM nottheretest1") + self.fail("Non-existent table could be selected!") + except: + pass + # Clear the dirty transaction + db.rollback_transaction() + db.start_transaction() + # Remove the table + db.drop_table("test1") + # Make sure it went + try: + cursor.execute("SELECT * FROM test1") + self.fail("Just-deleted table could be selected!") + except: + pass + # Clear the dirty transaction + db.rollback_transaction() + db.start_transaction() + # Try deleting a nonexistent one + try: + db.delete_table("nottheretest1") + self.fail("Non-existent table could be deleted!") + except: + pass + db.rollback_transaction() + + def test_foreign_keys(self): + """ + Tests foreign key creation, especially uppercase (see #61) + """ + Test = db.mock_model(model_name='Test', db_table='test5a', + db_tablespace='', pk_field_name='ID', + pk_field_type=models.AutoField, pk_field_args=[]) + db.start_transaction() + db.create_table("test5a", [('ID', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True))]) + db.create_table("test5b", [ + ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)), + ('UNIQUE', models.ForeignKey(Test)), + ]) + db.execute_deferred_sql() + db.rollback_transaction() + + def test_rename(self): + """ + Test column renaming + """ + cursor = connection.cursor() + db.create_table("test_rn", [('spam', models.BooleanField(default=False))]) + db.start_transaction() + # Make sure we can select the column + cursor.execute("SELECT spam FROM test_rn") + # Rename it + db.rename_column("test_rn", "spam", "eggs") + cursor.execute("SELECT eggs FROM test_rn") + try: + cursor.execute("SELECT spam FROM test_rn") + self.fail("Just-renamed column could be selected!") + except: + pass + db.rollback_transaction() + db.delete_table("test_rn") + + def test_dry_rename(self): + """ + Test column renaming while --dry-run is turned on (should do nothing) + See ticket #65 + """ + cursor = connection.cursor() + db.create_table("test_drn", [('spam', models.BooleanField(default=False))]) + db.start_transaction() + # Make sure we can select the column + cursor.execute("SELECT spam FROM test_drn") + # Rename it + db.dry_run = True + db.rename_column("test_drn", "spam", "eggs") + db.dry_run = False + cursor.execute("SELECT spam FROM test_drn") + try: + cursor.execute("SELECT eggs FROM test_drn") + self.fail("Dry-renamed new column could be selected!") + except: + pass + db.rollback_transaction() + db.delete_table("test_drn") + + def test_table_rename(self): + """ + Test column renaming + """ + cursor = connection.cursor() + db.create_table("testtr", [('spam', models.BooleanField(default=False))]) + db.start_transaction() + # Make sure we can select the column + cursor.execute("SELECT spam FROM testtr") + # Rename it + db.rename_table("testtr", "testtr2") + cursor.execute("SELECT spam FROM testtr2") + try: + cursor.execute("SELECT spam FROM testtr") + self.fail("Just-renamed column could be selected!") + except: + pass + db.rollback_transaction() + db.delete_table("testtr2") + + def test_index(self): + """ + Test the index operations + """ + db.create_table("test3", [ + ('SELECT', models.BooleanField(default=False)), + ('eggs', models.IntegerField(unique=True)), + ]) + 
db.execute_deferred_sql() + db.start_transaction() + # Add an index on that column + db.create_index("test3", ["SELECT"]) + # Add another index on two columns + db.create_index("test3", ["SELECT", "eggs"]) + # Delete them both + db.delete_index("test3", ["SELECT"]) + db.delete_index("test3", ["SELECT", "eggs"]) + # Delete the unique index/constraint + db.delete_unique("test3", ["eggs"]) + db.rollback_transaction() + db.delete_table("test3") + + def test_primary_key(self): + """ + Test the primary key operations + """ + db.create_table("test_pk", [ + ('id', models.IntegerField(primary_key=True)), + ('new_pkey', models.IntegerField()), + ('eggs', models.IntegerField(unique=True)), + ]) + db.execute_deferred_sql() + db.start_transaction() + # Remove the default primary key, and make eggs it + db.drop_primary_key("test_pk") + db.create_primary_key("test_pk", "new_pkey") + # Try inserting a now-valid row pair + db.execute("INSERT INTO test_pk (id, new_pkey, eggs) VALUES (1, 2, 3), (1, 3, 4)") + db.rollback_transaction() + db.delete_table("test_pk") + + def test_alter(self): + """ + Test altering columns/tables + """ + db.create_table("test4", [ + ('spam', models.BooleanField(default=False)), + ('eggs', models.IntegerField()), + ]) + db.start_transaction() + # Add a column + db.add_column("test4", "add1", models.IntegerField(default=3), keep_default=False) + # Add a FK with keep_default=False (#69) + User = db.mock_model(model_name='User', db_table='auth_user', db_tablespace='', pk_field_name='id', pk_field_type=models.AutoField, pk_field_args=[], pk_field_kwargs={}) + db.add_column("test4", "user", models.ForeignKey(User, null=True), keep_default=False) + db.delete_column("test4", "add1") + + db.rollback_transaction() + db.delete_table("test4") + + def test_alter_column_postgres_multiword(self): + """ + Tests altering columns with multiple words in Postgres types (issue #125) + e.g. 'datetime with time zone', look at django/db/backends/postgresql/creation.py + """ + db.create_table("test_multiword", [ + ('col_datetime', models.DateTimeField(null=True)), + ('col_integer', models.PositiveIntegerField(null=True)), + ('col_smallint', models.PositiveSmallIntegerField(null=True)), + ('col_float', models.FloatField(null=True)), + ]) + + # test if 'double precision' is preserved + db.alter_column('test_multiword', 'col_float', models.FloatField('float', null=True)) + + # test if 'CHECK ("%(column)s" >= 0)' is stripped + db.alter_column('test_multiword', 'col_integer', models.PositiveIntegerField(null=True)) + db.alter_column('test_multiword', 'col_smallint', models.PositiveSmallIntegerField(null=True)) + + # test if 'with timezone' is preserved + if db.backend_name == "postgres": + db.start_transaction() + db.execute("INSERT INTO test_multiword (col_datetime) VALUES ('2009-04-24 14:20:55+02')") + db.alter_column('test_multiword', 'col_datetime', models.DateTimeField(auto_now=True)) + assert db.execute("SELECT col_datetime = '2009-04-24 14:20:55+02' FROM test_multiword")[0][0] + db.rollback_transaction() + + + db.delete_table("test_multiword") + + def test_alter_constraints(self): + """ + Tests that going from a PostiveIntegerField to an IntegerField drops + the constraint on the database. 
+ """ + db.create_table("test_alterc", [ + ('num', models.PositiveIntegerField()), + ]) + # Add in some test values + db.execute("INSERT INTO test_alterc (num) VALUES (1), (2)") + # Ensure that adding a negative number is bad + db.start_transaction() + try: + db.execute("INSERT INTO test_alterc (num) VALUES (-3)") + except: + db.rollback_transaction() + else: + self.fail("Could insert a negative integer into a PositiveIntegerField.") + # Alter it to a normal IntegerField + db.alter_column("test_alterc", "num", models.IntegerField()) + # It should now work + db.execute("INSERT INTO test_alterc (num) VALUES (-3)") + db.delete_table("test_alterc") + + def test_unique(self): + """ + Tests creating/deleting unique constraints. + """ + db.create_table("test_unique2", [ + ('id', models.AutoField(primary_key=True)), + ]) + db.create_table("test_unique", [ + ('spam', models.BooleanField(default=False)), + ('eggs', models.IntegerField()), + ('ham', models.ForeignKey(db.mock_model('Unique2', 'test_unique2'))), + ]) + # Add a constraint + db.create_unique("test_unique", ["spam"]) + # Shouldn't do anything during dry-run + db.dry_run = True + db.delete_unique("test_unique", ["spam"]) + db.dry_run = False + db.delete_unique("test_unique", ["spam"]) + db.create_unique("test_unique", ["spam"]) + db.start_transaction() + # Test it works + db.execute("INSERT INTO test_unique2 (id) VALUES (1), (2)") + db.execute("INSERT INTO test_unique (spam, eggs, ham_id) VALUES (true, 0, 1), (false, 1, 2)") + try: + db.execute("INSERT INTO test_unique (spam, eggs, ham_id) VALUES (true, 2, 1)") + except: + db.rollback_transaction() + else: + self.fail("Could insert non-unique item.") + # Drop that, add one only on eggs + db.delete_unique("test_unique", ["spam"]) + db.execute("DELETE FROM test_unique") + db.create_unique("test_unique", ["eggs"]) + db.start_transaction() + # Test similarly + db.execute("INSERT INTO test_unique (spam, eggs, ham_id) VALUES (true, 0, 1), (false, 1, 2)") + try: + db.execute("INSERT INTO test_unique (spam, eggs, ham_id) VALUES (true, 1, 1)") + except: + db.rollback_transaction() + else: + self.fail("Could insert non-unique item.") + # Drop those, test combined constraints + db.delete_unique("test_unique", ["eggs"]) + db.execute("DELETE FROM test_unique") + db.create_unique("test_unique", ["spam", "eggs", "ham_id"]) + db.start_transaction() + # Test similarly + db.execute("INSERT INTO test_unique (spam, eggs, ham_id) VALUES (true, 0, 1), (false, 1, 1)") + try: + db.execute("INSERT INTO test_unique (spam, eggs, ham_id) VALUES (true, 0, 1)") + except: + db.rollback_transaction() + else: + self.fail("Could insert non-unique pair.") + db.delete_unique("test_unique", ["spam", "eggs", "ham_id"]) + + def test_capitalised_constraints(self): + """ + Under PostgreSQL at least, capitalised constrains must be quoted. + """ + db.start_transaction() + try: + db.create_table("test_capconst", [ + ('SOMECOL', models.PositiveIntegerField(primary_key=True)), + ]) + # Alter it so it's not got the check constraint + db.alter_column("test_capconst", "SOMECOL", models.IntegerField()) + finally: + db.rollback_transaction() + + def test_text_default(self): + """ + MySQL cannot have blank defaults on TEXT columns. 
+ """ + db.start_transaction() + try: + db.create_table("test_textdef", [ + ('textcol', models.TextField(blank=True)), + ]) + finally: + db.rollback_transaction() + + def test_add_unique_fk(self): + """ + Test adding a ForeignKey with unique=True or a OneToOneField + """ + db.create_table("test_add_unique_fk", [ + ('spam', models.BooleanField(default=False)) + ]) + db.start_transaction() + + db.add_column("test_add_unique_fk", "mock1", models.ForeignKey(db.mock_model('Mock', 'mock'), null=True, unique=True)) + db.add_column("test_add_unique_fk", "mock2", models.OneToOneField(db.mock_model('Mock', 'mock'), null=True)) + + db.rollback_transaction() + db.delete_table("test_add_unique_fk") diff --git a/south/tests/fakeapp/.gitignore b/south/tests/fakeapp/.gitignore new file mode 100644 index 000000000..a74b07aee --- /dev/null +++ b/south/tests/fakeapp/.gitignore @@ -0,0 +1 @@ +/*.pyc diff --git a/south/tests/fakeapp/__init__.py b/south/tests/fakeapp/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/south/tests/fakeapp/migrations/.gitignore b/south/tests/fakeapp/migrations/.gitignore new file mode 100644 index 000000000..a74b07aee --- /dev/null +++ b/south/tests/fakeapp/migrations/.gitignore @@ -0,0 +1 @@ +/*.pyc diff --git a/south/tests/fakeapp/migrations/0001_spam.py b/south/tests/fakeapp/migrations/0001_spam.py new file mode 100644 index 000000000..d81454867 --- /dev/null +++ b/south/tests/fakeapp/migrations/0001_spam.py @@ -0,0 +1,19 @@ +from south.db import db +from django.db import models + +class Migration: + + def forwards(self): + + # Model 'Spam' + db.create_table("southtest_spam", ( + ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)), + ('weight', models.FloatField()), + ('expires', models.DateTimeField()), + ('name', models.CharField(max_length=255)) + )) + + def backwards(self): + + db.delete_table("southtest_spam") + diff --git a/south/tests/fakeapp/migrations/0002_eggs.py b/south/tests/fakeapp/migrations/0002_eggs.py new file mode 100644 index 000000000..3ec83999f --- /dev/null +++ b/south/tests/fakeapp/migrations/0002_eggs.py @@ -0,0 +1,20 @@ +from south.db import db +from django.db import models + +class Migration: + + def forwards(self): + + Spam = db.mock_model(model_name='Spam', db_table='southtest_spam', db_tablespace='', pk_field_name='id', pk_field_type=models.AutoField) + + db.create_table("southtest_eggs", ( + ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)), + ('size', models.FloatField()), + ('quantity', models.IntegerField()), + ('spam', models.ForeignKey(Spam)), + )) + + def backwards(self): + + db.delete_table("southtest_eggs") + diff --git a/south/tests/fakeapp/migrations/0003_alter_spam.py b/south/tests/fakeapp/migrations/0003_alter_spam.py new file mode 100644 index 000000000..763fa208b --- /dev/null +++ b/south/tests/fakeapp/migrations/0003_alter_spam.py @@ -0,0 +1,22 @@ +from south.db import db +from django.db import models + +class Migration: + + needed_by = ( + ("otherfakeapp", "0003_third"), + ) + + def forwards(self): + + db.alter_column("southtest_spam", 'name', models.CharField(max_length=255, null=True)) + + def backwards(self): + + db.alter_column("southtest_spam", 'name', models.CharField(max_length=255)) + + models = { + "fakeapp.bug135": { + 'date': ('models.DateTimeField', [], {'default': 'datetime.datetime(2009, 5, 6, 15, 33, 15, 780013)'}), + } + } \ No newline at end of file diff --git a/south/tests/fakeapp/migrations/__init__.py 
b/south/tests/fakeapp/migrations/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/south/tests/fakeapp/models.py b/south/tests/fakeapp/models.py new file mode 100644 index 000000000..a7d84dced --- /dev/null +++ b/south/tests/fakeapp/models.py @@ -0,0 +1,55 @@ +# -*- coding: UTF-8 -*- + +from django.db import models +from django.contrib.auth.models import User as UserAlias + +def default_func(): + return "yays" + +# An empty case. +class Other1(models.Model): pass + +# Nastiness. +class HorribleModel(models.Model): + "A model to test the edge cases of model parsing" + + ZERO, ONE = range(2) + + # First, some nice fields + name = models.CharField(max_length=255) + short_name = models.CharField(max_length=50) + slug = models.SlugField(unique=True) + + # A ForeignKey, to a model above, and then below + o1 = models.ForeignKey(Other1) + o2 = models.ForeignKey('Other2') + + # Now to something outside + user = models.ForeignKey(UserAlias, related_name="horribles") + + # Unicode! + code = models.CharField(max_length=25, default="↑↑↓↓←→←→BA") + + # Odd defaults! + class_attr = models.IntegerField(default=ZERO) + func = models.CharField(max_length=25, default=default_func) + + # Time to get nasty. Define a non-field choices, and use it + choices = [('hello', '1'), ('world', '2')] + choiced = models.CharField(max_length=20, choices=choices) + + class Meta: + db_table = "my_fave" + verbose_name = "Dr. Strangelove," + \ + """or how I learned to stop worrying +and love the bomb""" + + # Now spread over multiple lines + multiline = \ + models.TextField( + ) + +# Special case. +class Other2(models.Model): + # Try loading a field without a newline after it (inspect hates this) + close_but_no_cigar = models.PositiveIntegerField(primary_key=True) \ No newline at end of file diff --git a/south/tests/inspector.py b/south/tests/inspector.py new file mode 100644 index 000000000..818ffefad --- /dev/null +++ b/south/tests/inspector.py @@ -0,0 +1,49 @@ +import unittest + +from south.tests import Monkeypatcher +from south.modelsinspector import * +from fakeapp.models import HorribleModel + +class TestModelInspector(Monkeypatcher): + + """ + Tests if the various parts of the modelinspector work. + """ + + def test_get_value(self): + + # Let's start nicely. 
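+ # Illustrative aside, added for clarity (not part of the original South code):
+ # get_value(field, spec) takes an [attribute_path, options] pair, walks the
+ # dotted path on the field (e.g. "rel.related_name") and returns the value as
+ # a source string; if the value matches the default declared in options it
+ # raises IsDefault instead, as exercised at the end of this test.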
+ name = HorribleModel._meta.get_field_by_name("name")[0] + slug = HorribleModel._meta.get_field_by_name("slug")[0] + user = HorribleModel._meta.get_field_by_name("user")[0] + + # Simple int retrieval + self.assertEqual( + get_value(name, ["max_length", {}]), + "255", + ) + + # Bool retrieval + self.assertEqual( + get_value(slug, ["unique", {}]), + "True", + ) + + # String retrieval + self.assertEqual( + get_value(user, ["rel.related_name", {}]), + "'horribles'", + ) + + # Default triggering + self.assertEqual( + get_value(slug, ["unique", {"default": False}]), + "True", + ) + self.assertRaises( + IsDefault, + get_value, + slug, + ["unique", {"default": True}], + ) + \ No newline at end of file diff --git a/south/tests/logger.py b/south/tests/logger.py new file mode 100644 index 000000000..cc6c6ed2c --- /dev/null +++ b/south/tests/logger.py @@ -0,0 +1,54 @@ +import os +import unittest +from django.conf import settings +from django.db import connection, models + +from south.db import db + +# +# # Create a list of error classes from the various database libraries +# errors = [] +# try: +# from psycopg2 import ProgrammingError +# errors.append(ProgrammingError) +# except ImportError: +# pass +# errors = tuple(errors) + +class TestLogger(unittest.TestCase): + + """ + Tests if the various logging functions. + """ + def setUp(self): + db.debug = False + self.test_path = os.path.join(os.path.dirname(__file__),"test.log") + + def test_db_execute_logging_nofile(self): + """ Does logging degrade nicely if SOUTH_DEBUG_ON not set? + """ + settings.SOUTH_LOGGING_ON = False # this needs to be set to False + # to avoid issues where other tests + # set this to True. settings is shared + # between these tests. + db.create_table("test9", [('email_confirmed', models.BooleanField(default=False))]) + + def test_db_execute_logging_validfile(self): + """ Does logging work when passing in a valid file? + """ + settings.SOUTH_LOGGING_ON = True + settings.SOUTH_LOGGING_FILE = self.test_path + db.create_table("test10", [('email_confirmed', models.BooleanField(default=False))]) + + # remove the test log file + os.remove(self.test_path) + + def test_db_execute_logging_missingfilename(self): + """ Does logging raise an error if there is a missing filename? + """ + settings.SOUTH_LOGGING_ON = True + settings.SOUTH_LOGGING_FILE = None + self.assertRaises(IOError, + db.create_table, "test11", [('email_confirmed', models.BooleanField(default=False))]) + + \ No newline at end of file diff --git a/south/tests/logic.py b/south/tests/logic.py new file mode 100644 index 000000000..b244696a2 --- /dev/null +++ b/south/tests/logic.py @@ -0,0 +1,271 @@ +import unittest +import datetime +import sys +import os +import StringIO + +from south import migration +from south.tests import Monkeypatcher +from south.utils import snd + +class TestMigrationLogic(Monkeypatcher): + + """ + Tests if the various logic functions in migration actually work. 
+ """ + + def test_get_app_name(self): + self.assertEqual( + "southtest", + migration.get_app_name(self.create_fake_app("southtest.migrations")), + ) + self.assertEqual( + "baz", + migration.get_app_name(self.create_fake_app("foo.bar.baz.migrations")), + ) + + + def test_get_migrated_apps(self): + + P1 = __import__("fakeapp.migrations", {}, {}, ['']) + P2 = __import__("otherfakeapp.migrations", {}, {}, ['']) + + self.assertEqual( + [P1,P2], + list(migration.get_migrated_apps()), + ) + + + def test_get_app(self): + + P1 = __import__("fakeapp.migrations", {}, {}, ['']) + + self.assertEqual(P1, migration.get_app("fakeapp")) + self.assertEqual(P1, migration.get_app(self.create_fake_app("fakeapp.models"))) + + + def test_get_app_fullname(self): + self.assertEqual( + "southtest", + migration.get_app_fullname(self.create_fake_app("southtest.migrations")), + ) + self.assertEqual( + "foo.bar.baz", + migration.get_app_fullname(self.create_fake_app("foo.bar.baz.migrations")), + ) + + + def test_get_migration_names(self): + + app = self.create_test_app() + + self.assertEqual( + ["0001_spam", "0002_eggs", "0003_alter_spam"], + migration.get_migration_names(app), + ) + + + def test_get_migration_classes(self): + + app = self.create_test_app() + + # Can't use vanilla import, modules beginning with numbers aren't in grammar + M1 = __import__("fakeapp.migrations.0001_spam", {}, {}, ['Migration']).Migration + M2 = __import__("fakeapp.migrations.0002_eggs", {}, {}, ['Migration']).Migration + M3 = __import__("fakeapp.migrations.0003_alter_spam", {}, {}, ['Migration']).Migration + + self.assertEqual( + [M1, M2, M3], + list(migration.get_migration_classes(app)), + ) + + + def test_get_migration(self): + + app = self.create_test_app() + + # Can't use vanilla import, modules beginning with numbers aren't in grammar + M1 = __import__("fakeapp.migrations.0001_spam", {}, {}, ['Migration']).Migration + M2 = __import__("fakeapp.migrations.0002_eggs", {}, {}, ['Migration']).Migration + + self.assertEqual(M1, migration.get_migration(app, "0001_spam")) + self.assertEqual(M2, migration.get_migration(app, "0002_eggs")) + + # Temporarily redirect sys.stdout during this, it whinges. 
+ stdout, sys.stdout = sys.stdout, StringIO.StringIO() + try: + self.assertRaises((ImportError, ValueError), migration.get_migration, app, "0001_jam") + finally: + sys.stdout = stdout + + + def test_all_migrations(self): + + app = migration.get_app("fakeapp") + otherapp = migration.get_app("otherfakeapp") + + self.assertEqual({ + app: { + "0001_spam": migration.get_migration(app, "0001_spam"), + "0002_eggs": migration.get_migration(app, "0002_eggs"), + "0003_alter_spam": migration.get_migration(app, "0003_alter_spam"), + }, + otherapp: { + "0001_first": migration.get_migration(otherapp, "0001_first"), + "0002_second": migration.get_migration(otherapp, "0002_second"), + "0003_third": migration.get_migration(otherapp, "0003_third"), + }, + }, + migration.all_migrations(), + ) + + + def assertListEqual(self, list1, list2): + list1 = list(list1) + list2 = list(list2) + list1.sort() + list2.sort() + return self.assertEqual(list1, list2) + + + def test_apply_migrations(self): + migration.MigrationHistory.objects.all().delete() + app = migration.get_app("fakeapp") + + # We should start with no migrations + self.assertEqual(list(migration.MigrationHistory.objects.all()), []) + + # Apply them normally + tree = migration.dependency_tree() + migration.migrate_app(app, tree, target_name=None, resolve_mode=None, fake=False, verbosity=0) + + # We should finish with all migrations + self.assertListEqual( + ( + (u"fakeapp", u"0001_spam"), + (u"fakeapp", u"0002_eggs"), + (u"fakeapp", u"0003_alter_spam"), + ), + migration.MigrationHistory.objects.values_list("app_name", "migration"), + ) + + # Now roll them backwards + migration.migrate_app(app, tree, target_name="zero", resolve_mode=None, fake=False, verbosity=0) + + # Finish with none + self.assertEqual(list(migration.MigrationHistory.objects.all()), []) + + + def test_migration_merge_forwards(self): + migration.MigrationHistory.objects.all().delete() + app = migration.get_app("fakeapp") + + # We should start with no migrations + self.assertEqual(list(migration.MigrationHistory.objects.all()), []) + + # Insert one in the wrong order + migration.MigrationHistory.objects.create( + app_name = "fakeapp", + migration = "0002_eggs", + applied = datetime.datetime.now(), + ) + + # Did it go in? + self.assertListEqual( + ( + (u"fakeapp", u"0002_eggs"), + ), + migration.MigrationHistory.objects.values_list("app_name", "migration"), + ) + + # Apply them normally + tree = migration.dependency_tree() + try: + # Redirect the error it will print to nowhere + stdout, sys.stdout = sys.stdout, StringIO.StringIO() + migration.migrate_app(app, tree, target_name=None, resolve_mode=None, fake=False, verbosity=0) + sys.stdout = stdout + except SystemExit: + pass + + # Nothing should have changed (no merge mode!) 
+ self.assertListEqual( + ( + (u"fakeapp", u"0002_eggs"), + ), + migration.MigrationHistory.objects.values_list("app_name", "migration"), + ) + + # Apply with merge + migration.migrate_app(app, tree, target_name=None, resolve_mode="merge", fake=False, verbosity=0) + + # We should finish with all migrations + self.assertListEqual( + ( + (u"fakeapp", u"0001_spam"), + (u"fakeapp", u"0002_eggs"), + (u"fakeapp", u"0003_alter_spam"), + ), + migration.MigrationHistory.objects.values_list("app_name", "migration"), + ) + + # Now roll them backwards + migration.migrate_app(app, tree, target_name="0002", resolve_mode=None, fake=False, verbosity=0) + migration.migrate_app(app, tree, target_name="0001", resolve_mode=None, fake=True, verbosity=0) + migration.migrate_app(app, tree, target_name="zero", resolve_mode=None, fake=False, verbosity=0) + + # Finish with none + self.assertEqual(list(migration.MigrationHistory.objects.all()), []) + + def test_alter_column_null(self): + def null_ok(): + from django.db import connection, transaction + # the DBAPI introspection module fails on postgres NULLs. + cursor = connection.cursor() + try: + cursor.execute("INSERT INTO southtest_spam (id, weight, expires, name) VALUES (100, 10.1, now(), NULL);") + except: + transaction.rollback() + return False + else: + cursor.execute("DELETE FROM southtest_spam") + transaction.commit() + return True + + app = migration.get_app("fakeapp") + tree = migration.dependency_tree() + self.assertEqual(list(migration.MigrationHistory.objects.all()), []) + + # by default name is NOT NULL + migration.migrate_app(app, tree, target_name="0002", resolve_mode=None, fake=False, verbosity=0) + self.failIf(null_ok()) + + # after 0003, it should be NULL + migration.migrate_app(app, tree, target_name="0003", resolve_mode=None, fake=False, verbosity=0) + self.assert_(null_ok()) + + # make sure it is NOT NULL again + migration.migrate_app(app, tree, target_name="0002", resolve_mode=None, fake=False, verbosity=0) + self.failIf(null_ok(), 'name not null after migration') + + # finish with no migrations, otherwise other tests fail... 
+ migration.migrate_app(app, tree, target_name="zero", resolve_mode=None, fake=False, verbosity=0) + self.assertEqual(list(migration.MigrationHistory.objects.all()), []) + + def test_dependencies(self): + + fakeapp = migration.get_app("fakeapp") + otherfakeapp = migration.get_app("otherfakeapp") + + # Test a simple path + tree = migration.dependency_tree() + self.assertEqual( + map(snd, migration.needed_before_forwards(tree, fakeapp, "0003_alter_spam")), + ['0001_spam', '0002_eggs'], + ) + + # And a complex one, with both back and forwards deps + self.assertEqual( + map(snd, migration.needed_before_forwards(tree, otherfakeapp, "0003_third")), + ['0001_spam', '0001_first', '0002_second', '0002_eggs', '0003_alter_spam'], + ) \ No newline at end of file diff --git a/south/tests/otherfakeapp/.gitignore b/south/tests/otherfakeapp/.gitignore new file mode 100644 index 000000000..a74b07aee --- /dev/null +++ b/south/tests/otherfakeapp/.gitignore @@ -0,0 +1 @@ +/*.pyc diff --git a/south/tests/otherfakeapp/__init__.py b/south/tests/otherfakeapp/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/south/tests/otherfakeapp/migrations/.gitignore b/south/tests/otherfakeapp/migrations/.gitignore new file mode 100644 index 000000000..a74b07aee --- /dev/null +++ b/south/tests/otherfakeapp/migrations/.gitignore @@ -0,0 +1 @@ +/*.pyc diff --git a/south/tests/otherfakeapp/migrations/0001_first.py b/south/tests/otherfakeapp/migrations/0001_first.py new file mode 100644 index 000000000..ad9c09599 --- /dev/null +++ b/south/tests/otherfakeapp/migrations/0001_first.py @@ -0,0 +1,15 @@ +from south.db import db +from django.db import models + +class Migration: + + depends_on = ( + ("fakeapp", "0001_spam"), + ) + + def forwards(self): + pass + + def backwards(self): + pass + diff --git a/south/tests/otherfakeapp/migrations/0002_second.py b/south/tests/otherfakeapp/migrations/0002_second.py new file mode 100644 index 000000000..7c0fb0cf2 --- /dev/null +++ b/south/tests/otherfakeapp/migrations/0002_second.py @@ -0,0 +1,11 @@ +from south.db import db +from django.db import models + +class Migration: + + def forwards(self): + pass + + def backwards(self): + pass + diff --git a/south/tests/otherfakeapp/migrations/0003_third.py b/south/tests/otherfakeapp/migrations/0003_third.py new file mode 100644 index 000000000..f67024142 --- /dev/null +++ b/south/tests/otherfakeapp/migrations/0003_third.py @@ -0,0 +1,10 @@ +from south.db import db +from django.db import models + +class Migration: + + def forwards(self): + pass + + def backwards(self): + pass \ No newline at end of file diff --git a/south/tests/otherfakeapp/migrations/__init__.py b/south/tests/otherfakeapp/migrations/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/south/tests/otherfakeapp/models.py b/south/tests/otherfakeapp/models.py new file mode 100644 index 000000000..93a4b8edf --- /dev/null +++ b/south/tests/otherfakeapp/models.py @@ -0,0 +1 @@ +# This file left intentionally blank. \ No newline at end of file diff --git a/south/utils.py b/south/utils.py new file mode 100644 index 000000000..ff63aa73b --- /dev/null +++ b/south/utils.py @@ -0,0 +1,39 @@ +""" +Generally helpful utility functions. +""" + + +def _ask_for_it_by_name(name): + "Returns an object referenced by absolute path." + bits = name.split(".") + + ## what if there is no absolute reference? 
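+ # Illustrative aside, added for clarity (not part of the original South code):
+ # a dotted name such as "django.db.models.CharField" imports django.db.models
+ # and returns its CharField attribute; a bare name such as "datetime" falls
+ # into the else branch below, importing the module and returning its attribute
+ # of the same name. The public ask_for_it_by_name() wrapper further down
+ # caches the result, so repeated lookups of the same path are cheap.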
+ if len(bits)>1: + modulename = ".".join(bits[:-1]) + else: + modulename=bits[0] + + module = __import__(modulename, {}, {}, bits[-1]) + return getattr(module, bits[-1]) + + +def ask_for_it_by_name(name): + "Returns an object referenced by absolute path. (Memoised outer wrapper)" + if name not in ask_for_it_by_name.cache: + ask_for_it_by_name.cache[name] = _ask_for_it_by_name(name) + return ask_for_it_by_name.cache[name] +ask_for_it_by_name.cache = {} + + +def get_attribute(item, attribute): + """ + Like getattr, but recursive (i.e. you can ask for 'foo.bar.yay'.) + """ + value = item + for part in attribute.split("."): + value = getattr(value, part) + return value + + +fst = lambda (x, y): x +snd = lambda (x, y): y diff --git a/static/.gitignore b/static/.gitignore new file mode 100644 index 000000000..a74b07aee --- /dev/null +++ b/static/.gitignore @@ -0,0 +1 @@ +/*.pyc diff --git a/static/css/.gitignore b/static/css/.gitignore new file mode 100644 index 000000000..a74b07aee --- /dev/null +++ b/static/css/.gitignore @@ -0,0 +1 @@ +/*.pyc diff --git a/static/css/base.css b/static/css/base.css deleted file mode 100644 index ada50f6ae..000000000 --- a/static/css/base.css +++ /dev/null @@ -1,34 +0,0 @@ -body { - margin:0; - padding:0; - font-family: Arial, sans-serif; - font-size: 13px; - color: #022D66; - font-style: normal; - } -th { - padding:6px 0px 10px 30px; - line-height:1em; - font-family: Arial, sans-serif; - font-size: 1.0em; - color: #333; - - } -td {text-decoration: none; color: #000000; font: 10pt arial;} -td img { whitespace: nowrap; display:inline; } -/* Links ------------------------------------------------ */ -a:link, a:visited { - border-bottom:1px dotted #69f; - color:#36c; - text-decoration:none; - } -a:visited { - border-bottom-color:#969; - color:#36c; - } -a:hover { - border-bottom:1px solid #f00; - color:#f00; - } -a.noline:link, a.noline:visited, a.noline:hover {border-style:none;} diff --git a/static/css/base2.css b/static/css/base2.css new file mode 100644 index 000000000..cadf77a92 --- /dev/null +++ b/static/css/base2.css @@ -0,0 +1,142 @@ +/* +* Copyright (C) 2009-2010 Nokia Corporation and/or its subsidiary(-ies). +* All rights reserved. Contact: Pasi Eronen +* +* Redistribution and use in source and binary forms, with or without +* modification, are permitted provided that the following conditions +* are met: +* +* * Redistributions of source code must retain the above copyright +* notice, this list of conditions and the following disclaimer. +* +* * Redistributions in binary form must reproduce the above +* copyright notice, this list of conditions and the following +* disclaimer in the documentation and/or other materials provided +* with the distribution. +* +* * Neither the name of the Nokia Corporation and/or its +* subsidiary(-ies) nor the names of its contributors may be used +* to endorse or promote products derived from this software +* without specific prior written permission. +* +* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +* A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +*/ + +body { margin: 0; } +.yui-skin-sam h1 {margin: 0.5em 0; font-size: 167%;} +.yui-skin-sam .yui-navset .yui-content { + background: white; + border:0px; + border-top:1px solid #243356; + padding:0.5em 0; +} +.yui-navset .disabled a em {color:#a0a0a0;} + +.yui-skin-sam h1 { margin-top: 0; } + +#ietf-login { color: white; position:absolute; top:8px; right: 10px; } +#ietf-login a, #ietf-login a:visited { color: white; } + +.ietf-box { background:#edf5ff; border: 1px solid #cccccc; } + +.ietf-navbar { background-color: #edf5ff; padding:0; border: 1px solid #89d; margin-top:4px; } +.ietf-navbar ul { padding: 0; margin: 0; } +.ietf-navbar ul li { list-style: none; padding: 0; margin: 0; font-size: 93%; padding: 2px 0px 2px 0px; } +.ietf-navbar ul li.yuimenuitem { padding: 0px 0px 1px 0px; border: 0;} +.ietf-navbar ul li a { padding: 0px 0px 0px 10px; } +.ietf-navbar ul li.sect a { padding: 2px 2px 2px 2px; } +.ietf-navbar ul li.sect { font-weight:bold; color:#fff; background:#2647A0; margin-top:2px; text-indent:2px; padding: 2px 0;} +.ietf-navbar ul li.first { margin-top: 0px; } +.ietf-navbar ul li.sect a { color:#fff; } +.ietf-navbar a, .ietf-navbar a:visited { color: #000000; } +.ietf-navbar > ul > li > a:hover { background-color: #b3d4ff; } +.ietf-navbar .yuimenuitemlabel { font-size: 12px; padding: 0 10px; } +.ietf-navbar #wgs .bd { background-color: #edf5ff; } +.ietf-navbar #wgs > .bd { border: 0;} + +.ietf-ballot .left { background: #edf5ff; width:160px; padding-left: 10px; } +.ietf-ballot .right { padding-left: 15px; padding-right:15px; width:610px;padding-top:0px;} +.ietf-ballot h2.ballot_ad { background: #2647A0; color:white; padding: 2px 4px; font-size: 108%; margin-top: 0;} +.ietf-ballot .right { background: white; } +.ietf-ballot .square { border:1px solid black; display: block; float:left;width: 10px; height:10px;font-size:1px;margin-right:4px; margin-top:1px;} +.ietf-ballot .was { padding-left: 10px; font-size:85%; } + +.search_form_box {width: 99.5%; margin-top:8px; padding:4px; margin-bottom:1em; padding-left:8px;} +form#search_form { padding-top: 4px; padding-bottom: 4px; } +#search_form input { padding: 0; padding-left: 2px; border: 1px solid #89d;} +#search_form input.radio { padding-left: 0; border: 0; } +#search_form select { border: 1px solid #89d; } +#search_form div.search_field { margin-top:2px; clear:both;} +#search_form label { width: 170px; float: left; } +/* checkboxes for document types */ +#search_form table#search_types { border-collapse:collapse;} +#search_form #search_types td { padding:0; } +#search_form #search_types td input { margin-left: 0; width:14px; border:0;} +/* give checkbox a fixed width so that IE6 aligns the left edge correctly */ +#search_form #id_filename, +#search_form #id_author { width: 244px; } +#search_form #id_state, +#search_form #id_ad, +#search_form #id_positionAd { width:248px; } +#search_form #id_group {width: 120px; margin-right:4px; } +#search_form #id_area {width:120px; } + +table.ietf-table { 
border-collapse:collapse; border:1px solid #7f7f7f; } +.ietf-table tr.evenrow { background-color: #EDF5FF; } +.ietf-table tr.oddrow { background-color: white; } +.ietf-table td { border-right: 1px solid #cbcbcb; padding:3px 6px; } +.ietf-table th { color:white; background: #2647A0; text-align:left; padding:3px 6px; border-right: 1px solid #7f7f7f; } + +.ietf-doctable tr.header { border-top: 1px solid #7f7f7f; border-bottom: 1px solid #7f7f7f; border-left: 1px solid white; border-right:2px solid white;} +.ietf-doctable tr.header td {padding: 6px 6px; font-weight: bold; } +.ietf-doctable table { max-width: 1200px; } +.ietf-doctable th.doc, .ietf-doctable td.doc { min-width:20em; max-width: 35em; } +.ietf-doctable th.title, .ietf-doctable td.title { min-width: 20em; max-width: 35em; } +.ietf-doctable th.date, .ietf-doctable td.date { white-space:nowrap; min-width: 6em;} +.ietf-doctable th.status, .ietf-doctable td.status { min-width: 20em;} +.ietf-doctable th.ad, .ietf-doctable td.ad { white-space:nowrap; min-width: 6em; } +.ietf-doctable td.ballot { border-left: hidden; min-width: 37px; } + +table.ballot_icon { empty-cells: show; padding: 0; border-spacing: 0; border: 1px solid black; border-collapse: collapse; table-layout:fixed; min-width:35px; background:white; } +table.ballot_icon td { border: 1px solid black; height: 7px; width: 6px; padding: 0;} +td.ballot_icon_green { background:#80ff80; } +td.ballot_icon_red { background: #c00000; color: yellow; } +td.ballot_icon_gray { background: #c0c0c0; } +td.ballot_icon_yellow { background: #ffff00; } +table.ballot_icon td.ballot_icon_my { border: 3px outset black;} + +.ietf-small { font-size:85%; } +.ietf-highlight-y { padding:0 2px;background:yellow;} +.ietf-highlight-r { padding:0 2px;background:#ffa0a0;} +.ietf-divider { background: #2647a0; color: white; font-size:116%; padding:0.5em 1em; } + +form .actions { + margin-top: 20px; +} + +form .actions a { + margin-right: 10px; +} + +form table th { + font-weight: normal; + text-align: left; + padding-right: 10px; + padding-top: 2px; + vertical-align: top; +} + +form table .help { + font-style: italic; + font-size: 11px; +} diff --git a/static/css/mobile.css b/static/css/mobile.css new file mode 100644 index 000000000..f98326af9 --- /dev/null +++ b/static/css/mobile.css @@ -0,0 +1,36 @@ +/* Copyright The IETF Trust 2007, All Rights Reserved */ + +body { + margin: 0; + padding: 0; + background-color: white; + font-family: sans-serif; + font-size: small; + + font-size-adjust: none; + font-stretch: normal; +} + + +.legal { + font-size: .7em; + font-weight: bold; + text-align: center; + padding: 0.4em; +} + +h1 { font-size: 1.44em; font-weight: bold; text-align: center; color: white; background-color: blue; } +h2 { font-size: 1.2em; font-weight: bold; text-align: center; color: white; background-color: blue; } +h3 { font-size: 1.095em; font-weight: bold; text-align: center; color: white; background-color: blue; } +h4 { font-size: 1em; font-weight: bold; text-align: left; color: white; background-color: blue; padding: 0.2em; } +h5 { font-size: 1em; font-weight: bold; text-align: left; color: black; background-color: #BBF; padding: 0.2em; } +h5 a { font-size: 1em; font-weight: bold; text-align: left; color: black; background-color: #BBF; padding: 0.2em; } + +.header { + border: 0; + margin: 0; + padding: 0.1em; + color: white; + background-color: blue; +} + diff --git a/static/css/yui/yui-20100305.css b/static/css/yui/yui-20100305.css new file mode 100644 index 000000000..a93e622c4 --- /dev/null +++ 
b/static/css/yui/yui-20100305.css @@ -0,0 +1,35 @@ +/* +Copyright (c) 2009, Yahoo! Inc. All rights reserved. +Code licensed under the BSD License: +http://developer.yahoo.net/yui/license.txt +version: 2.8.0r4 +*/ +body{font:13px/1.231 arial,helvetica,clean,sans-serif;*font-size:small;*font:x-small;}select,input,button,textarea,button{font:99% arial,helvetica,clean,sans-serif;}table{font-size:inherit;font:100%;}pre,code,kbd,samp,tt{font-family:monospace;*font-size:108%;line-height:100%;}/* +Copyright (c) 2009, Yahoo! Inc. All rights reserved. +Code licensed under the BSD License: +http://developer.yahoo.net/yui/license.txt +version: 2.8.0r4 +*/ +.yui-overlay,.yui-panel-container{visibility:hidden;position:absolute;z-index:2;}.yui-panel{position:relative;}.yui-panel-container form{margin:0;}.mask{z-index:1;display:none;position:absolute;top:0;left:0;right:0;bottom:0;}.mask.block-scrollbars{overflow:auto;}.masked select,.drag select,.hide-select select{_visibility:hidden;}.yui-panel-container select{_visibility:inherit;}.hide-scrollbars,.hide-scrollbars *{overflow:hidden;}.hide-scrollbars select{display:none;}.show-scrollbars{overflow:auto;}.yui-panel-container.show-scrollbars,.yui-tt.show-scrollbars{overflow:visible;}.yui-panel-container.show-scrollbars .underlay,.yui-tt.show-scrollbars .yui-tt-shadow{overflow:auto;}.yui-panel-container.shadow .underlay.yui-force-redraw{padding-bottom:1px;}.yui-effect-fade .underlay,.yui-effect-fade .yui-tt-shadow{display:none;}.yui-tt-shadow{position:absolute;}.yui-override-padding{padding:0!important;}.yui-panel-container .container-close{overflow:hidden;text-indent:-10000em;text-decoration:none;}.yui-overlay.yui-force-redraw,.yui-panel-container.yui-force-redraw{margin-bottom:1px;}.yui-skin-sam .mask{background-color:#000;opacity:.25;filter:alpha(opacity=25);}.yui-skin-sam .yui-panel-container{padding:0 1px;*padding:2px;}.yui-skin-sam .yui-panel{position:relative;left:0;top:0;border-style:solid;border-width:1px 0;border-color:#808080;z-index:1;*border-width:1px;*zoom:1;_zoom:normal;}.yui-skin-sam .yui-panel .hd,.yui-skin-sam .yui-panel .bd,.yui-skin-sam .yui-panel .ft{border-style:solid;border-width:0 1px;border-color:#808080;margin:0 -1px;*margin:0;*border:0;}.yui-skin-sam .yui-panel .hd{border-bottom:solid 1px #ccc;}.yui-skin-sam .yui-panel .bd,.yui-skin-sam .yui-panel .ft{background-color:#F2F2F2;}.yui-skin-sam .yui-panel .hd{padding:0 10px;font-size:93%;line-height:2;*line-height:1.9;font-weight:bold;color:#000;background:url(/images/yui/sprite.png) repeat-x 0 -200px;}.yui-skin-sam .yui-panel .bd{padding:10px;}.yui-skin-sam .yui-panel .ft{border-top:solid 1px #808080;padding:5px 10px;font-size:77%;}.yui-skin-sam .container-close{position:absolute;top:5px;right:6px;width:25px;height:15px;background:url(/images/yui/sprite.png) no-repeat 0 -300px;cursor:pointer;}.yui-skin-sam .yui-panel-container .underlay{right:-1px;left:-1px;}.yui-skin-sam .yui-panel-container.matte{padding:9px 10px;background-color:#fff;}.yui-skin-sam .yui-panel-container.shadow{_padding:2px 4px 0 2px;}.yui-skin-sam .yui-panel-container.shadow .underlay{position:absolute;top:2px;left:-3px;right:-3px;bottom:-3px;*top:4px;*left:-1px;*right:-1px;*bottom:-1px;_top:0;_left:0;_right:0;_bottom:0;_margin-top:3px;_margin-left:-1px;background-color:#000;opacity:.12;filter:alpha(opacity=12);}.yui-skin-sam .yui-dialog .ft{border-top:none;padding:0 10px 10px 10px;font-size:100%;}.yui-skin-sam .yui-dialog .ft .button-group{display:block;text-align:right;}.yui-skin-sam .yui-dialog .ft 
button.default{font-weight:bold;}.yui-skin-sam .yui-dialog .ft span.default{border-color:#304369;background-position:0 -1400px;}.yui-skin-sam .yui-dialog .ft span.default .first-child{border-color:#304369;}.yui-skin-sam .yui-dialog .ft span.default button{color:#fff;}.yui-skin-sam .yui-dialog .ft span.yui-button-disabled{background-position:0 -1500px;border-color:#ccc;}.yui-skin-sam .yui-dialog .ft span.yui-button-disabled .first-child{border-color:#ccc;}.yui-skin-sam .yui-dialog .ft span.yui-button-disabled button{color:#a6a6a6;}.yui-skin-sam .yui-simple-dialog .bd .yui-icon{background:url(/images/yui/sprite.png) no-repeat 0 0;width:16px;height:16px;margin-right:10px;float:left;}.yui-skin-sam .yui-simple-dialog .bd span.blckicon{background-position:0 -1100px;}.yui-skin-sam .yui-simple-dialog .bd span.alrticon{background-position:0 -1050px;}.yui-skin-sam .yui-simple-dialog .bd span.hlpicon{background-position:0 -1150px;}.yui-skin-sam .yui-simple-dialog .bd span.infoicon{background-position:0 -1200px;}.yui-skin-sam .yui-simple-dialog .bd span.warnicon{background-position:0 -1900px;}.yui-skin-sam .yui-simple-dialog .bd span.tipicon{background-position:0 -1250px;}.yui-skin-sam .yui-tt .bd{position:relative;top:0;left:0;z-index:1;color:#000;padding:2px 5px;border-color:#D4C237 #A6982B #A6982B #A6982B;border-width:1px;border-style:solid;background-color:#FFEE69;}.yui-skin-sam .yui-tt.show-scrollbars .bd{overflow:auto;}.yui-skin-sam .yui-tt-shadow{top:2px;right:-3px;left:-3px;bottom:-3px;background-color:#000;}.yui-skin-sam .yui-tt-shadow-visible{opacity:.12;filter:alpha(opacity=12);} +/* +Copyright (c) 2009, Yahoo! Inc. All rights reserved. +Code licensed under the BSD License: +http://developer.yahoo.net/yui/license.txt +version: 2.8.0r4 +*/ +.yuimenu{top:-999em;left:-999em;}.yuimenubar{position:static;}.yuimenu .yuimenu,.yuimenubar .yuimenu{position:absolute;}.yuimenubar li,.yuimenu li{list-style-type:none;}.yuimenubar ul,.yuimenu ul,.yuimenubar li,.yuimenu li,.yuimenu h6,.yuimenubar h6{margin:0;padding:0;}.yuimenuitemlabel,.yuimenubaritemlabel{text-align:left;white-space:nowrap;}.yuimenubar ul{*zoom:1;}.yuimenubar .yuimenu ul{*zoom:normal;}.yuimenubar>.bd>ul:after{content:".";display:block;clear:both;visibility:hidden;height:0;line-height:0;}.yuimenubaritem{float:left;}.yuimenubaritemlabel,.yuimenuitemlabel{display:block;}.yuimenuitemlabel .helptext{font-style:normal;display:block;margin:-1em 0 0 10em;}.yui-menu-shadow{position:absolute;visibility:hidden;z-index:-1;}.yui-menu-shadow-visible{top:2px;right:-3px;left:-3px;bottom:-3px;visibility:visible;}.hide-scrollbars *{overflow:hidden;}.hide-scrollbars select{display:none;}.yuimenu.show-scrollbars,.yuimenubar.show-scrollbars{overflow:visible;}.yuimenu.hide-scrollbars .yui-menu-shadow,.yuimenubar.hide-scrollbars .yui-menu-shadow{overflow:hidden;}.yuimenu.show-scrollbars .yui-menu-shadow,.yuimenubar.show-scrollbars .yui-menu-shadow{overflow:auto;}.yui-overlay.yui-force-redraw{margin-bottom:1px;}.yui-skin-sam .yuimenubar{font-size:93%;line-height:2;*line-height:1.9;border:solid 1px #808080;background:url(/images/yui/sprite.png) repeat-x 0 0;}.yui-skin-sam .yuimenubarnav .yuimenubaritem{border-right:solid 1px #ccc;}.yui-skin-sam .yuimenubaritemlabel{padding:0 10px;color:#000;text-decoration:none;cursor:default;border-style:solid;border-color:#808080;border-width:1px 0;*position:relative;margin:-1px 0;}.yui-skin-sam .yuimenubaritemlabel:visited{color:#000;}.yui-skin-sam .yuimenubarnav 
.yuimenubaritemlabel{padding-right:20px;*display:inline-block;}.yui-skin-sam .yuimenubarnav .yuimenubaritemlabel-hassubmenu{background:url(/images/yui/menubaritem_submenuindicator.png) right center no-repeat;}.yui-skin-sam .yuimenubaritem-selected{background:url(/images/yui/sprite.png) repeat-x 0 -1700px;}.yui-skin-sam .yuimenubaritemlabel-selected{border-color:#7D98B8;}.yui-skin-sam .yuimenubarnav .yuimenubaritemlabel-selected{border-left-width:1px;margin-left:-1px;*left:-1px;}.yui-skin-sam .yuimenubaritemlabel-disabled,.yui-skin-sam .yuimenubaritemlabel-disabled:visited{cursor:default;color:#A6A6A6;}.yui-skin-sam .yuimenubarnav .yuimenubaritemlabel-hassubmenu-disabled{background-image:url(/images/yui/menubaritem_submenuindicator_disabled.png);}.yui-skin-sam .yuimenu{font-size:93%;line-height:1.5;*line-height:1.45;}.yui-skin-sam .yuimenubar .yuimenu,.yui-skin-sam .yuimenu .yuimenu{font-size:100%;}.yui-skin-sam .yuimenu .bd{*zoom:1;_zoom:normal;border:solid 1px #808080;background-color:#fff;}.yui-skin-sam .yuimenu .yuimenu .bd{*zoom:normal;}.yui-skin-sam .yuimenu ul{padding:3px 0;border-width:1px 0 0 0;border-color:#ccc;border-style:solid;}.yui-skin-sam .yuimenu ul.first-of-type{border-width:0;}.yui-skin-sam .yuimenu h6{font-weight:bold;border-style:solid;border-color:#ccc;border-width:1px 0 0 0;color:#a4a4a4;padding:3px 10px 0 10px;}.yui-skin-sam .yuimenu ul.hastitle,.yui-skin-sam .yuimenu h6.first-of-type{border-width:0;}.yui-skin-sam .yuimenu .yui-menu-body-scrolled{border-color:#ccc #808080;overflow:hidden;}.yui-skin-sam .yuimenu .topscrollbar,.yui-skin-sam .yuimenu .bottomscrollbar{height:16px;border:solid 1px #808080;background:#fff url(/images/yui/sprite.png) no-repeat 0 0;}.yui-skin-sam .yuimenu .topscrollbar{border-bottom-width:0;background-position:center -950px;}.yui-skin-sam .yuimenu .topscrollbar_disabled{background-position:center -975px;}.yui-skin-sam .yuimenu .bottomscrollbar{border-top-width:0;background-position:center -850px;}.yui-skin-sam .yuimenu .bottomscrollbar_disabled{background-position:center -875px;}.yui-skin-sam .yuimenuitem{_border-bottom:solid 1px #fff;}.yui-skin-sam .yuimenuitemlabel{padding:0 20px;color:#000;text-decoration:none;cursor:default;}.yui-skin-sam .yuimenuitemlabel:visited{color:#000;}.yui-skin-sam .yuimenuitemlabel .helptext{margin-top:-1.5em;*margin-top:-1.45em;}.yui-skin-sam .yuimenuitem-hassubmenu{background-image:url(/images/yui/menuitem_submenuindicator.png);background-position:right center;background-repeat:no-repeat;}.yui-skin-sam .yuimenuitem-checked{background-image:url(/images/yui/menuitem_checkbox.png);background-position:left center;background-repeat:no-repeat;}.yui-skin-sam .yui-menu-shadow-visible{background-color:#000;opacity:.12;filter:alpha(opacity=12);}.yui-skin-sam .yuimenuitem-selected{background-color:#B3D4FF;}.yui-skin-sam .yuimenuitemlabel-disabled,.yui-skin-sam .yuimenuitemlabel-disabled:visited{cursor:default;color:#A6A6A6;}.yui-skin-sam .yuimenuitem-hassubmenu-disabled{background-image:url(/images/yui/menuitem_submenuindicator_disabled.png);}.yui-skin-sam .yuimenuitem-checked-disabled{background-image:url(/images/yui/menuitem_checkbox_disabled.png);} +/* +Copyright (c) 2009, Yahoo! Inc. All rights reserved. 
+Code licensed under the BSD License: +http://developer.yahoo.net/yui/license.txt +version: 2.8.0r4 +*/ +.yui-button{display:-moz-inline-box;display:inline-block;vertical-align:text-bottom;}.yui-button .first-child{display:block;*display:inline-block;}.yui-button button,.yui-button a{display:block;*display:inline-block;border:none;margin:0;}.yui-button button{background-color:transparent;*overflow:visible;cursor:pointer;}.yui-button a{text-decoration:none;}.yui-skin-sam .yui-button{border-width:1px 0;border-style:solid;border-color:#808080;background:url(/images/yui/sprite.png) repeat-x 0 0;margin:auto .25em;}.yui-skin-sam .yui-button .first-child{border-width:0 1px;border-style:solid;border-color:#808080;margin:0 -1px;_margin:0;}.yui-skin-sam .yui-button button,.yui-skin-sam .yui-button a,.yui-skin-sam .yui-button a:visited{padding:0 10px;font-size:93%;line-height:2;*line-height:1.7;min-height:2em;*min-height:auto;color:#000;}.yui-skin-sam .yui-button a{*line-height:1.875;*padding-bottom:1px;}.yui-skin-sam .yui-split-button button,.yui-skin-sam .yui-menu-button button{padding-right:20px;background-position:right center;background-repeat:no-repeat;}.yui-skin-sam .yui-menu-button button{background-image:url(/images/yui/menu-button-arrow.png);}.yui-skin-sam .yui-split-button button{background-image:url(/images/yui/split-button-arrow.png);}.yui-skin-sam .yui-button-focus{border-color:#7D98B8;background-position:0 -1300px;}.yui-skin-sam .yui-button-focus .first-child{border-color:#7D98B8;}.yui-skin-sam .yui-split-button-focus button{background-image:url(/images/yui/split-button-arrow-focus.png);}.yui-skin-sam .yui-button-hover{border-color:#7D98B8;background-position:0 -1300px;}.yui-skin-sam .yui-button-hover .first-child{border-color:#7D98B8;}.yui-skin-sam .yui-split-button-hover button{background-image:url(/images/yui/split-button-arrow-hover.png);}.yui-skin-sam .yui-button-active{border-color:#7D98B8;background-position:0 -1700px;}.yui-skin-sam .yui-button-active .first-child{border-color:#7D98B8;}.yui-skin-sam .yui-split-button-activeoption{border-color:#808080;background-position:0 0;}.yui-skin-sam .yui-split-button-activeoption .first-child{border-color:#808080;}.yui-skin-sam .yui-split-button-activeoption button{background-image:url(/images/yui/split-button-arrow-active.png);}.yui-skin-sam .yui-radio-button-checked,.yui-skin-sam .yui-checkbox-button-checked{border-color:#304369;background-position:0 -1400px;}.yui-skin-sam .yui-radio-button-checked .first-child,.yui-skin-sam .yui-checkbox-button-checked .first-child{border-color:#304369;}.yui-skin-sam .yui-radio-button-checked button,.yui-skin-sam .yui-checkbox-button-checked button{color:#fff;}.yui-skin-sam .yui-button-disabled{border-color:#ccc;background-position:0 -1500px;}.yui-skin-sam .yui-button-disabled .first-child{border-color:#ccc;}.yui-skin-sam .yui-button-disabled button,.yui-skin-sam .yui-button-disabled a,.yui-skin-sam .yui-button-disabled a:visited{color:#A6A6A6;cursor:default;}.yui-skin-sam .yui-menu-button-disabled button{background-image:url(/images/yui/menu-button-arrow-disabled.png);}.yui-skin-sam .yui-split-button-disabled button{background-image:url(/images/yui/split-button-arrow-disabled.png);} +/* +Copyright (c) 2009, Yahoo! Inc. All rights reserved. 
+Code licensed under the BSD License: +http://developer.yahoo.net/yui/license.txt +version: 2.8.0r4 +*/ +.yui-navset .yui-nav li,.yui-navset .yui-navset-top .yui-nav li,.yui-navset .yui-navset-bottom .yui-nav li{margin:0 .5em 0 0;}.yui-navset-left .yui-nav li,.yui-navset-right .yui-nav li{margin:0 0 .5em;}.yui-navset .yui-content .yui-hidden{border:0;height:0;width:0;padding:0;position:absolute;left:-999999px;overflow:hidden;visibility:hidden;}.yui-navset .yui-navset-left .yui-nav,.yui-navset .yui-navset-right .yui-nav,.yui-navset-left .yui-nav,.yui-navset-right .yui-nav{width:6em;}.yui-navset-top .yui-nav,.yui-navset-bottom .yui-nav{width:auto;}.yui-navset .yui-navset-left,.yui-navset-left{padding:0 0 0 6em;}.yui-navset-right{padding:0 6em 0 0;}.yui-navset-top,.yui-navset-bottom{padding:auto;}.yui-nav,.yui-nav li{margin:0;padding:0;list-style:none;}.yui-navset li em{font-style:normal;}.yui-navset{position:relative;zoom:1;}.yui-navset .yui-content,.yui-navset .yui-content div{zoom:1;}.yui-navset .yui-content:after{content:'';display:block;clear:both;}.yui-navset .yui-nav li,.yui-navset .yui-navset-top .yui-nav li,.yui-navset .yui-navset-bottom .yui-nav li{display:inline-block;display:-moz-inline-stack;*display:inline;vertical-align:bottom;cursor:pointer;zoom:1;}.yui-navset-left .yui-nav li,.yui-navset-right .yui-nav li{display:block;}.yui-navset .yui-nav a{position:relative;}.yui-navset .yui-nav li a,.yui-navset-top .yui-nav li a,.yui-navset-bottom .yui-nav li a{display:block;display:inline-block;vertical-align:bottom;zoom:1;}.yui-navset-left .yui-nav li a,.yui-navset-right .yui-nav li a{display:block;}.yui-navset-bottom .yui-nav li a{vertical-align:text-top;}.yui-navset .yui-nav li a em,.yui-navset-top .yui-nav li a em,.yui-navset-bottom .yui-nav li a em{display:block;}.yui-navset .yui-navset-left .yui-nav,.yui-navset .yui-navset-right .yui-nav,.yui-navset-left .yui-nav,.yui-navset-right .yui-nav{position:absolute;z-index:1;}.yui-navset-top .yui-nav,.yui-navset-bottom .yui-nav{position:static;}.yui-navset .yui-navset-left .yui-nav,.yui-navset-left .yui-nav{left:0;right:auto;}.yui-navset .yui-navset-right .yui-nav,.yui-navset-right .yui-nav{right:0;left:auto;}.yui-skin-sam .yui-navset .yui-nav,.yui-skin-sam .yui-navset .yui-navset-top .yui-nav{border:solid #2647a0;border-width:0 0 5px;zoom:1;}.yui-skin-sam .yui-navset .yui-nav li,.yui-skin-sam .yui-navset .yui-navset-top .yui-nav li{margin:0 .16em 0 0;padding:1px 0 0;zoom:1;}.yui-skin-sam .yui-navset .yui-nav .selected,.yui-skin-sam .yui-navset .yui-navset-top .yui-nav .selected{margin:0 .16em -1px 0;}.yui-skin-sam .yui-navset .yui-nav a,.yui-skin-sam .yui-navset .yui-navset-top .yui-nav a{background:#d8d8d8 url(/images/yui/sprite.png) repeat-x;border:solid #a3a3a3;border-width:0 1px;color:#000;position:relative;text-decoration:none;}.yui-skin-sam .yui-navset .yui-nav a em,.yui-skin-sam .yui-navset .yui-navset-top .yui-nav a em{border:solid #a3a3a3;border-width:1px 0 0;cursor:hand;padding:.25em .75em;left:0;right:0;bottom:0;top:-1px;position:relative;}.yui-skin-sam .yui-navset .yui-nav .selected a,.yui-skin-sam .yui-navset .yui-nav .selected a:focus,.yui-skin-sam .yui-navset .yui-nav .selected a:hover{background:#2647a0 url(/images/yui/sprite.png) repeat-x left -1400px;color:#fff;}.yui-skin-sam .yui-navset .yui-nav a:hover,.yui-skin-sam .yui-navset .yui-nav a:focus{background:#bfdaff url(/images/yui/sprite.png) repeat-x left -1300px;outline:0;}.yui-skin-sam .yui-navset .yui-nav .selected a em{padding:.35em .75em;}.yui-skin-sam .yui-navset 
.yui-nav .selected a,.yui-skin-sam .yui-navset .yui-nav .selected a em{border-color:#243356;}.yui-skin-sam .yui-navset .yui-content{background:#edf5ff;}.yui-skin-sam .yui-navset .yui-content,.yui-skin-sam .yui-navset .yui-navset-top .yui-content{border:1px solid #808080;border-top-color:#243356;padding:.25em .5em;}.yui-skin-sam .yui-navset-left .yui-nav,.yui-skin-sam .yui-navset .yui-navset-left .yui-nav,.yui-skin-sam .yui-navset .yui-navset-right .yui-nav,.yui-skin-sam .yui-navset-right .yui-nav{border-width:0 5px 0 0;Xposition:absolute;top:0;bottom:0;}.yui-skin-sam .yui-navset .yui-navset-right .yui-nav,.yui-skin-sam .yui-navset-right .yui-nav{border-width:0 0 0 5px;}.yui-skin-sam .yui-navset-left .yui-nav li,.yui-skin-sam .yui-navset .yui-navset-left .yui-nav li,.yui-skin-sam .yui-navset-right .yui-nav li{margin:0 0 .16em;padding:0 0 0 1px;}.yui-skin-sam .yui-navset-right .yui-nav li{padding:0 1px 0 0;}.yui-skin-sam .yui-navset-left .yui-nav .selected,.yui-skin-sam .yui-navset .yui-navset-left .yui-nav .selected{margin:0 -1px .16em 0;}.yui-skin-sam .yui-navset-right .yui-nav .selected{margin:0 0 .16em -1px;}.yui-skin-sam .yui-navset-left .yui-nav a,.yui-skin-sam .yui-navset-right .yui-nav a{border-width:1px 0;}.yui-skin-sam .yui-navset-left .yui-nav a em,.yui-skin-sam .yui-navset .yui-navset-left .yui-nav a em,.yui-skin-sam .yui-navset-right .yui-nav a em{border-width:0 0 0 1px;padding:.2em .75em;top:auto;left:-1px;}.yui-skin-sam .yui-navset-right .yui-nav a em{border-width:0 1px 0 0;left:auto;right:-1px;}.yui-skin-sam .yui-navset-left .yui-nav a,.yui-skin-sam .yui-navset-left .yui-nav .selected a,.yui-skin-sam .yui-navset-left .yui-nav a:hover,.yui-skin-sam .yui-navset-right .yui-nav a,.yui-skin-sam .yui-navset-right .yui-nav .selected a,.yui-skin-sam .yui-navset-right .yui-nav a:hover,.yui-skin-sam .yui-navset-bottom .yui-nav a,.yui-skin-sam .yui-navset-bottom .yui-nav .selected a,.yui-skin-sam .yui-navset-bottom .yui-nav a:hover{background-image:none;}.yui-skin-sam .yui-navset-left .yui-content{border:1px solid #808080;border-left-color:#243356;}.yui-skin-sam .yui-navset-bottom .yui-nav,.yui-skin-sam .yui-navset .yui-navset-bottom .yui-nav{border-width:5px 0 0;}.yui-skin-sam .yui-navset .yui-navset-bottom .yui-nav .selected,.yui-skin-sam .yui-navset-bottom .yui-nav .selected{margin:-1px .16em 0 0;}.yui-skin-sam .yui-navset .yui-navset-bottom .yui-nav li,.yui-skin-sam .yui-navset-bottom .yui-nav li{padding:0 0 1px 0;vertical-align:top;}.yui-skin-sam .yui-navset .yui-navset-bottom .yui-nav a em,.yui-skin-sam .yui-navset-bottom .yui-nav a em{border-width:0 0 1px;top:auto;bottom:-1px;} +.yui-skin-sam .yui-navset-bottom .yui-content,.yui-skin-sam .yui-navset .yui-navset-bottom .yui-content{border:1px solid #808080;border-bottom-color:#243356;} diff --git a/static/images/.gitignore b/static/images/.gitignore new file mode 100644 index 000000000..a74b07aee --- /dev/null +++ b/static/images/.gitignore @@ -0,0 +1 @@ +/*.pyc diff --git a/static/images/agenda-touch-icon.png b/static/images/agenda-touch-icon.png new file mode 100644 index 000000000..fade42fed Binary files /dev/null and b/static/images/agenda-touch-icon.png differ diff --git a/static/images/blue_dot.gif b/static/images/blue_dot.gif new file mode 100644 index 000000000..6db2e1ac7 Binary files /dev/null and b/static/images/blue_dot.gif differ diff --git a/static/images/color-palette-4x4.gif b/static/images/color-palette-4x4.gif new file mode 100644 index 000000000..cdab79cf9 Binary files /dev/null and 
b/static/images/color-palette-4x4.gif differ diff --git a/static/images/comment.png b/static/images/comment.png new file mode 100755 index 000000000..53f5a80a2 Binary files /dev/null and b/static/images/comment.png differ diff --git a/static/images/header/.gitignore b/static/images/header/.gitignore new file mode 100644 index 000000000..a74b07aee --- /dev/null +++ b/static/images/header/.gitignore @@ -0,0 +1 @@ +/*.pyc diff --git a/static/images/ietf-icon-blue.bmp b/static/images/ietf-icon-blue.bmp new file mode 100644 index 000000000..7afec866b Binary files /dev/null and b/static/images/ietf-icon-blue.bmp differ diff --git a/static/images/ietflogo-blue-small.png b/static/images/ietflogo-blue-small.png new file mode 100755 index 000000000..73d721701 Binary files /dev/null and b/static/images/ietflogo-blue-small.png differ diff --git a/static/images/ietflogo-blue.png b/static/images/ietflogo-blue.png new file mode 100644 index 000000000..fe0f904cf Binary files /dev/null and b/static/images/ietflogo-blue.png differ diff --git a/static/images/ietflogo.gif b/static/images/ietflogo.gif new file mode 100644 index 000000000..2942af0f9 Binary files /dev/null and b/static/images/ietflogo.gif differ diff --git a/static/images/mail_title.gif b/static/images/mail_title.gif deleted file mode 100644 index 6eec1e598..000000000 Binary files a/static/images/mail_title.gif and /dev/null differ diff --git a/static/images/minus.png b/static/images/minus.png new file mode 100644 index 000000000..da692a6b7 Binary files /dev/null and b/static/images/minus.png differ diff --git a/static/images/nwg/.gitignore b/static/images/nwg/.gitignore new file mode 100644 index 000000000..a74b07aee --- /dev/null +++ b/static/images/nwg/.gitignore @@ -0,0 +1 @@ +/*.pyc diff --git a/static/images/nwg/mail_title_approval.gif b/static/images/nwg/mail_title_approval.gif new file mode 100644 index 000000000..403460c31 Binary files /dev/null and b/static/images/nwg/mail_title_approval.gif differ diff --git a/static/images/nwg/mail_title_internal.gif b/static/images/nwg/mail_title_internal.gif new file mode 100644 index 000000000..c4cb55e7d Binary files /dev/null and b/static/images/nwg/mail_title_internal.gif differ diff --git a/static/images/plus.png b/static/images/plus.png new file mode 100644 index 000000000..b24e46a49 Binary files /dev/null and b/static/images/plus.png differ diff --git a/static/images/square.png b/static/images/square.png new file mode 100644 index 000000000..9c0dd7ee0 Binary files /dev/null and b/static/images/square.png differ diff --git a/static/images/title_line.gif b/static/images/title_line.gif new file mode 100644 index 000000000..cbce33182 Binary files /dev/null and b/static/images/title_line.gif differ diff --git a/static/images/yui/menu-button-arrow-disabled.png b/static/images/yui/menu-button-arrow-disabled.png new file mode 100644 index 000000000..8cef2abb3 Binary files /dev/null and b/static/images/yui/menu-button-arrow-disabled.png differ diff --git a/static/images/yui/menu-button-arrow.png b/static/images/yui/menu-button-arrow.png new file mode 100644 index 000000000..f03dfee4e Binary files /dev/null and b/static/images/yui/menu-button-arrow.png differ diff --git a/static/images/yui/menubaritem_submenuindicator.png b/static/images/yui/menubaritem_submenuindicator.png new file mode 100644 index 000000000..030941c9c Binary files /dev/null and b/static/images/yui/menubaritem_submenuindicator.png differ diff --git a/static/images/yui/menubaritem_submenuindicator_disabled.png 
b/static/images/yui/menubaritem_submenuindicator_disabled.png new file mode 100644 index 000000000..6c1612230 Binary files /dev/null and b/static/images/yui/menubaritem_submenuindicator_disabled.png differ diff --git a/static/images/yui/menuitem_checkbox.png b/static/images/yui/menuitem_checkbox.png new file mode 100644 index 000000000..1437a4f4b Binary files /dev/null and b/static/images/yui/menuitem_checkbox.png differ diff --git a/static/images/yui/menuitem_checkbox_disabled.png b/static/images/yui/menuitem_checkbox_disabled.png new file mode 100644 index 000000000..5d5b9985e Binary files /dev/null and b/static/images/yui/menuitem_checkbox_disabled.png differ diff --git a/static/images/yui/menuitem_submenuindicator.png b/static/images/yui/menuitem_submenuindicator.png new file mode 100644 index 000000000..ea4f66029 Binary files /dev/null and b/static/images/yui/menuitem_submenuindicator.png differ diff --git a/static/images/yui/menuitem_submenuindicator_disabled.png b/static/images/yui/menuitem_submenuindicator_disabled.png new file mode 100644 index 000000000..427d60a38 Binary files /dev/null and b/static/images/yui/menuitem_submenuindicator_disabled.png differ diff --git a/static/images/yui/split-button-arrow-active.png b/static/images/yui/split-button-arrow-active.png new file mode 100644 index 000000000..fa58c5030 Binary files /dev/null and b/static/images/yui/split-button-arrow-active.png differ diff --git a/static/images/yui/split-button-arrow-disabled.png b/static/images/yui/split-button-arrow-disabled.png new file mode 100644 index 000000000..0a6a82c64 Binary files /dev/null and b/static/images/yui/split-button-arrow-disabled.png differ diff --git a/static/images/yui/split-button-arrow-focus.png b/static/images/yui/split-button-arrow-focus.png new file mode 100644 index 000000000..167d71eb7 Binary files /dev/null and b/static/images/yui/split-button-arrow-focus.png differ diff --git a/static/images/yui/split-button-arrow-hover.png b/static/images/yui/split-button-arrow-hover.png new file mode 100644 index 000000000..167d71eb7 Binary files /dev/null and b/static/images/yui/split-button-arrow-hover.png differ diff --git a/static/images/yui/split-button-arrow.png b/static/images/yui/split-button-arrow.png new file mode 100644 index 000000000..b33a93ff2 Binary files /dev/null and b/static/images/yui/split-button-arrow.png differ diff --git a/static/images/yui/sprite.png b/static/images/yui/sprite.png new file mode 100644 index 000000000..73634d6a2 Binary files /dev/null and b/static/images/yui/sprite.png differ diff --git a/static/js/agenda-documents.js b/static/js/agenda-documents.js new file mode 100644 index 000000000..b64fb4385 --- /dev/null +++ b/static/js/agenda-documents.js @@ -0,0 +1,7 @@ +jQuery(document).ready(function () { + jQuery("#clear-all-on-schedule").click(function (e) { + e.preventDefault(); + + jQuery("div.reschedule select").attr("selectedIndex", 0); + }) +}); diff --git a/static/js/agenda.js b/static/js/agenda.js new file mode 100644 index 000000000..5ce5db50e --- /dev/null +++ b/static/js/agenda.js @@ -0,0 +1,117 @@ + +// cookie functions used with permission from http://www.elated.com/articles/javascript-and-cookies/ +function set_cookie ( name, value, exp_y, exp_m, exp_d, path, domain, secure ) +{ + var cookie_string = name + "=" + escape ( value ); + + if ( exp_y ) { + var expires = new Date ( exp_y, exp_m, exp_d ); + cookie_string += "; expires=" + expires.toGMTString(); + } + + if ( path ) + cookie_string += "; path=" + escape ( path ); + + if ( domain ) 
+ cookie_string += "; domain=" + escape ( domain ); + + if ( secure ) + cookie_string += "; secure"; + + document.cookie = cookie_string; +} +function delete_cookie ( cookie_name ) +{ + var cookie_date = new Date ( ); // current date & time + cookie_date.setTime ( cookie_date.getTime() - 1 ); + document.cookie = cookie_name += "=; expires=" + cookie_date.toGMTString(); +} +function get_cookie ( cookie_name ) +{ + var results = document.cookie.match ( '(^|;) ?' + cookie_name + '=([^;]*)(;|$)' ); + + if ( results ) + return ( unescape ( results[2] ) ); + else + return null; +} + +// set the color of a row to the proper class. optionally set the corresponding cookie. +function setcolor(id, color, skip_cookie) +{ + oneSecond = 1000; + oneMinute = 60*oneSecond; + oneHour = 60*oneMinute; + oneDay = 24*oneHour; + oneWeek = 7*oneDay; + oneMonth = 31*oneDay; + + var now = new Date(); + var exp = new Date(now.getTime() + 3*oneMonth); + + var e = $(id); + if (e) e.className = "bg" + color; + //if (!skip_cookie) set_cookie(id, color, 2009, 8, 1); + if (!skip_cookie) set_cookie(id, color, exp.getFullYear(), exp.getMonth(), exp.getDate(),"", ".ietf.org"); +} + +// return a list of all cookie name/value pairs +function get_cookie_list() +{ + var cookie = document.cookie; + var cookies = cookie.split(';'); + var cookie_list = []; + for (var i = 0; i < cookies.length; i++) { + var cookie_match = cookies[i].match('(^|;) *([^=]*)=([^;]*)(;|$)'); + if (cookie_match) { + cookie_list.push(cookie_match[2]); + cookie_list.push(cookie_match[3]); + // alert("cookie: '" + cookie_match[2] + "'='" + cookie_match[3] + "'"); + } + } + return cookie_list; +} + +// run through all cookies and set the colors of each row +function set_cookie_colors() +{ + var cl = get_cookie_list(); + for (var i = 0; i < cl.length; i += 2) { + setcolor(cl[i], cl[i+1], true); + } + Element.hide('colorpallet'); +} + +// the current color being picked by the popup +var curid; + +// pop up the pallet to let a color be picked +function pickcolor(id) +{ + curid = id; + var colorpallet = $('colorpallet'); + if (colorpallet) { + Element.show(colorpallet); + Element.absolutize(colorpallet); + Element.clonePosition(colorpallet, "p-" + id); + } +} + +// called by the pallet popup to set the current color +function setcurcolor(color) +{ + setcolor(curid, color); + var colorpallet = $('colorpallet'); + if (colorpallet) { + Element.hide(colorpallet); + } +} + +// open up a new window showing the given room +function venue(room) +{ + window.open('venue/?room=' + room, 'IETF meeting rooms', + 'scrollbars=no,toolbar=no,width=621,height=560'); + return false; +} + diff --git a/static/js/agenda2.js b/static/js/agenda2.js new file mode 100644 index 000000000..c2989ecc0 --- /dev/null +++ b/static/js/agenda2.js @@ -0,0 +1,73 @@ +// Based on agenda.js written by Tony Hansen. + +// Portion Copyright (C) 2010 Nokia Corporation and/or its subsidiary(-ies). +// All rights reserved. Contact: Pasi Eronen +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions +// are met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following +// disclaimer in the documentation and/or other materials provided +// with the distribution. 
+// +// * Neither the name of the Nokia Corporation and/or its +// subsidiary(-ies) nor the names of its contributors may be used +// to endorse or promote products derived from this software +// without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +function setAgendaColor(color) { + IETF.agendaPalette.hide(); + document.getElementById(IETF.agendaRow).className="bg"+color; + if (color == 'none') { + YAHOO.util.Cookie.removeSub("ietf-agenda-colors", IETF.agendaRow); + } else { + var twoMonths = new Date(new Date().getTime() + 60*24*60*60*1000); + YAHOO.util.Cookie.setSub("ietf-agenda-colors", IETF.agendaRow, color, { expires:twoMonths }); + } +} +function createPalette() { + IETF.agendaPalette = new YAHOO.widget.Overlay("ietf-agenda-palette", { constraintoviewport:true, visible:false } ); + var body = ''; + var c = ['aqua', 'blue', 'fuchsia', 'gray', 'green', 'lime', + 'maroon', 'navy', 'olive', 'purple', 'red', 'silver', + 'teal', 'white', 'yellow', 'black']; + for (var i = 0; i < c.length; i++) { + if ((i%4) == 0) { body += "" } + body += ''; + if ((i%4) == 3) { body += "" } + } + body += '
    Select a color for this line
    '+c[i]+'
    none
    '; + IETF.agendaPalette.setBody(body); + IETF.agendaPalette.render(document.body); +} +function pickAgendaColor(row, place) { + if (!IETF.agendaPalette) { + createPalette(); + } + IETF.agendaRow = row; + IETF.agendaPalette.cfg.setProperty("context", [place, "tl", "tl"]); + IETF.agendaPalette.show(); +} +function updateAgendaColors() { + var colors = YAHOO.util.Cookie.getSubs("ietf-agenda-colors"); + for (var k in colors) { + document.getElementById(k).className="bg"+colors[k]; + } +} diff --git a/static/js/base.js b/static/js/base.js new file mode 100644 index 000000000..5973338c5 --- /dev/null +++ b/static/js/base.js @@ -0,0 +1,75 @@ +// Copyright (C) 2009 Nokia Corporation and/or its subsidiary(-ies). +// All rights reserved. Contact: Pasi Eronen +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions +// are met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following +// disclaimer in the documentation and/or other materials provided +// with the distribution. +// +// * Neither the name of the Nokia Corporation and/or its +// subsidiary(-ies) nor the names of its contributors may be used +// to endorse or promote products derived from this software +// without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +function showBallot(draftName, editPositionUrl) { + var handleEditPosition = function() { + IETF.ballotDialog.hide(); + window.location = editPositionUrl; + }; + var handleClose = function() { + IETF.ballotDialog.hide(); + }; + var el; + + if (!IETF.ballotDialog) { + el = document.createElement("div"); + el.innerHTML = '

    '; + document.getElementById("ietf-extras").appendChild(el); + + var buttons = [{text:"Close", handler:handleClose, isDefault:true}]; + if (("Area_Director" in IETF.user_groups) || + ("Secretariat" in IETF.user_groups)) { + buttons.unshift({text:"Edit Position", handler:handleEditPosition}); + } + var kl = [new YAHOO.util.KeyListener(document, {keys:27}, handleClose)] + IETF.ballotDialog = new YAHOO.widget.Dialog("ballot_dialog", { + visible:false, draggable:false, close:true, modal:true, + width:"860px", fixedcenter:true, constraintoviewport:true, + buttons: buttons, keylisteners:kl}); + IETF.ballotDialog.render(); + } + document.getElementById("ballot_dialog_name").innerHTML = draftName; + + IETF.ballotDialog.show(); + + el = document.getElementById("ballot_dialog_body"); + el.innerHTML = "Loading..."; + YAHOO.util.Connect.asyncRequest('GET', + "/doc/"+draftName+"/_ballot.data", + { success: function(o) { el.innerHTML = (o.responseText !== undefined) ? o.responseText : "?"; }, + failure: function(o) { el.innerHTML = "Error: "+o.status+" "+o.statusText; }, + argument: null + }, null); +} +function editBallot(editPositionUrl) { + window.open(editPositionUrl); +} diff --git a/static/js/datatracker-search.xml b/static/js/datatracker-search.xml new file mode 100644 index 000000000..0d8855e20 --- /dev/null +++ b/static/js/datatracker-search.xml @@ -0,0 +1,13 @@ + + + IETF Datatracker Search + Use datatracker.ietf.org to search for Internet-Drafts and RFCs + Requests For Comments + + datatracker.ietf.org RFC and Internet-Draft Search + http://datatracker.ietf.org/images/ietf-icon-blue.bmp + Tony Hansen + en-us + + diff --git a/static/js/dateformat.js b/static/js/dateformat.js new file mode 100644 index 000000000..3de9580fa --- /dev/null +++ b/static/js/dateformat.js @@ -0,0 +1,161 @@ +/** + * Date.format() + * string format ( string format ) + * Formatting rules according to http://php.net/strftime + * + * Copyright (C) 2006 Dao Gottwald + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. 
+ * + * You should have received a copy of the GNU Lesser General Public + * License along with this library; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + * + * Contact information: + * Dao Gottwald + * + * @version 0.7 + * @todo %g, %G, %U, %V, %W, %z, more/better localization + * @url http://design-noir.de/webdev/JS/Date.format/ + */ + +var _lang = (navigator.systemLanguage || navigator.userLanguage || navigator.language || navigator.browserLanguage || '').replace(/-.*/,''); +switch (_lang) { + case 'de': + Date._l10n = { + days: ['Sonntag','Montag','Dienstag','Mittwoch','Donnerstag','Freitag','Samstag'], + months: ['Januar','Februar','M\u00E4rz','April','Mai','Juni','Juli','August','September','Oktober','November','Dezember'], + date: '%e.%m.%Y', + time: '%H:%M:%S'}; + break; + case 'es': + Date._l10n = { + days: ['Domingo','Lunes','Martes','Mi\u00E9rcoles','Jueves','Viernes','S\u00E1bado'], + months: ['enero','febrero','marzo','abril','mayo','junio','julio','agosto','septiembre','octubre','noviembre','diciembre'], + date: '%e.%m.%Y', + time: '%H:%M:%S'}; + break; + case 'fr': + Date._l10n = { + days: ['dimanche','lundi','mardi','mercredi','jeudi','vendredi','samedi'], + months: ['janvier','f\u00E9vrier','mars','avril','mai','juin','juillet','ao\u00FBt','septembre','octobre','novembre','d\u00E9cembre'], + date: '%e/%m/%Y', + time: '%H:%M:%S'}; + break; + case 'it': + Date._l10n = { + days: ['domenica','luned\u00EC','marted\u00EC','mercoled\u00EC','gioved\u00EC','venerd\u00EC','sabato'], + months: ['gennaio','febbraio','marzo','aprile','maggio','giugno','luglio','agosto','settembre','ottobre','novembre','dicembre'], + date: '%e/%m/%y', + time: '%H.%M.%S'}; + break; + case 'pt': + Date._l10n = { + days: ['Domingo','Segunda-feira','Ter\u00E7a-feira','Quarta-feira','Quinta-feira','Sexta-feira','S\u00E1bado'], + months: ['Janeiro','Fevereiro','Mar\u00E7o','Abril','Maio','Junho','Julho','Agosto','Setembro','Outubro','Novembro','Dezembro'], + date: '%e/%m/%y', + time: '%H.%M.%S'}; + break; + case 'en': + default: + Date._l10n = { + days: ['Sunday','Monday','Tuesday','Wednesday','Thursday','Friday','Saturday'], + months: ['January','February','March','April','May','June','July','August','September','October','November','December'], + date: '%Y-%m-%e', + time: '%H:%M:%S'}; + break; +} +Date._pad = function(num, len) { + for (var i = 1; i <= len; i++) + if (num < Math.pow(10, i)) + return new Array(len-i+1).join(0) + num; + return num; +}; +Date.prototype.format = function(format) { + if (format.indexOf('%%') > -1) { // a literal `%' character + format = format.split('%%'); + for (var i = 0; i < format.length; i++) + format[i] = this.format(format[i]); + return format.join('%'); + } + format = format.replace(/%D/g, '%m/%d/%y'); // same as %m/%d/%y + format = format.replace(/%r/g, '%I:%M:%S %p'); // time in a.m. and p.m. 
notation + format = format.replace(/%R/g, '%H:%M:%S'); // time in 24 hour notation + format = format.replace(/%T/g, '%H:%M:%S'); // current time, equal to %H:%M:%S + format = format.replace(/%x/g, Date._l10n.date); // preferred date representation for the current locale without the time + format = format.replace(/%X/g, Date._l10n.time); // preferred time representation for the current locale without the date + var dateObj = this; + return format.replace(/%([aAbhBcCdegGHIjmMnpStuUVWwyYzZ])/g, function(match0, match1) { + return dateObj.format_callback(match0, match1); + }); +} +Date.prototype.format_callback = function(match0, match1) { + switch (match1) { + case 'a': // abbreviated weekday name according to the current locale + return Date._l10n.days[this.getDay()].substr(0,3); + case 'A': // full weekday name according to the current locale + return Date._l10n.days[this.getDay()]; + case 'b': + case 'h': // abbreviated month name according to the current locale + return Date._l10n.months[this.getMonth()].substr(0,3); + case 'B': // full month name according to the current locale + return Date._l10n.months[this.getMonth()]; + case 'c': // preferred date and time representation for the current locale + return this.toLocaleString(); + case 'C': // century number (the year divided by 100 and truncated to an integer, range 00 to 99) + return Math.floor(this.getFullYear() / 100); + case 'd': // day of the month as a decimal number (range 01 to 31) + return Date._pad(this.getDate(), 2); + case 'e': // day of the month as a decimal number, a single digit is preceded by a space (range ' 1' to '31') + return Date._pad(this.getDate(), 2); + /*case 'g': // like %G, but without the century + return ; + case 'G': // The 4-digit year corresponding to the ISO week number (see %V). This has the same format and value as %Y, except that if the ISO week number belongs to the previous or next year, that year is used instead + return ;*/ + case 'H': // hour as a decimal number using a 24-hour clock (range 00 to 23) + return Date._pad(this.getHours(), 2); + case 'I': // hour as a decimal number using a 12-hour clock (range 01 to 12) + return Date._pad(this.getHours() % 12, 2); + case 'j': // day of the year as a decimal number (range 001 to 366) + return Date._pad(this.getMonth() * 30 + Math.ceil(this.getMonth() / 2) + this.getDay() - 2 * (this.getMonth() > 1) + (!(this.getFullYear() % 400) || (!(this.getFullYear() % 4) && this.getFullYear() % 100)), 3); + case 'm': // month as a decimal number (range 01 to 12) + return Date._pad(this.getMonth() + 1, 2); + case 'M': // minute as a decimal number + return Date._pad(this.getMinutes(), 2); + case 'n': // newline character + return '\n'; + case 'p': // either `am' or `pm' according to the given time value, or the corresponding strings for the current locale + return this.getHours() < 12 ? 'am' : 'pm'; + case 'S': // second as a decimal number + return Date._pad(this.getSeconds(), 2); + case 't': // tab character + return '\t'; + case 'u': // weekday as a decimal number [1,7], with 1 representing Monday + return this.getDay() || 7; + /*case 'U': // week number of the current year as a decimal number, starting with the first Sunday as the first day of the first week + return ; + case 'V': // The ISO 8601:1988 week number of the current year as a decimal number, range 01 to 53, where week 1 is the first week that has at least 4 days in the current year, and with Monday as the first day of the week. 
(Use %G or %g for the year component that corresponds to the week number for the specified timestamp.) + return ; + case 'W': // week number of the current year as a decimal number, starting with the first Monday as the first day of the first week + return ;*/ + case 'w': // day of the week as a decimal, Sunday being 0 + return this.getDay(); + case 'y': // year as a decimal number without a century (range 00 to 99) + return this.getFullYear().toString().substr(2); + case 'Y': // year as a decimal number including the century + return this.getFullYear(); + /*case 'z': + case 'Z': // time zone or name or abbreviation + return ;*/ + default: + return match0; + } +} \ No newline at end of file diff --git a/static/js/doc-edit-position.js b/static/js/doc-edit-position.js new file mode 100644 index 000000000..6105ac317 --- /dev/null +++ b/static/js/doc-edit-position.js @@ -0,0 +1,14 @@ +jQuery(document).ready(function () { + function setDiscussWidgetVisibility(discuss) { + if (discuss) + jQuery("form.position-form .discuss-widgets").show(); + else + jQuery("form.position-form .discuss-widgets").hide(); + } + + jQuery("form.position-form input[name=position]").click(function (e) { + setDiscussWidgetVisibility(jQuery(this).val() == "discuss"); + }); + + setDiscussWidgetVisibility(jQuery("form.position-form input[name=position][value=discuss]").is(':checked')); +}); diff --git a/static/js/lib/jquery-1.4.2.min.js b/static/js/lib/jquery-1.4.2.min.js new file mode 100644 index 000000000..7c2430802 --- /dev/null +++ b/static/js/lib/jquery-1.4.2.min.js @@ -0,0 +1,154 @@ +/*! + * jQuery JavaScript Library v1.4.2 + * http://jquery.com/ + * + * Copyright 2010, John Resig + * Dual licensed under the MIT or GPL Version 2 licenses. + * http://jquery.org/license + * + * Includes Sizzle.js + * http://sizzlejs.com/ + * Copyright 2010, The Dojo Foundation + * Released under the MIT, BSD, and GPL Licenses. 
+ * + * Date: Sat Feb 13 22:33:48 2010 -0500 + */ +(function(A,w){function ma(){if(!c.isReady){try{s.documentElement.doScroll("left")}catch(a){setTimeout(ma,1);return}c.ready()}}function Qa(a,b){b.src?c.ajax({url:b.src,async:false,dataType:"script"}):c.globalEval(b.text||b.textContent||b.innerHTML||"");b.parentNode&&b.parentNode.removeChild(b)}function X(a,b,d,f,e,j){var i=a.length;if(typeof b==="object"){for(var o in b)X(a,o,b[o],f,e,d);return a}if(d!==w){f=!j&&f&&c.isFunction(d);for(o=0;o)[^>]*$|^#([\w-]+)$/,Ua=/^.[^:#\[\.,]*$/,Va=/\S/, +Wa=/^(\s|\u00A0)+|(\s|\u00A0)+$/g,Xa=/^<(\w+)\s*\/?>(?:<\/\1>)?$/,P=navigator.userAgent,xa=false,Q=[],L,$=Object.prototype.toString,aa=Object.prototype.hasOwnProperty,ba=Array.prototype.push,R=Array.prototype.slice,ya=Array.prototype.indexOf;c.fn=c.prototype={init:function(a,b){var d,f;if(!a)return this;if(a.nodeType){this.context=this[0]=a;this.length=1;return this}if(a==="body"&&!b){this.context=s;this[0]=s.body;this.selector="body";this.length=1;return this}if(typeof a==="string")if((d=Ta.exec(a))&& +(d[1]||!b))if(d[1]){f=b?b.ownerDocument||b:s;if(a=Xa.exec(a))if(c.isPlainObject(b)){a=[s.createElement(a[1])];c.fn.attr.call(a,b,true)}else a=[f.createElement(a[1])];else{a=sa([d[1]],[f]);a=(a.cacheable?a.fragment.cloneNode(true):a.fragment).childNodes}return c.merge(this,a)}else{if(b=s.getElementById(d[2])){if(b.id!==d[2])return T.find(a);this.length=1;this[0]=b}this.context=s;this.selector=a;return this}else if(!b&&/^\w+$/.test(a)){this.selector=a;this.context=s;a=s.getElementsByTagName(a);return c.merge(this, +a)}else return!b||b.jquery?(b||T).find(a):c(b).find(a);else if(c.isFunction(a))return T.ready(a);if(a.selector!==w){this.selector=a.selector;this.context=a.context}return c.makeArray(a,this)},selector:"",jquery:"1.4.2",length:0,size:function(){return this.length},toArray:function(){return R.call(this,0)},get:function(a){return a==null?this.toArray():a<0?this.slice(a)[0]:this[a]},pushStack:function(a,b,d){var f=c();c.isArray(a)?ba.apply(f,a):c.merge(f,a);f.prevObject=this;f.context=this.context;if(b=== +"find")f.selector=this.selector+(this.selector?" ":"")+d;else if(b)f.selector=this.selector+"."+b+"("+d+")";return f},each:function(a,b){return c.each(this,a,b)},ready:function(a){c.bindReady();if(c.isReady)a.call(s,c);else Q&&Q.push(a);return this},eq:function(a){return a===-1?this.slice(a):this.slice(a,+a+1)},first:function(){return this.eq(0)},last:function(){return this.eq(-1)},slice:function(){return this.pushStack(R.apply(this,arguments),"slice",R.call(arguments).join(","))},map:function(a){return this.pushStack(c.map(this, +function(b,d){return a.call(b,d,b)}))},end:function(){return this.prevObject||c(null)},push:ba,sort:[].sort,splice:[].splice};c.fn.init.prototype=c.fn;c.extend=c.fn.extend=function(){var a=arguments[0]||{},b=1,d=arguments.length,f=false,e,j,i,o;if(typeof a==="boolean"){f=a;a=arguments[1]||{};b=2}if(typeof a!=="object"&&!c.isFunction(a))a={};if(d===b){a=this;--b}for(;b
    a"; +var e=d.getElementsByTagName("*"),j=d.getElementsByTagName("a")[0];if(!(!e||!e.length||!j)){c.support={leadingWhitespace:d.firstChild.nodeType===3,tbody:!d.getElementsByTagName("tbody").length,htmlSerialize:!!d.getElementsByTagName("link").length,style:/red/.test(j.getAttribute("style")),hrefNormalized:j.getAttribute("href")==="/a",opacity:/^0.55$/.test(j.style.opacity),cssFloat:!!j.style.cssFloat,checkOn:d.getElementsByTagName("input")[0].value==="on",optSelected:s.createElement("select").appendChild(s.createElement("option")).selected, +parentNode:d.removeChild(d.appendChild(s.createElement("div"))).parentNode===null,deleteExpando:true,checkClone:false,scriptEval:false,noCloneEvent:true,boxModel:null};b.type="text/javascript";try{b.appendChild(s.createTextNode("window."+f+"=1;"))}catch(i){}a.insertBefore(b,a.firstChild);if(A[f]){c.support.scriptEval=true;delete A[f]}try{delete b.test}catch(o){c.support.deleteExpando=false}a.removeChild(b);if(d.attachEvent&&d.fireEvent){d.attachEvent("onclick",function k(){c.support.noCloneEvent= +false;d.detachEvent("onclick",k)});d.cloneNode(true).fireEvent("onclick")}d=s.createElement("div");d.innerHTML="";a=s.createDocumentFragment();a.appendChild(d.firstChild);c.support.checkClone=a.cloneNode(true).cloneNode(true).lastChild.checked;c(function(){var k=s.createElement("div");k.style.width=k.style.paddingLeft="1px";s.body.appendChild(k);c.boxModel=c.support.boxModel=k.offsetWidth===2;s.body.removeChild(k).style.display="none"});a=function(k){var n= +s.createElement("div");k="on"+k;var r=k in n;if(!r){n.setAttribute(k,"return;");r=typeof n[k]==="function"}return r};c.support.submitBubbles=a("submit");c.support.changeBubbles=a("change");a=b=d=e=j=null}})();c.props={"for":"htmlFor","class":"className",readonly:"readOnly",maxlength:"maxLength",cellspacing:"cellSpacing",rowspan:"rowSpan",colspan:"colSpan",tabindex:"tabIndex",usemap:"useMap",frameborder:"frameBorder"};var G="jQuery"+J(),Ya=0,za={};c.extend({cache:{},expando:G,noData:{embed:true,object:true, +applet:true},data:function(a,b,d){if(!(a.nodeName&&c.noData[a.nodeName.toLowerCase()])){a=a==A?za:a;var f=a[G],e=c.cache;if(!f&&typeof b==="string"&&d===w)return null;f||(f=++Ya);if(typeof b==="object"){a[G]=f;e[f]=c.extend(true,{},b)}else if(!e[f]){a[G]=f;e[f]={}}a=e[f];if(d!==w)a[b]=d;return typeof b==="string"?a[b]:a}},removeData:function(a,b){if(!(a.nodeName&&c.noData[a.nodeName.toLowerCase()])){a=a==A?za:a;var d=a[G],f=c.cache,e=f[d];if(b){if(e){delete e[b];c.isEmptyObject(e)&&c.removeData(a)}}else{if(c.support.deleteExpando)delete a[c.expando]; +else a.removeAttribute&&a.removeAttribute(c.expando);delete f[d]}}}});c.fn.extend({data:function(a,b){if(typeof a==="undefined"&&this.length)return c.data(this[0]);else if(typeof a==="object")return this.each(function(){c.data(this,a)});var d=a.split(".");d[1]=d[1]?"."+d[1]:"";if(b===w){var f=this.triggerHandler("getData"+d[1]+"!",[d[0]]);if(f===w&&this.length)f=c.data(this[0],a);return f===w&&d[1]?this.data(d[0]):f}else return this.trigger("setData"+d[1]+"!",[d[0],b]).each(function(){c.data(this, +a,b)})},removeData:function(a){return this.each(function(){c.removeData(this,a)})}});c.extend({queue:function(a,b,d){if(a){b=(b||"fx")+"queue";var f=c.data(a,b);if(!d)return f||[];if(!f||c.isArray(d))f=c.data(a,b,c.makeArray(d));else f.push(d);return f}},dequeue:function(a,b){b=b||"fx";var 
d=c.queue(a,b),f=d.shift();if(f==="inprogress")f=d.shift();if(f){b==="fx"&&d.unshift("inprogress");f.call(a,function(){c.dequeue(a,b)})}}});c.fn.extend({queue:function(a,b){if(typeof a!=="string"){b=a;a="fx"}if(b=== +w)return c.queue(this[0],a);return this.each(function(){var d=c.queue(this,a,b);a==="fx"&&d[0]!=="inprogress"&&c.dequeue(this,a)})},dequeue:function(a){return this.each(function(){c.dequeue(this,a)})},delay:function(a,b){a=c.fx?c.fx.speeds[a]||a:a;b=b||"fx";return this.queue(b,function(){var d=this;setTimeout(function(){c.dequeue(d,b)},a)})},clearQueue:function(a){return this.queue(a||"fx",[])}});var Aa=/[\n\t]/g,ca=/\s+/,Za=/\r/g,$a=/href|src|style/,ab=/(button|input)/i,bb=/(button|input|object|select|textarea)/i, +cb=/^(a|area)$/i,Ba=/radio|checkbox/;c.fn.extend({attr:function(a,b){return X(this,a,b,true,c.attr)},removeAttr:function(a){return this.each(function(){c.attr(this,a,"");this.nodeType===1&&this.removeAttribute(a)})},addClass:function(a){if(c.isFunction(a))return this.each(function(n){var r=c(this);r.addClass(a.call(this,n,r.attr("class")))});if(a&&typeof a==="string")for(var b=(a||"").split(ca),d=0,f=this.length;d-1)return true;return false},val:function(a){if(a===w){var b=this[0];if(b){if(c.nodeName(b,"option"))return(b.attributes.value||{}).specified?b.value:b.text;if(c.nodeName(b,"select")){var d=b.selectedIndex,f=[],e=b.options;b=b.type==="select-one";if(d<0)return null;var j=b?d:0;for(d=b?d+1:e.length;j=0;else if(c.nodeName(this,"select")){var u=c.makeArray(r);c("option",this).each(function(){this.selected= +c.inArray(c(this).val(),u)>=0});if(!u.length)this.selectedIndex=-1}else this.value=r}})}});c.extend({attrFn:{val:true,css:true,html:true,text:true,data:true,width:true,height:true,offset:true},attr:function(a,b,d,f){if(!a||a.nodeType===3||a.nodeType===8)return w;if(f&&b in c.attrFn)return c(a)[b](d);f=a.nodeType!==1||!c.isXMLDoc(a);var e=d!==w;b=f&&c.props[b]||b;if(a.nodeType===1){var j=$a.test(b);if(b in a&&f&&!j){if(e){b==="type"&&ab.test(a.nodeName)&&a.parentNode&&c.error("type property can't be changed"); +a[b]=d}if(c.nodeName(a,"form")&&a.getAttributeNode(b))return a.getAttributeNode(b).nodeValue;if(b==="tabIndex")return(b=a.getAttributeNode("tabIndex"))&&b.specified?b.value:bb.test(a.nodeName)||cb.test(a.nodeName)&&a.href?0:w;return a[b]}if(!c.support.style&&f&&b==="style"){if(e)a.style.cssText=""+d;return a.style.cssText}e&&a.setAttribute(b,""+d);a=!c.support.hrefNormalized&&f&&j?a.getAttribute(b,2):a.getAttribute(b);return a===null?w:a}return c.style(a,b,d)}});var O=/\.(.*)$/,db=function(a){return a.replace(/[^\w\s\.\|`]/g, +function(b){return"\\"+b})};c.event={add:function(a,b,d,f){if(!(a.nodeType===3||a.nodeType===8)){if(a.setInterval&&a!==A&&!a.frameElement)a=A;var e,j;if(d.handler){e=d;d=e.handler}if(!d.guid)d.guid=c.guid++;if(j=c.data(a)){var i=j.events=j.events||{},o=j.handle;if(!o)j.handle=o=function(){return typeof c!=="undefined"&&!c.event.triggered?c.event.handle.apply(o.elem,arguments):w};o.elem=a;b=b.split(" ");for(var k,n=0,r;k=b[n++];){j=e?c.extend({},e):{handler:d,data:f};if(k.indexOf(".")>-1){r=k.split("."); +k=r.shift();j.namespace=r.slice(0).sort().join(".")}else{r=[];j.namespace=""}j.type=k;j.guid=d.guid;var u=i[k],z=c.event.special[k]||{};if(!u){u=i[k]=[];if(!z.setup||z.setup.call(a,f,r,o)===false)if(a.addEventListener)a.addEventListener(k,o,false);else 
a.attachEvent&&a.attachEvent("on"+k,o)}if(z.add){z.add.call(a,j);if(!j.handler.guid)j.handler.guid=d.guid}u.push(j);c.event.global[k]=true}a=null}}},global:{},remove:function(a,b,d,f){if(!(a.nodeType===3||a.nodeType===8)){var e,j=0,i,o,k,n,r,u,z=c.data(a), +C=z&&z.events;if(z&&C){if(b&&b.type){d=b.handler;b=b.type}if(!b||typeof b==="string"&&b.charAt(0)==="."){b=b||"";for(e in C)c.event.remove(a,e+b)}else{for(b=b.split(" ");e=b[j++];){n=e;i=e.indexOf(".")<0;o=[];if(!i){o=e.split(".");e=o.shift();k=new RegExp("(^|\\.)"+c.map(o.slice(0).sort(),db).join("\\.(?:.*\\.)?")+"(\\.|$)")}if(r=C[e])if(d){n=c.event.special[e]||{};for(B=f||0;B=0){a.type= +e=e.slice(0,-1);a.exclusive=true}if(!d){a.stopPropagation();c.event.global[e]&&c.each(c.cache,function(){this.events&&this.events[e]&&c.event.trigger(a,b,this.handle.elem)})}if(!d||d.nodeType===3||d.nodeType===8)return w;a.result=w;a.target=d;b=c.makeArray(b);b.unshift(a)}a.currentTarget=d;(f=c.data(d,"handle"))&&f.apply(d,b);f=d.parentNode||d.ownerDocument;try{if(!(d&&d.nodeName&&c.noData[d.nodeName.toLowerCase()]))if(d["on"+e]&&d["on"+e].apply(d,b)===false)a.result=false}catch(j){}if(!a.isPropagationStopped()&& +f)c.event.trigger(a,b,f,true);else if(!a.isDefaultPrevented()){f=a.target;var i,o=c.nodeName(f,"a")&&e==="click",k=c.event.special[e]||{};if((!k._default||k._default.call(d,a)===false)&&!o&&!(f&&f.nodeName&&c.noData[f.nodeName.toLowerCase()])){try{if(f[e]){if(i=f["on"+e])f["on"+e]=null;c.event.triggered=true;f[e]()}}catch(n){}if(i)f["on"+e]=i;c.event.triggered=false}}},handle:function(a){var b,d,f,e;a=arguments[0]=c.event.fix(a||A.event);a.currentTarget=this;b=a.type.indexOf(".")<0&&!a.exclusive; +if(!b){d=a.type.split(".");a.type=d.shift();f=new RegExp("(^|\\.)"+d.slice(0).sort().join("\\.(?:.*\\.)?")+"(\\.|$)")}e=c.data(this,"events");d=e[a.type];if(e&&d){d=d.slice(0);e=0;for(var j=d.length;e-1?c.map(a.options,function(f){return f.selected}).join("-"):"";else if(a.nodeName.toLowerCase()==="select")d=a.selectedIndex;return d},fa=function(a,b){var d=a.target,f,e;if(!(!da.test(d.nodeName)||d.readOnly)){f=c.data(d,"_change_data");e=Fa(d);if(a.type!=="focusout"||d.type!=="radio")c.data(d,"_change_data", +e);if(!(f===w||e===f))if(f!=null||e){a.type="change";return c.event.trigger(a,b,d)}}};c.event.special.change={filters:{focusout:fa,click:function(a){var b=a.target,d=b.type;if(d==="radio"||d==="checkbox"||b.nodeName.toLowerCase()==="select")return fa.call(this,a)},keydown:function(a){var b=a.target,d=b.type;if(a.keyCode===13&&b.nodeName.toLowerCase()!=="textarea"||a.keyCode===32&&(d==="checkbox"||d==="radio")||d==="select-multiple")return fa.call(this,a)},beforeactivate:function(a){a=a.target;c.data(a, +"_change_data",Fa(a))}},setup:function(){if(this.type==="file")return false;for(var a in ea)c.event.add(this,a+".specialChange",ea[a]);return da.test(this.nodeName)},teardown:function(){c.event.remove(this,".specialChange");return da.test(this.nodeName)}};ea=c.event.special.change.filters}s.addEventListener&&c.each({focus:"focusin",blur:"focusout"},function(a,b){function d(f){f=c.event.fix(f);f.type=b;return c.event.handle.call(this,f)}c.event.special[b]={setup:function(){this.addEventListener(a, +d,true)},teardown:function(){this.removeEventListener(a,d,true)}}});c.each(["bind","one"],function(a,b){c.fn[b]=function(d,f,e){if(typeof d==="object"){for(var j in d)this[b](j,f,d[j],e);return this}if(c.isFunction(f)){e=f;f=w}var i=b==="one"?c.proxy(e,function(k){c(this).unbind(k,i);return 
e.apply(this,arguments)}):e;if(d==="unload"&&b!=="one")this.one(d,f,e);else{j=0;for(var o=this.length;j0){y=t;break}}t=t[g]}m[q]=y}}}var f=/((?:\((?:\([^()]+\)|[^()]+)+\)|\[(?:\[[^[\]]*\]|['"][^'"]*['"]|[^[\]'"]+)+\]|\\.|[^ >+~,(\[\\]+)+|[>+~])(\s*,\s*)?((?:.|\r|\n)*)/g, +e=0,j=Object.prototype.toString,i=false,o=true;[0,0].sort(function(){o=false;return 0});var k=function(g,h,l,m){l=l||[];var q=h=h||s;if(h.nodeType!==1&&h.nodeType!==9)return[];if(!g||typeof g!=="string")return l;for(var p=[],v,t,y,S,H=true,M=x(h),I=g;(f.exec(""),v=f.exec(I))!==null;){I=v[3];p.push(v[1]);if(v[2]){S=v[3];break}}if(p.length>1&&r.exec(g))if(p.length===2&&n.relative[p[0]])t=ga(p[0]+p[1],h);else for(t=n.relative[p[0]]?[h]:k(p.shift(),h);p.length;){g=p.shift();if(n.relative[g])g+=p.shift(); +t=ga(g,t)}else{if(!m&&p.length>1&&h.nodeType===9&&!M&&n.match.ID.test(p[0])&&!n.match.ID.test(p[p.length-1])){v=k.find(p.shift(),h,M);h=v.expr?k.filter(v.expr,v.set)[0]:v.set[0]}if(h){v=m?{expr:p.pop(),set:z(m)}:k.find(p.pop(),p.length===1&&(p[0]==="~"||p[0]==="+")&&h.parentNode?h.parentNode:h,M);t=v.expr?k.filter(v.expr,v.set):v.set;if(p.length>0)y=z(t);else H=false;for(;p.length;){var D=p.pop();v=D;if(n.relative[D])v=p.pop();else D="";if(v==null)v=h;n.relative[D](y,v,M)}}else y=[]}y||(y=t);y||k.error(D|| +g);if(j.call(y)==="[object Array]")if(H)if(h&&h.nodeType===1)for(g=0;y[g]!=null;g++){if(y[g]&&(y[g]===true||y[g].nodeType===1&&E(h,y[g])))l.push(t[g])}else for(g=0;y[g]!=null;g++)y[g]&&y[g].nodeType===1&&l.push(t[g]);else l.push.apply(l,y);else z(y,l);if(S){k(S,q,l,m);k.uniqueSort(l)}return l};k.uniqueSort=function(g){if(B){i=o;g.sort(B);if(i)for(var h=1;h":function(g,h){var l=typeof h==="string";if(l&&!/\W/.test(h)){h=h.toLowerCase();for(var m=0,q=g.length;m=0))l||m.push(v);else if(l)h[p]=false;return false},ID:function(g){return g[1].replace(/\\/g,"")},TAG:function(g){return g[1].toLowerCase()}, +CHILD:function(g){if(g[1]==="nth"){var h=/(-?)(\d*)n((?:\+|-)?\d*)/.exec(g[2]==="even"&&"2n"||g[2]==="odd"&&"2n+1"||!/\D/.test(g[2])&&"0n+"+g[2]||g[2]);g[2]=h[1]+(h[2]||1)-0;g[3]=h[3]-0}g[0]=e++;return g},ATTR:function(g,h,l,m,q,p){h=g[1].replace(/\\/g,"");if(!p&&n.attrMap[h])g[1]=n.attrMap[h];if(g[2]==="~=")g[4]=" "+g[4]+" ";return g},PSEUDO:function(g,h,l,m,q){if(g[1]==="not")if((f.exec(g[3])||"").length>1||/^\w/.test(g[3]))g[3]=k(g[3],null,null,h);else{g=k.filter(g[3],h,l,true^q);l||m.push.apply(m, +g);return false}else if(n.match.POS.test(g[0])||n.match.CHILD.test(g[0]))return true;return g},POS:function(g){g.unshift(true);return g}},filters:{enabled:function(g){return g.disabled===false&&g.type!=="hidden"},disabled:function(g){return g.disabled===true},checked:function(g){return g.checked===true},selected:function(g){return g.selected===true},parent:function(g){return!!g.firstChild},empty:function(g){return!g.firstChild},has:function(g,h,l){return!!k(l[3],g).length},header:function(g){return/h\d/i.test(g.nodeName)}, +text:function(g){return"text"===g.type},radio:function(g){return"radio"===g.type},checkbox:function(g){return"checkbox"===g.type},file:function(g){return"file"===g.type},password:function(g){return"password"===g.type},submit:function(g){return"submit"===g.type},image:function(g){return"image"===g.type},reset:function(g){return"reset"===g.type},button:function(g){return"button"===g.type||g.nodeName.toLowerCase()==="button"},input:function(g){return/input|select|textarea|button/i.test(g.nodeName)}}, +setFilters:{first:function(g,h){return h===0},last:function(g,h,l,m){return 
h===m.length-1},even:function(g,h){return h%2===0},odd:function(g,h){return h%2===1},lt:function(g,h,l){return hl[3]-0},nth:function(g,h,l){return l[3]-0===h},eq:function(g,h,l){return l[3]-0===h}},filter:{PSEUDO:function(g,h,l,m){var q=h[1],p=n.filters[q];if(p)return p(g,l,h,m);else if(q==="contains")return(g.textContent||g.innerText||a([g])||"").indexOf(h[3])>=0;else if(q==="not"){h= +h[3];l=0;for(m=h.length;l=0}},ID:function(g,h){return g.nodeType===1&&g.getAttribute("id")===h},TAG:function(g,h){return h==="*"&&g.nodeType===1||g.nodeName.toLowerCase()===h},CLASS:function(g,h){return(" "+(g.className||g.getAttribute("class"))+" ").indexOf(h)>-1},ATTR:function(g,h){var l=h[1];g=n.attrHandle[l]?n.attrHandle[l](g):g[l]!=null?g[l]:g.getAttribute(l);l=g+"";var m=h[2];h=h[4];return g==null?m==="!=":m=== +"="?l===h:m==="*="?l.indexOf(h)>=0:m==="~="?(" "+l+" ").indexOf(h)>=0:!h?l&&g!==false:m==="!="?l!==h:m==="^="?l.indexOf(h)===0:m==="$="?l.substr(l.length-h.length)===h:m==="|="?l===h||l.substr(0,h.length+1)===h+"-":false},POS:function(g,h,l,m){var q=n.setFilters[h[2]];if(q)return q(g,l,h,m)}}},r=n.match.POS;for(var u in n.match){n.match[u]=new RegExp(n.match[u].source+/(?![^\[]*\])(?![^\(]*\))/.source);n.leftMatch[u]=new RegExp(/(^(?:.|\r|\n)*?)/.source+n.match[u].source.replace(/\\(\d+)/g,function(g, +h){return"\\"+(h-0+1)}))}var z=function(g,h){g=Array.prototype.slice.call(g,0);if(h){h.push.apply(h,g);return h}return g};try{Array.prototype.slice.call(s.documentElement.childNodes,0)}catch(C){z=function(g,h){h=h||[];if(j.call(g)==="[object Array]")Array.prototype.push.apply(h,g);else if(typeof g.length==="number")for(var l=0,m=g.length;l";var l=s.documentElement;l.insertBefore(g,l.firstChild);if(s.getElementById(h)){n.find.ID=function(m,q,p){if(typeof q.getElementById!=="undefined"&&!p)return(q=q.getElementById(m[1]))?q.id===m[1]||typeof q.getAttributeNode!=="undefined"&& +q.getAttributeNode("id").nodeValue===m[1]?[q]:w:[]};n.filter.ID=function(m,q){var p=typeof m.getAttributeNode!=="undefined"&&m.getAttributeNode("id");return m.nodeType===1&&p&&p.nodeValue===q}}l.removeChild(g);l=g=null})();(function(){var g=s.createElement("div");g.appendChild(s.createComment(""));if(g.getElementsByTagName("*").length>0)n.find.TAG=function(h,l){l=l.getElementsByTagName(h[1]);if(h[1]==="*"){h=[];for(var m=0;l[m];m++)l[m].nodeType===1&&h.push(l[m]);l=h}return l};g.innerHTML=""; +if(g.firstChild&&typeof g.firstChild.getAttribute!=="undefined"&&g.firstChild.getAttribute("href")!=="#")n.attrHandle.href=function(h){return h.getAttribute("href",2)};g=null})();s.querySelectorAll&&function(){var g=k,h=s.createElement("div");h.innerHTML="
<p class='TEST'></p>
    ";if(!(h.querySelectorAll&&h.querySelectorAll(".TEST").length===0)){k=function(m,q,p,v){q=q||s;if(!v&&q.nodeType===9&&!x(q))try{return z(q.querySelectorAll(m),p)}catch(t){}return g(m,q,p,v)};for(var l in g)k[l]=g[l];h=null}}(); +(function(){var g=s.createElement("div");g.innerHTML="
    ";if(!(!g.getElementsByClassName||g.getElementsByClassName("e").length===0)){g.lastChild.className="e";if(g.getElementsByClassName("e").length!==1){n.order.splice(1,0,"CLASS");n.find.CLASS=function(h,l,m){if(typeof l.getElementsByClassName!=="undefined"&&!m)return l.getElementsByClassName(h[1])};g=null}}})();var E=s.compareDocumentPosition?function(g,h){return!!(g.compareDocumentPosition(h)&16)}: +function(g,h){return g!==h&&(g.contains?g.contains(h):true)},x=function(g){return(g=(g?g.ownerDocument||g:0).documentElement)?g.nodeName!=="HTML":false},ga=function(g,h){var l=[],m="",q;for(h=h.nodeType?[h]:h;q=n.match.PSEUDO.exec(g);){m+=q[0];g=g.replace(n.match.PSEUDO,"")}g=n.relative[g]?g+"*":g;q=0;for(var p=h.length;q=0===d})};c.fn.extend({find:function(a){for(var b=this.pushStack("","find",a),d=0,f=0,e=this.length;f0)for(var j=d;j0},closest:function(a,b){if(c.isArray(a)){var d=[],f=this[0],e,j= +{},i;if(f&&a.length){e=0;for(var o=a.length;e-1:c(f).is(e)){d.push({selector:i,elem:f});delete j[i]}}f=f.parentNode}}return d}var k=c.expr.match.POS.test(a)?c(a,b||this.context):null;return this.map(function(n,r){for(;r&&r.ownerDocument&&r!==b;){if(k?k.index(r)>-1:c(r).is(a))return r;r=r.parentNode}return null})},index:function(a){if(!a||typeof a=== +"string")return c.inArray(this[0],a?c(a):this.parent().children());return c.inArray(a.jquery?a[0]:a,this)},add:function(a,b){a=typeof a==="string"?c(a,b||this.context):c.makeArray(a);b=c.merge(this.get(),a);return this.pushStack(qa(a[0])||qa(b[0])?b:c.unique(b))},andSelf:function(){return this.add(this.prevObject)}});c.each({parent:function(a){return(a=a.parentNode)&&a.nodeType!==11?a:null},parents:function(a){return c.dir(a,"parentNode")},parentsUntil:function(a,b,d){return c.dir(a,"parentNode", +d)},next:function(a){return c.nth(a,2,"nextSibling")},prev:function(a){return c.nth(a,2,"previousSibling")},nextAll:function(a){return c.dir(a,"nextSibling")},prevAll:function(a){return c.dir(a,"previousSibling")},nextUntil:function(a,b,d){return c.dir(a,"nextSibling",d)},prevUntil:function(a,b,d){return c.dir(a,"previousSibling",d)},siblings:function(a){return c.sibling(a.parentNode.firstChild,a)},children:function(a){return c.sibling(a.firstChild)},contents:function(a){return c.nodeName(a,"iframe")? +a.contentDocument||a.contentWindow.document:c.makeArray(a.childNodes)}},function(a,b){c.fn[a]=function(d,f){var e=c.map(this,b,d);eb.test(a)||(f=d);if(f&&typeof f==="string")e=c.filter(f,e);e=this.length>1?c.unique(e):e;if((this.length>1||gb.test(f))&&fb.test(a))e=e.reverse();return this.pushStack(e,a,R.call(arguments).join(","))}});c.extend({filter:function(a,b,d){if(d)a=":not("+a+")";return c.find.matches(a,b)},dir:function(a,b,d){var f=[];for(a=a[b];a&&a.nodeType!==9&&(d===w||a.nodeType!==1||!c(a).is(d));){a.nodeType=== +1&&f.push(a);a=a[b]}return f},nth:function(a,b,d){b=b||1;for(var f=0;a;a=a[d])if(a.nodeType===1&&++f===b)break;return a},sibling:function(a,b){for(var d=[];a;a=a.nextSibling)a.nodeType===1&&a!==b&&d.push(a);return d}});var Ja=/ jQuery\d+="(?:\d+|null)"/g,V=/^\s+/,Ka=/(<([\w:]+)[^>]*?)\/>/g,hb=/^(?:area|br|col|embed|hr|img|input|link|meta|param)$/i,La=/<([\w:]+)/,ib=/"},F={option:[1,""],legend:[1,"
    ","
    "],thead:[1,"","
    "],tr:[2,"","
    "],td:[3,"","
    "],col:[2,"","
    "],area:[1,"",""],_default:[0,"",""]};F.optgroup=F.option;F.tbody=F.tfoot=F.colgroup=F.caption=F.thead;F.th=F.td;if(!c.support.htmlSerialize)F._default=[1,"div
    ","
    "];c.fn.extend({text:function(a){if(c.isFunction(a))return this.each(function(b){var d= +c(this);d.text(a.call(this,b,d.text()))});if(typeof a!=="object"&&a!==w)return this.empty().append((this[0]&&this[0].ownerDocument||s).createTextNode(a));return c.text(this)},wrapAll:function(a){if(c.isFunction(a))return this.each(function(d){c(this).wrapAll(a.call(this,d))});if(this[0]){var b=c(a,this[0].ownerDocument).eq(0).clone(true);this[0].parentNode&&b.insertBefore(this[0]);b.map(function(){for(var d=this;d.firstChild&&d.firstChild.nodeType===1;)d=d.firstChild;return d}).append(this)}return this}, +wrapInner:function(a){if(c.isFunction(a))return this.each(function(b){c(this).wrapInner(a.call(this,b))});return this.each(function(){var b=c(this),d=b.contents();d.length?d.wrapAll(a):b.append(a)})},wrap:function(a){return this.each(function(){c(this).wrapAll(a)})},unwrap:function(){return this.parent().each(function(){c.nodeName(this,"body")||c(this).replaceWith(this.childNodes)}).end()},append:function(){return this.domManip(arguments,true,function(a){this.nodeType===1&&this.appendChild(a)})}, +prepend:function(){return this.domManip(arguments,true,function(a){this.nodeType===1&&this.insertBefore(a,this.firstChild)})},before:function(){if(this[0]&&this[0].parentNode)return this.domManip(arguments,false,function(b){this.parentNode.insertBefore(b,this)});else if(arguments.length){var a=c(arguments[0]);a.push.apply(a,this.toArray());return this.pushStack(a,"before",arguments)}},after:function(){if(this[0]&&this[0].parentNode)return this.domManip(arguments,false,function(b){this.parentNode.insertBefore(b, +this.nextSibling)});else if(arguments.length){var a=this.pushStack(this,"after",arguments);a.push.apply(a,c(arguments[0]).toArray());return a}},remove:function(a,b){for(var d=0,f;(f=this[d])!=null;d++)if(!a||c.filter(a,[f]).length){if(!b&&f.nodeType===1){c.cleanData(f.getElementsByTagName("*"));c.cleanData([f])}f.parentNode&&f.parentNode.removeChild(f)}return this},empty:function(){for(var a=0,b;(b=this[a])!=null;a++)for(b.nodeType===1&&c.cleanData(b.getElementsByTagName("*"));b.firstChild;)b.removeChild(b.firstChild); +return this},clone:function(a){var b=this.map(function(){if(!c.support.noCloneEvent&&!c.isXMLDoc(this)){var d=this.outerHTML,f=this.ownerDocument;if(!d){d=f.createElement("div");d.appendChild(this.cloneNode(true));d=d.innerHTML}return c.clean([d.replace(Ja,"").replace(/=([^="'>\s]+\/)>/g,'="$1">').replace(V,"")],f)[0]}else return this.cloneNode(true)});if(a===true){ra(this,b);ra(this.find("*"),b.find("*"))}return b},html:function(a){if(a===w)return this[0]&&this[0].nodeType===1?this[0].innerHTML.replace(Ja, +""):null;else if(typeof a==="string"&&!ta.test(a)&&(c.support.leadingWhitespace||!V.test(a))&&!F[(La.exec(a)||["",""])[1].toLowerCase()]){a=a.replace(Ka,Ma);try{for(var b=0,d=this.length;b0||e.cacheable||this.length>1?k.cloneNode(true):k)}o.length&&c.each(o,Qa)}return this}});c.fragments={};c.each({appendTo:"append",prependTo:"prepend",insertBefore:"before",insertAfter:"after",replaceAll:"replaceWith"},function(a,b){c.fn[a]=function(d){var f=[];d=c(d);var e=this.length===1&&this[0].parentNode;if(e&&e.nodeType===11&&e.childNodes.length===1&&d.length===1){d[b](this[0]); +return this}else{e=0;for(var j=d.length;e0?this.clone(true):this).get();c.fn[b].apply(c(d[e]),i);f=f.concat(i)}return this.pushStack(f,a,d.selector)}}});c.extend({clean:function(a,b,d,f){b=b||s;if(typeof b.createElement==="undefined")b=b.ownerDocument||b[0]&&b[0].ownerDocument||s;for(var 
e=[],j=0,i;(i=a[j])!=null;j++){if(typeof i==="number")i+="";if(i){if(typeof i==="string"&&!jb.test(i))i=b.createTextNode(i);else if(typeof i==="string"){i=i.replace(Ka,Ma);var o=(La.exec(i)||["", +""])[1].toLowerCase(),k=F[o]||F._default,n=k[0],r=b.createElement("div");for(r.innerHTML=k[1]+i+k[2];n--;)r=r.lastChild;if(!c.support.tbody){n=ib.test(i);o=o==="table"&&!n?r.firstChild&&r.firstChild.childNodes:k[1]===""&&!n?r.childNodes:[];for(k=o.length-1;k>=0;--k)c.nodeName(o[k],"tbody")&&!o[k].childNodes.length&&o[k].parentNode.removeChild(o[k])}!c.support.leadingWhitespace&&V.test(i)&&r.insertBefore(b.createTextNode(V.exec(i)[0]),r.firstChild);i=r.childNodes}if(i.nodeType)e.push(i);else e= +c.merge(e,i)}}if(d)for(j=0;e[j];j++)if(f&&c.nodeName(e[j],"script")&&(!e[j].type||e[j].type.toLowerCase()==="text/javascript"))f.push(e[j].parentNode?e[j].parentNode.removeChild(e[j]):e[j]);else{e[j].nodeType===1&&e.splice.apply(e,[j+1,0].concat(c.makeArray(e[j].getElementsByTagName("script"))));d.appendChild(e[j])}return e},cleanData:function(a){for(var b,d,f=c.cache,e=c.event.special,j=c.support.deleteExpando,i=0,o;(o=a[i])!=null;i++)if(d=o[c.expando]){b=f[d];if(b.events)for(var k in b.events)e[k]? +c.event.remove(o,k):Ca(o,k,b.handle);if(j)delete o[c.expando];else o.removeAttribute&&o.removeAttribute(c.expando);delete f[d]}}});var kb=/z-?index|font-?weight|opacity|zoom|line-?height/i,Na=/alpha\([^)]*\)/,Oa=/opacity=([^)]*)/,ha=/float/i,ia=/-([a-z])/ig,lb=/([A-Z])/g,mb=/^-?\d+(?:px)?$/i,nb=/^-?\d/,ob={position:"absolute",visibility:"hidden",display:"block"},pb=["Left","Right"],qb=["Top","Bottom"],rb=s.defaultView&&s.defaultView.getComputedStyle,Pa=c.support.cssFloat?"cssFloat":"styleFloat",ja= +function(a,b){return b.toUpperCase()};c.fn.css=function(a,b){return X(this,a,b,true,function(d,f,e){if(e===w)return c.curCSS(d,f);if(typeof e==="number"&&!kb.test(f))e+="px";c.style(d,f,e)})};c.extend({style:function(a,b,d){if(!a||a.nodeType===3||a.nodeType===8)return w;if((b==="width"||b==="height")&&parseFloat(d)<0)d=w;var f=a.style||a,e=d!==w;if(!c.support.opacity&&b==="opacity"){if(e){f.zoom=1;b=parseInt(d,10)+""==="NaN"?"":"alpha(opacity="+d*100+")";a=f.filter||c.curCSS(a,"filter")||"";f.filter= +Na.test(a)?a.replace(Na,b):b}return f.filter&&f.filter.indexOf("opacity=")>=0?parseFloat(Oa.exec(f.filter)[1])/100+"":""}if(ha.test(b))b=Pa;b=b.replace(ia,ja);if(e)f[b]=d;return f[b]},css:function(a,b,d,f){if(b==="width"||b==="height"){var e,j=b==="width"?pb:qb;function i(){e=b==="width"?a.offsetWidth:a.offsetHeight;f!=="border"&&c.each(j,function(){f||(e-=parseFloat(c.curCSS(a,"padding"+this,true))||0);if(f==="margin")e+=parseFloat(c.curCSS(a,"margin"+this,true))||0;else e-=parseFloat(c.curCSS(a, +"border"+this+"Width",true))||0})}a.offsetWidth!==0?i():c.swap(a,ob,i);return Math.max(0,Math.round(e))}return c.curCSS(a,b,d)},curCSS:function(a,b,d){var f,e=a.style;if(!c.support.opacity&&b==="opacity"&&a.currentStyle){f=Oa.test(a.currentStyle.filter||"")?parseFloat(RegExp.$1)/100+"":"";return f===""?"1":f}if(ha.test(b))b=Pa;if(!d&&e&&e[b])f=e[b];else if(rb){if(ha.test(b))b="float";b=b.replace(lb,"-$1").toLowerCase();e=a.ownerDocument.defaultView;if(!e)return null;if(a=e.getComputedStyle(a,null))f= +a.getPropertyValue(b);if(b==="opacity"&&f==="")f="1"}else if(a.currentStyle){d=b.replace(ia,ja);f=a.currentStyle[b]||a.currentStyle[d];if(!mb.test(f)&&nb.test(f)){b=e.left;var 
j=a.runtimeStyle.left;a.runtimeStyle.left=a.currentStyle.left;e.left=d==="fontSize"?"1em":f||0;f=e.pixelLeft+"px";e.left=b;a.runtimeStyle.left=j}}return f},swap:function(a,b,d){var f={};for(var e in b){f[e]=a.style[e];a.style[e]=b[e]}d.call(a);for(e in b)a.style[e]=f[e]}});if(c.expr&&c.expr.filters){c.expr.filters.hidden=function(a){var b= +a.offsetWidth,d=a.offsetHeight,f=a.nodeName.toLowerCase()==="tr";return b===0&&d===0&&!f?true:b>0&&d>0&&!f?false:c.curCSS(a,"display")==="none"};c.expr.filters.visible=function(a){return!c.expr.filters.hidden(a)}}var sb=J(),tb=//gi,ub=/select|textarea/i,vb=/color|date|datetime|email|hidden|month|number|password|range|search|tel|text|time|url|week/i,N=/=\?(&|$)/,ka=/\?/,wb=/(\?|&)_=.*?(&|$)/,xb=/^(\w+:)?\/\/([^\/?#]+)/,yb=/%20/g,zb=c.fn.load;c.fn.extend({load:function(a,b,d){if(typeof a!== +"string")return zb.call(this,a);else if(!this.length)return this;var f=a.indexOf(" ");if(f>=0){var e=a.slice(f,a.length);a=a.slice(0,f)}f="GET";if(b)if(c.isFunction(b)){d=b;b=null}else if(typeof b==="object"){b=c.param(b,c.ajaxSettings.traditional);f="POST"}var j=this;c.ajax({url:a,type:f,dataType:"html",data:b,complete:function(i,o){if(o==="success"||o==="notmodified")j.html(e?c("
    ").append(i.responseText.replace(tb,"")).find(e):i.responseText);d&&j.each(d,[i.responseText,o,i])}});return this}, +serialize:function(){return c.param(this.serializeArray())},serializeArray:function(){return this.map(function(){return this.elements?c.makeArray(this.elements):this}).filter(function(){return this.name&&!this.disabled&&(this.checked||ub.test(this.nodeName)||vb.test(this.type))}).map(function(a,b){a=c(this).val();return a==null?null:c.isArray(a)?c.map(a,function(d){return{name:b.name,value:d}}):{name:b.name,value:a}}).get()}});c.each("ajaxStart ajaxStop ajaxComplete ajaxError ajaxSuccess ajaxSend".split(" "), +function(a,b){c.fn[b]=function(d){return this.bind(b,d)}});c.extend({get:function(a,b,d,f){if(c.isFunction(b)){f=f||d;d=b;b=null}return c.ajax({type:"GET",url:a,data:b,success:d,dataType:f})},getScript:function(a,b){return c.get(a,null,b,"script")},getJSON:function(a,b,d){return c.get(a,b,d,"json")},post:function(a,b,d,f){if(c.isFunction(b)){f=f||d;d=b;b={}}return c.ajax({type:"POST",url:a,data:b,success:d,dataType:f})},ajaxSetup:function(a){c.extend(c.ajaxSettings,a)},ajaxSettings:{url:location.href, +global:true,type:"GET",contentType:"application/x-www-form-urlencoded",processData:true,async:true,xhr:A.XMLHttpRequest&&(A.location.protocol!=="file:"||!A.ActiveXObject)?function(){return new A.XMLHttpRequest}:function(){try{return new A.ActiveXObject("Microsoft.XMLHTTP")}catch(a){}},accepts:{xml:"application/xml, text/xml",html:"text/html",script:"text/javascript, application/javascript",json:"application/json, text/javascript",text:"text/plain",_default:"*/*"}},lastModified:{},etag:{},ajax:function(a){function b(){e.success&& +e.success.call(k,o,i,x);e.global&&f("ajaxSuccess",[x,e])}function d(){e.complete&&e.complete.call(k,x,i);e.global&&f("ajaxComplete",[x,e]);e.global&&!--c.active&&c.event.trigger("ajaxStop")}function f(q,p){(e.context?c(e.context):c.event).trigger(q,p)}var e=c.extend(true,{},c.ajaxSettings,a),j,i,o,k=a&&a.context||e,n=e.type.toUpperCase();if(e.data&&e.processData&&typeof e.data!=="string")e.data=c.param(e.data,e.traditional);if(e.dataType==="jsonp"){if(n==="GET")N.test(e.url)||(e.url+=(ka.test(e.url)? 
+"&":"?")+(e.jsonp||"callback")+"=?");else if(!e.data||!N.test(e.data))e.data=(e.data?e.data+"&":"")+(e.jsonp||"callback")+"=?";e.dataType="json"}if(e.dataType==="json"&&(e.data&&N.test(e.data)||N.test(e.url))){j=e.jsonpCallback||"jsonp"+sb++;if(e.data)e.data=(e.data+"").replace(N,"="+j+"$1");e.url=e.url.replace(N,"="+j+"$1");e.dataType="script";A[j]=A[j]||function(q){o=q;b();d();A[j]=w;try{delete A[j]}catch(p){}z&&z.removeChild(C)}}if(e.dataType==="script"&&e.cache===null)e.cache=false;if(e.cache=== +false&&n==="GET"){var r=J(),u=e.url.replace(wb,"$1_="+r+"$2");e.url=u+(u===e.url?(ka.test(e.url)?"&":"?")+"_="+r:"")}if(e.data&&n==="GET")e.url+=(ka.test(e.url)?"&":"?")+e.data;e.global&&!c.active++&&c.event.trigger("ajaxStart");r=(r=xb.exec(e.url))&&(r[1]&&r[1]!==location.protocol||r[2]!==location.host);if(e.dataType==="script"&&n==="GET"&&r){var z=s.getElementsByTagName("head")[0]||s.documentElement,C=s.createElement("script");C.src=e.url;if(e.scriptCharset)C.charset=e.scriptCharset;if(!j){var B= +false;C.onload=C.onreadystatechange=function(){if(!B&&(!this.readyState||this.readyState==="loaded"||this.readyState==="complete")){B=true;b();d();C.onload=C.onreadystatechange=null;z&&C.parentNode&&z.removeChild(C)}}}z.insertBefore(C,z.firstChild);return w}var E=false,x=e.xhr();if(x){e.username?x.open(n,e.url,e.async,e.username,e.password):x.open(n,e.url,e.async);try{if(e.data||a&&a.contentType)x.setRequestHeader("Content-Type",e.contentType);if(e.ifModified){c.lastModified[e.url]&&x.setRequestHeader("If-Modified-Since", +c.lastModified[e.url]);c.etag[e.url]&&x.setRequestHeader("If-None-Match",c.etag[e.url])}r||x.setRequestHeader("X-Requested-With","XMLHttpRequest");x.setRequestHeader("Accept",e.dataType&&e.accepts[e.dataType]?e.accepts[e.dataType]+", */*":e.accepts._default)}catch(ga){}if(e.beforeSend&&e.beforeSend.call(k,x,e)===false){e.global&&!--c.active&&c.event.trigger("ajaxStop");x.abort();return false}e.global&&f("ajaxSend",[x,e]);var g=x.onreadystatechange=function(q){if(!x||x.readyState===0||q==="abort"){E|| +d();E=true;if(x)x.onreadystatechange=c.noop}else if(!E&&x&&(x.readyState===4||q==="timeout")){E=true;x.onreadystatechange=c.noop;i=q==="timeout"?"timeout":!c.httpSuccess(x)?"error":e.ifModified&&c.httpNotModified(x,e.url)?"notmodified":"success";var p;if(i==="success")try{o=c.httpData(x,e.dataType,e)}catch(v){i="parsererror";p=v}if(i==="success"||i==="notmodified")j||b();else c.handleError(e,x,i,p);d();q==="timeout"&&x.abort();if(e.async)x=null}};try{var h=x.abort;x.abort=function(){x&&h.call(x); +g("abort")}}catch(l){}e.async&&e.timeout>0&&setTimeout(function(){x&&!E&&g("timeout")},e.timeout);try{x.send(n==="POST"||n==="PUT"||n==="DELETE"?e.data:null)}catch(m){c.handleError(e,x,null,m);d()}e.async||g();return x}},handleError:function(a,b,d,f){if(a.error)a.error.call(a.context||a,b,d,f);if(a.global)(a.context?c(a.context):c.event).trigger("ajaxError",[b,a,f])},active:0,httpSuccess:function(a){try{return!a.status&&location.protocol==="file:"||a.status>=200&&a.status<300||a.status===304||a.status=== +1223||a.status===0}catch(b){}return false},httpNotModified:function(a,b){var d=a.getResponseHeader("Last-Modified"),f=a.getResponseHeader("Etag");if(d)c.lastModified[b]=d;if(f)c.etag[b]=f;return a.status===304||a.status===0},httpData:function(a,b,d){var f=a.getResponseHeader("content-type")||"",e=b==="xml"||!b&&f.indexOf("xml")>=0;a=e?a.responseXML:a.responseText;e&&a.documentElement.nodeName==="parsererror"&&c.error("parsererror");if(d&&d.dataFilter)a=d.dataFilter(a,b);if(typeof 
a==="string")if(b=== +"json"||!b&&f.indexOf("json")>=0)a=c.parseJSON(a);else if(b==="script"||!b&&f.indexOf("javascript")>=0)c.globalEval(a);return a},param:function(a,b){function d(i,o){if(c.isArray(o))c.each(o,function(k,n){b||/\[\]$/.test(i)?f(i,n):d(i+"["+(typeof n==="object"||c.isArray(n)?k:"")+"]",n)});else!b&&o!=null&&typeof o==="object"?c.each(o,function(k,n){d(i+"["+k+"]",n)}):f(i,o)}function f(i,o){o=c.isFunction(o)?o():o;e[e.length]=encodeURIComponent(i)+"="+encodeURIComponent(o)}var e=[];if(b===w)b=c.ajaxSettings.traditional; +if(c.isArray(a)||a.jquery)c.each(a,function(){f(this.name,this.value)});else for(var j in a)d(j,a[j]);return e.join("&").replace(yb,"+")}});var la={},Ab=/toggle|show|hide/,Bb=/^([+-]=)?([\d+-.]+)(.*)$/,W,va=[["height","marginTop","marginBottom","paddingTop","paddingBottom"],["width","marginLeft","marginRight","paddingLeft","paddingRight"],["opacity"]];c.fn.extend({show:function(a,b){if(a||a===0)return this.animate(K("show",3),a,b);else{a=0;for(b=this.length;a").appendTo("body");f=e.css("display");if(f==="none")f="block";e.remove();la[d]=f}c.data(this[a],"olddisplay",f)}}a=0;for(b=this.length;a=0;f--)if(d[f].elem===this){b&&d[f](true);d.splice(f,1)}});b||this.dequeue();return this}});c.each({slideDown:K("show",1),slideUp:K("hide",1),slideToggle:K("toggle",1),fadeIn:{opacity:"show"},fadeOut:{opacity:"hide"}},function(a,b){c.fn[a]=function(d,f){return this.animate(b,d,f)}});c.extend({speed:function(a,b,d){var f=a&&typeof a==="object"?a:{complete:d||!d&&b||c.isFunction(a)&&a,duration:a,easing:d&&b||b&&!c.isFunction(b)&&b};f.duration=c.fx.off?0:typeof f.duration=== +"number"?f.duration:c.fx.speeds[f.duration]||c.fx.speeds._default;f.old=f.complete;f.complete=function(){f.queue!==false&&c(this).dequeue();c.isFunction(f.old)&&f.old.call(this)};return f},easing:{linear:function(a,b,d,f){return d+f*a},swing:function(a,b,d,f){return(-Math.cos(a*Math.PI)/2+0.5)*f+d}},timers:[],fx:function(a,b,d){this.options=b;this.elem=a;this.prop=d;if(!b.orig)b.orig={}}});c.fx.prototype={update:function(){this.options.step&&this.options.step.call(this.elem,this.now,this);(c.fx.step[this.prop]|| +c.fx.step._default)(this);if((this.prop==="height"||this.prop==="width")&&this.elem.style)this.elem.style.display="block"},cur:function(a){if(this.elem[this.prop]!=null&&(!this.elem.style||this.elem.style[this.prop]==null))return this.elem[this.prop];return(a=parseFloat(c.css(this.elem,this.prop,a)))&&a>-10000?a:parseFloat(c.curCSS(this.elem,this.prop))||0},custom:function(a,b,d){function f(j){return e.step(j)}this.startTime=J();this.start=a;this.end=b;this.unit=d||this.unit||"px";this.now=this.start; +this.pos=this.state=0;var e=this;f.elem=this.elem;if(f()&&c.timers.push(f)&&!W)W=setInterval(c.fx.tick,13)},show:function(){this.options.orig[this.prop]=c.style(this.elem,this.prop);this.options.show=true;this.custom(this.prop==="width"||this.prop==="height"?1:0,this.cur());c(this.elem).show()},hide:function(){this.options.orig[this.prop]=c.style(this.elem,this.prop);this.options.hide=true;this.custom(this.cur(),0)},step:function(a){var b=J(),d=true;if(a||b>=this.options.duration+this.startTime){this.now= +this.end;this.pos=this.state=1;this.update();this.options.curAnim[this.prop]=true;for(var f in 
this.options.curAnim)if(this.options.curAnim[f]!==true)d=false;if(d){if(this.options.display!=null){this.elem.style.overflow=this.options.overflow;a=c.data(this.elem,"olddisplay");this.elem.style.display=a?a:this.options.display;if(c.css(this.elem,"display")==="none")this.elem.style.display="block"}this.options.hide&&c(this.elem).hide();if(this.options.hide||this.options.show)for(var e in this.options.curAnim)c.style(this.elem, +e,this.options.orig[e]);this.options.complete.call(this.elem)}return false}else{e=b-this.startTime;this.state=e/this.options.duration;a=this.options.easing||(c.easing.swing?"swing":"linear");this.pos=c.easing[this.options.specialEasing&&this.options.specialEasing[this.prop]||a](this.state,e,0,1,this.options.duration);this.now=this.start+(this.end-this.start)*this.pos;this.update()}return true}};c.extend(c.fx,{tick:function(){for(var a=c.timers,b=0;b
    "; +a.insertBefore(b,a.firstChild);d=b.firstChild;f=d.firstChild;e=d.nextSibling.firstChild.firstChild;this.doesNotAddBorder=f.offsetTop!==5;this.doesAddBorderForTableAndCells=e.offsetTop===5;f.style.position="fixed";f.style.top="20px";this.supportsFixedPosition=f.offsetTop===20||f.offsetTop===15;f.style.position=f.style.top="";d.style.overflow="hidden";d.style.position="relative";this.subtractsBorderForOverflowNotVisible=f.offsetTop===-5;this.doesNotIncludeMarginInBodyOffset=a.offsetTop!==j;a.removeChild(b); +c.offset.initialize=c.noop},bodyOffset:function(a){var b=a.offsetTop,d=a.offsetLeft;c.offset.initialize();if(c.offset.doesNotIncludeMarginInBodyOffset){b+=parseFloat(c.curCSS(a,"marginTop",true))||0;d+=parseFloat(c.curCSS(a,"marginLeft",true))||0}return{top:b,left:d}},setOffset:function(a,b,d){if(/static/.test(c.curCSS(a,"position")))a.style.position="relative";var f=c(a),e=f.offset(),j=parseInt(c.curCSS(a,"top",true),10)||0,i=parseInt(c.curCSS(a,"left",true),10)||0;if(c.isFunction(b))b=b.call(a, +d,e);d={top:b.top-e.top+j,left:b.left-e.left+i};"using"in b?b.using.call(a,d):f.css(d)}};c.fn.extend({position:function(){if(!this[0])return null;var a=this[0],b=this.offsetParent(),d=this.offset(),f=/^body|html$/i.test(b[0].nodeName)?{top:0,left:0}:b.offset();d.top-=parseFloat(c.curCSS(a,"marginTop",true))||0;d.left-=parseFloat(c.curCSS(a,"marginLeft",true))||0;f.top+=parseFloat(c.curCSS(b[0],"borderTopWidth",true))||0;f.left+=parseFloat(c.curCSS(b[0],"borderLeftWidth",true))||0;return{top:d.top- +f.top,left:d.left-f.left}},offsetParent:function(){return this.map(function(){for(var a=this.offsetParent||s.body;a&&!/^body|html$/i.test(a.nodeName)&&c.css(a,"position")==="static";)a=a.offsetParent;return a})}});c.each(["Left","Top"],function(a,b){var d="scroll"+b;c.fn[d]=function(f){var e=this[0],j;if(!e)return null;if(f!==w)return this.each(function(){if(j=wa(this))j.scrollTo(!a?f:c(j).scrollLeft(),a?f:c(j).scrollTop());else this[d]=f});else return(j=wa(e))?"pageXOffset"in j?j[a?"pageYOffset": +"pageXOffset"]:c.support.boxModel&&j.document.documentElement[d]||j.document.body[d]:e[d]}});c.each(["Height","Width"],function(a,b){var d=b.toLowerCase();c.fn["inner"+b]=function(){return this[0]?c.css(this[0],d,false,"padding"):null};c.fn["outer"+b]=function(f){return this[0]?c.css(this[0],d,false,f?"margin":"border"):null};c.fn[d]=function(f){var e=this[0];if(!e)return f==null?null:this;if(c.isFunction(f))return this.each(function(j){var i=c(this);i[d](f.call(this,j,i[d]()))});return"scrollTo"in +e&&e.document?e.document.compatMode==="CSS1Compat"&&e.document.documentElement["client"+b]||e.document.body["client"+b]:e.nodeType===9?Math.max(e.documentElement["client"+b],e.body["scroll"+b],e.documentElement["scroll"+b],e.body["offset"+b],e.documentElement["offset"+b]):f===w?c.css(e,d):this.css(d,typeof f==="string"?f:f+"px")}});A.jQuery=A.$=c})(window); diff --git a/static/js/working-group-actions.js b/static/js/working-group-actions.js new file mode 100644 index 000000000..2d1e7f3f5 --- /dev/null +++ b/static/js/working-group-actions.js @@ -0,0 +1,5 @@ +jQuery(document).ready(function () { + jQuery(".permanent-delete").click(function (e) { + return confirm('Delete file permanently from the server?'); + }); +}); diff --git a/static/js/yui/yui-20100305.js b/static/js/yui/yui-20100305.js new file mode 100644 index 000000000..577654453 --- /dev/null +++ b/static/js/yui/yui-20100305.js @@ -0,0 +1,97 @@ +/* +Copyright (c) 2009, Yahoo! Inc. All rights reserved. 
+Code licensed under the BSD License: +http://developer.yahoo.net/yui/license.txt +version: 2.8.0r4 +*/ +if(typeof YAHOO=="undefined"||!YAHOO){var YAHOO={};}YAHOO.namespace=function(){var A=arguments,E=null,C,B,D;for(C=0;C0)?B.dump(I[K],N-1):Q);}else{P.push(I[K]);}P.push(O);}if(P.length>1){P.pop();}P.push("]");}else{P.push("{");for(K in I){if(B.hasOwnProperty(I,K)){P.push(K+L);if(B.isObject(I[K])){P.push((N>0)?B.dump(I[K],N-1):Q);}else{P.push(I[K]);}P.push(O);}}if(P.length>1){P.pop();}P.push("}");}return P.join("");},substitute:function(Y,J,R){var N,M,L,U,V,X,T=[],K,O="dump",S=" ",I="{",W="}",Q,P;for(;;){N=Y.lastIndexOf(I);if(N<0){break;}M=Y.indexOf(W,N);if(N+1>=M){break;}K=Y.substring(N+1,M);U=K;X=null;L=U.indexOf(S);if(L>-1){X=U.substring(L+1);U=U.substring(0,L);}V=J[U];if(R){V=R(U,V,X);}if(B.isObject(V)){if(B.isArray(V)){V=B.dump(V,parseInt(X,10));}else{X=X||"";Q=X.indexOf(O);if(Q>-1){X=X.substring(4);}P=V.toString();if(P===G||Q>-1){V=B.dump(V,parseInt(X,10));}else{V=P;}}}else{if(!B.isString(V)&&!B.isNumber(V)){V="~-"+T.length+"-~";T[T.length]=K;}}Y=Y.substring(0,N)+V+Y.substring(M+1);}for(N=T.length-1;N>=0;N=N-1){Y=Y.replace(new RegExp("~-"+N+"-~"),"{"+T[N]+"}","g");}return Y;},trim:function(I){try{return I.replace(/^\s+|\s+$/g,"");}catch(J){return I;}},merge:function(){var L={},J=arguments,I=J.length,K;for(K=0;K0){H=C[0];}try{B=F.fn.call(E,H,F.obj);}catch(G){this.lastError=G;if(A){throw G;}}}else{try{B=F.fn.call(E,this.type,C,F.obj);}catch(D){this.lastError=D;if(A){throw D;}}}return B;},unsubscribeAll:function(){var A=this.subscribers.length,B;for(B=A-1;B>-1;B--){this._delete(B);}this.subscribers=[];return A;},_delete:function(A){var B=this.subscribers[A];if(B){delete B.fn;delete B.obj;}this.subscribers.splice(A,1);},toString:function(){return"CustomEvent: "+"'"+this.type+"', "+"context: "+this.scope;}};YAHOO.util.Subscriber=function(A,B,C){this.fn=A;this.obj=YAHOO.lang.isUndefined(B)?null:B;this.overrideContext=C;};YAHOO.util.Subscriber.prototype.getScope=function(A){if(this.overrideContext){if(this.overrideContext===true){return this.obj;}else{return this.overrideContext;}}return A;};YAHOO.util.Subscriber.prototype.contains=function(A,B){if(B){return(this.fn==A&&this.obj==B);}else{return(this.fn==A);}};YAHOO.util.Subscriber.prototype.toString=function(){return"Subscriber { obj: "+this.obj+", overrideContext: "+(this.overrideContext||"no")+" }";};if(!YAHOO.util.Event){YAHOO.util.Event=function(){var G=false,H=[],J=[],A=0,E=[],B=0,C={63232:38,63233:40,63234:37,63235:39,63276:33,63277:34,25:9},D=YAHOO.env.ua.ie,F="focusin",I="focusout";return{POLL_RETRYS:500,POLL_INTERVAL:40,EL:0,TYPE:1,FN:2,WFN:3,UNLOAD_OBJ:3,ADJ_SCOPE:4,OBJ:5,OVERRIDE:6,CAPTURE:7,lastError:null,isSafari:YAHOO.env.ua.webkit,webkit:YAHOO.env.ua.webkit,isIE:D,_interval:null,_dri:null,_specialTypes:{focusin:(D?"focusin":"focus"),focusout:(D?"focusout":"blur")},DOMReady:false,throwErrors:false,startInterval:function(){if(!this._interval){this._interval=YAHOO.lang.later(this.POLL_INTERVAL,this,this._tryPreloadAttach,null,true);}},onAvailable:function(Q,M,O,P,N){var K=(YAHOO.lang.isString(Q))?[Q]:Q;for(var L=0;L-1;M--){S=(this.removeListener(L[M],K,R)&&S);}return S;}}if(!R||!R.call){return this.purgeElement(L,false,K);}if("unload"==K){for(M=J.length-1;M>-1;M--){U=J[M];if(U&&U[0]==L&&U[1]==K&&U[2]==R){J.splice(M,1);return true;}}return false;}var N=null;var O=arguments[3];if("undefined"===typeof O){O=this._getCacheIndex(H,L,K,R);}if(O>=0){N=H[O];}if(!L||!N){return false;}var 
T=N[this.CAPTURE]===true?true:false;try{this._simpleRemove(L,K,N[this.WFN],T);}catch(Q){this.lastError=Q;return false;}delete H[O][this.WFN];delete H[O][this.FN];H.splice(O,1);return true;},getTarget:function(M,L){var K=M.target||M.srcElement;return this.resolveTextNode(K);},resolveTextNode:function(L){try{if(L&&3==L.nodeType){return L.parentNode;}}catch(K){}return L;},getPageX:function(L){var K=L.pageX;if(!K&&0!==K){K=L.clientX||0;if(this.isIE){K+=this._getScrollLeft();}}return K;},getPageY:function(K){var L=K.pageY;if(!L&&0!==L){L=K.clientY||0;if(this.isIE){L+=this._getScrollTop();}}return L;},getXY:function(K){return[this.getPageX(K),this.getPageY(K)];},getRelatedTarget:function(L){var K=L.relatedTarget;if(!K){if(L.type=="mouseout"){K=L.toElement; +}else{if(L.type=="mouseover"){K=L.fromElement;}}}return this.resolveTextNode(K);},getTime:function(M){if(!M.time){var L=new Date().getTime();try{M.time=L;}catch(K){this.lastError=K;return L;}}return M.time;},stopEvent:function(K){this.stopPropagation(K);this.preventDefault(K);},stopPropagation:function(K){if(K.stopPropagation){K.stopPropagation();}else{K.cancelBubble=true;}},preventDefault:function(K){if(K.preventDefault){K.preventDefault();}else{K.returnValue=false;}},getEvent:function(M,K){var L=M||window.event;if(!L){var N=this.getEvent.caller;while(N){L=N.arguments[0];if(L&&Event==L.constructor){break;}N=N.caller;}}return L;},getCharCode:function(L){var K=L.keyCode||L.charCode||0;if(YAHOO.env.ua.webkit&&(K in C)){K=C[K];}return K;},_getCacheIndex:function(M,P,Q,O){for(var N=0,L=M.length;N0&&E.length>0);}var P=[];var R=function(T,U){var S=T;if(U.overrideContext){if(U.overrideContext===true){S=U.obj;}else{S=U.overrideContext;}}U.fn.call(S,U.obj);};var L,K,O,N,M=[];for(L=0,K=E.length;L-1;L--){O=E[L];if(!O||!O.id){E.splice(L,1);}}this.startInterval();}else{if(this._interval){this._interval.cancel();this._interval=null;}}this.locked=false;},purgeElement:function(O,P,R){var M=(YAHOO.lang.isString(O))?this.getEl(O):O;var Q=this.getListeners(M,R),N,K;if(Q){for(N=Q.length-1;N>-1;N--){var L=Q[N];this.removeListener(M,L.type,L.fn);}}if(P&&M&&M.childNodes){for(N=0,K=M.childNodes.length;N-1;N--){M=H[N];if(M){L.removeListener(M[L.EL],M[L.TYPE],M[L.FN],N);}}M=null;}L._simpleRemove(window,"unload",L._unload);},_getScrollLeft:function(){return this._getScroll()[1];},_getScrollTop:function(){return this._getScroll()[0];},_getScroll:function(){var K=document.documentElement,L=document.body;if(K&&(K.scrollTop||K.scrollLeft)){return[K.scrollTop,K.scrollLeft];}else{if(L){return[L.scrollTop,L.scrollLeft];}else{return[0,0];}}},regCE:function(){},_simpleAdd:function(){if(window.addEventListener){return function(M,N,L,K){M.addEventListener(N,L,(K));};}else{if(window.attachEvent){return function(M,N,L,K){M.attachEvent("on"+N,L);};}else{return function(){};}}}(),_simpleRemove:function(){if(window.removeEventListener){return function(M,N,L,K){M.removeEventListener(N,L,(K));};}else{if(window.detachEvent){return function(L,M,K){L.detachEvent("on"+M,K);};}else{return function(){};}}}()};}();(function(){var EU=YAHOO.util.Event;EU.on=EU.addListener;EU.onFocus=EU.addFocusListener;EU.onBlur=EU.addBlurListener; +/* DOMReady: based on work by: Dean Edwards/John Resig/Matthias Miller/Diego Perini */ +if(EU.isIE){if(self!==self.top){document.onreadystatechange=function(){if(document.readyState=="complete"){document.onreadystatechange=null;EU._ready();}};}else{YAHOO.util.Event.onDOMReady(YAHOO.util.Event._tryPreloadAttach,YAHOO.util.Event,true);var 
n=document.createElement("p");EU._dri=setInterval(function(){try{n.doScroll("left");clearInterval(EU._dri);EU._dri=null;EU._ready();n=null;}catch(ex){}},EU.POLL_INTERVAL);}}else{if(EU.webkit&&EU.webkit<525){EU._dri=setInterval(function(){var rs=document.readyState;if("loaded"==rs||"complete"==rs){clearInterval(EU._dri);EU._dri=null;EU._ready();}},EU.POLL_INTERVAL);}else{EU._simpleAdd(document,"DOMContentLoaded",EU._ready);}}EU._simpleAdd(window,"load",EU._load);EU._simpleAdd(window,"unload",EU._unload);EU._tryPreloadAttach();})();}YAHOO.util.EventProvider=function(){};YAHOO.util.EventProvider.prototype={__yui_events:null,__yui_subscribers:null,subscribe:function(A,C,F,E){this.__yui_events=this.__yui_events||{};var D=this.__yui_events[A];if(D){D.subscribe(C,F,E);}else{this.__yui_subscribers=this.__yui_subscribers||{};var B=this.__yui_subscribers;if(!B[A]){B[A]=[];}B[A].push({fn:C,obj:F,overrideContext:E});}},unsubscribe:function(C,E,G){this.__yui_events=this.__yui_events||{};var A=this.__yui_events;if(C){var F=A[C];if(F){return F.unsubscribe(E,G);}}else{var B=true;for(var D in A){if(YAHOO.lang.hasOwnProperty(A,D)){B=B&&A[D].unsubscribe(E,G);}}return B;}return false;},unsubscribeAll:function(A){return this.unsubscribe(A); +},createEvent:function(B,G){this.__yui_events=this.__yui_events||{};var E=G||{},D=this.__yui_events,F;if(D[B]){}else{F=new YAHOO.util.CustomEvent(B,E.scope||this,E.silent,YAHOO.util.CustomEvent.FLAT,E.fireOnce);D[B]=F;if(E.onSubscribeCallback){F.subscribeEvent.subscribe(E.onSubscribeCallback);}this.__yui_subscribers=this.__yui_subscribers||{};var A=this.__yui_subscribers[B];if(A){for(var C=0;C=200&&E<300)||E===1223||C){A=B.xdr?B.r:this.createResponseObject(B,G);if(I&&I.success){if(!I.scope){I.success(A);}else{I.success.apply(I.scope,[A]);}}this.successEvent.fire(A);if(B.successEvent){B.successEvent.fire(A);}}else{switch(E){case 12002:case 12029:case 12030:case 12031:case 12152:case 13030:A=this.createExceptionObject(B.tId,G,(D?D:false));if(I&&I.failure){if(!I.scope){I.failure(A);}else{I.failure.apply(I.scope,[A]);}}break;default:A=(B.xdr)?B.response:this.createResponseObject(B,G);if(I&&I.failure){if(!I.scope){I.failure(A);}else{I.failure.apply(I.scope,[A]);}}}this.failureEvent.fire(A);if(B.failureEvent){B.failureEvent.fire(A);}}this.releaseObject(B);A=null;},createResponseObject:function(A,G){var D={},I={},E,C,F,B;try{C=A.conn.getAllResponseHeaders();F=C.split("\n");for(E=0;E'+''+''+"",K=document.createElement("div");document.body.appendChild(K);K.innerHTML=J;}function B(L,I,J,M,K){H[parseInt(L.tId)]={"o":L,"c":M};if(K){M.method=I;M.data=K;}L.conn.send(J,M,L.tId);}function E(I){D(I);G._transport=document.getElementById("YUIConnectionSwf");}function C(){G.xdrReadyEvent.fire();}function A(J,I){if(J){G.startEvent.fire(J,I.argument);if(J.startEvent){J.startEvent.fire(J,I.argument);}}}function F(J){var K=H[J.tId].o,I=H[J.tId].c;if(J.statusText==="xdr:start"){A(K,I);return;}J.responseText=decodeURI(J.responseText);K.r=J;if(I.argument){K.r.argument=I.argument;}this.handleTransactionResponse(K,I,J.statusText==="xdr:abort"?true:false);delete H[J.tId];}G.xdr=B;G.swf=D;G.transport=E;G.xdrReadyEvent=new YAHOO.util.CustomEvent("xdrReady");G.xdrReady=C;G.handleXdrResponse=F;})();(function(){var D=YAHOO.util.Connect,F=YAHOO.util.Event;D._isFormSubmit=false;D._isFileUpload=false;D._formNode=null;D._sFormData=null;D._submitElementValue=null;D.uploadEvent=new YAHOO.util.CustomEvent("upload"),D._hasSubmitListener=function(){if(F){F.addListener(document,"click",function(J){var 
I=F.getTarget(J),H=I.nodeName.toLowerCase();if((H==="input"||H==="button")&&(I.type&&I.type.toLowerCase()=="submit")){D._submitElementValue=encodeURIComponent(I.name)+"="+encodeURIComponent(I.value);}});return true;}return false;}();function G(T,O,J){var S,I,R,P,W,Q=false,M=[],V=0,L,N,K,U,H;this.resetFormState();if(typeof T=="string"){S=(document.getElementById(T)||document.forms[T]);}else{if(typeof T=="object"){S=T;}else{return;}}if(O){this.createFrame(J?J:null);this._isFormSubmit=true;this._isFileUpload=true;this._formNode=S;return;}for(L=0,N=S.elements.length;L-1){H=I.options[I.selectedIndex];M[V++]=R+encodeURIComponent((H.attributes.value&&H.attributes.value.specified)?H.value:H.text);}break;case"select-multiple":if(I.selectedIndex>-1){for(K=I.selectedIndex,U=I.options.length;K');if(typeof H=="boolean"){J.src="javascript:false";}}else{J=document.createElement("iframe");J.id=I;J.name=I;}J.style.position="absolute";J.style.top="-1000px";J.style.left="-1000px";document.body.appendChild(J);}function E(H){var K=[],I=H.split("&"),J,L;for(J=0;J0){for(P=0;P519)?true:false);while((G=G[u])){z[0]+=G[b];z[1]+=G[P];if(AC){z=E.Dom._calcBorders(G,z);}}if(E.Dom._getStyle(y,p)!==f){G=y;while((G=G[Z])&&G[C]){AA=G[i];AB=G[O];if(H&&(E.Dom._getStyle(G,"overflow")!=="visible")){z=E.Dom._calcBorders(G,z);}if(AA||AB){z[0]-=AB;z[1]-=AA;}}z[0]+=x;z[1]+=Y;}else{if(D){z[0]-=x;z[1]-=Y;}else{if(I||H){z[0]+=x;z[1]+=Y;}}}z[0]=Math.floor(z[0]);z[1]=Math.floor(z[1]);}else{}return z;};}}(),getX:function(G){var Y=function(x){return E.Dom.getXY(x)[0];};return E.Dom.batch(G,Y,E.Dom,true);},getY:function(G){var Y=function(x){return E.Dom.getXY(x)[1];};return E.Dom.batch(G,Y,E.Dom,true);},setXY:function(G,x,Y){E.Dom.batch(G,E.Dom._setXY,{pos:x,noRetry:Y});},_setXY:function(G,z){var AA=E.Dom._getStyle(G,p),y=E.Dom.setStyle,AD=z.pos,Y=z.noRetry,AB=[parseInt(E.Dom.getComputedStyle(G,j),10),parseInt(E.Dom.getComputedStyle(G,o),10)],AC,x;if(AA=="static"){AA=V;y(G,p,AA);}AC=E.Dom._getXY(G);if(!AD||AC===false){return false;}if(isNaN(AB[0])){AB[0]=(AA==V)?0:G[b];}if(isNaN(AB[1])){AB[1]=(AA==V)?0:G[P];}if(AD[0]!==null){y(G,j,AD[0]-AC[0]+AB[0]+"px");}if(AD[1]!==null){y(G,o,AD[1]-AC[1]+AB[1]+"px");}if(!Y){x=E.Dom._getXY(G);if((AD[0]!==null&&x[0]!=AD[0])||(AD[1]!==null&&x[1]!=AD[1])){E.Dom._setXY(G,{pos:AD,noRetry:true});}}},setX:function(Y,G){E.Dom.setXY(Y,[G,null]);},setY:function(G,Y){E.Dom.setXY(G,[null,Y]);},getRegion:function(G){var Y=function(x){var y=false;if(E.Dom._canPosition(x)){y=E.Region.getRegion(x);}else{}return y;};return E.Dom.batch(G,Y,E.Dom,true);},getClientWidth:function(){return E.Dom.getViewportWidth();},getClientHeight:function(){return E.Dom.getViewportHeight();},getElementsByClassName:function(AB,AF,AC,AE,x,AD){AF=AF||"*";AC=(AC)?E.Dom.get(AC):null||K;if(!AC){return[];}var Y=[],G=AC.getElementsByTagName(AF),z=E.Dom.hasClass;for(var y=0,AA=G.length;y-1;}}else{}return G;},addClass:function(Y,G){return E.Dom.batch(Y,E.Dom._addClass,G);},_addClass:function(x,Y){var G=false,y;if(x&&Y){y=E.Dom._getAttribute(x,F)||J;if(!E.Dom._hasClass(x,Y)){E.Dom.setAttribute(x,F,A(y+B+Y));G=true;}}else{}return G;},removeClass:function(Y,G){return E.Dom.batch(Y,E.Dom._removeClass,G);},_removeClass:function(y,x){var Y=false,AA,z,G;if(y&&x){AA=E.Dom._getAttribute(y,F)||J;E.Dom.setAttribute(y,F,AA.replace(E.Dom._getClassRegex(x),J));z=E.Dom._getAttribute(y,F);if(AA!==z){E.Dom.setAttribute(y,F,A(z));Y=true;if(E.Dom._getAttribute(y,F)===""){G=(y.hasAttribute&&y.hasAttribute(g))?g:F; +y.removeAttribute(G);}}}else{}return 
Y;},replaceClass:function(x,Y,G){return E.Dom.batch(x,E.Dom._replaceClass,{from:Y,to:G});},_replaceClass:function(y,x){var Y,AB,AA,G=false,z;if(y&&x){AB=x.from;AA=x.to;if(!AA){G=false;}else{if(!AB){G=E.Dom._addClass(y,x.to);}else{if(AB!==AA){z=E.Dom._getAttribute(y,F)||J;Y=(B+z.replace(E.Dom._getClassRegex(AB),B+AA)).split(E.Dom._getClassRegex(AA));Y.splice(1,0,B+AA);E.Dom.setAttribute(y,F,A(Y.join(J)));G=true;}}}}else{}return G;},generateId:function(G,x){x=x||"yui-gen";var Y=function(y){if(y&&y.id){return y.id;}var z=x+YAHOO.env._id_counter++;if(y){if(y[e]&&y[e].getElementById(z)){return E.Dom.generateId(y,z+x);}y.id=z;}return z;};return E.Dom.batch(G,Y,E.Dom,true)||Y.apply(E.Dom,arguments);},isAncestor:function(Y,x){Y=E.Dom.get(Y);x=E.Dom.get(x);var G=false;if((Y&&x)&&(Y[l]&&x[l])){if(Y.contains&&Y!==x){G=Y.contains(x);}else{if(Y.compareDocumentPosition){G=!!(Y.compareDocumentPosition(x)&16);}}}else{}return G;},inDocument:function(G,Y){return E.Dom._inDoc(E.Dom.get(G),Y);},_inDoc:function(Y,x){var G=false;if(Y&&Y[C]){x=x||Y[e];G=E.Dom.isAncestor(x[v],Y);}else{}return G;},getElementsBy:function(Y,AF,AB,AD,y,AC,AE){AF=AF||"*";AB=(AB)?E.Dom.get(AB):null||K;if(!AB){return[];}var x=[],G=AB.getElementsByTagName(AF);for(var z=0,AA=G.length;z=8&&K.documentElement.hasAttribute){E.Dom.DOT_ATTRIBUTES.type=true;}})();YAHOO.util.Region=function(C,D,A,B){this.top=C;this.y=C;this[1]=C;this.right=D;this.bottom=A;this.left=B;this.x=B;this[0]=B; +this.width=this.right-this.left;this.height=this.bottom-this.top;};YAHOO.util.Region.prototype.contains=function(A){return(A.left>=this.left&&A.right<=this.right&&A.top>=this.top&&A.bottom<=this.bottom);};YAHOO.util.Region.prototype.getArea=function(){return((this.bottom-this.top)*(this.right-this.left));};YAHOO.util.Region.prototype.intersect=function(E){var C=Math.max(this.top,E.top),D=Math.min(this.right,E.right),A=Math.min(this.bottom,E.bottom),B=Math.max(this.left,E.left);if(A>=C&&D>=B){return new YAHOO.util.Region(C,D,A,B);}else{return null;}};YAHOO.util.Region.prototype.union=function(E){var C=Math.min(this.top,E.top),D=Math.max(this.right,E.right),A=Math.max(this.bottom,E.bottom),B=Math.min(this.left,E.left);return new YAHOO.util.Region(C,D,A,B);};YAHOO.util.Region.prototype.toString=function(){return("Region {"+"top: "+this.top+", right: "+this.right+", bottom: "+this.bottom+", left: "+this.left+", height: "+this.height+", width: "+this.width+"}");};YAHOO.util.Region.getRegion=function(D){var F=YAHOO.util.Dom.getXY(D),C=F[1],E=F[0]+D.offsetWidth,A=F[1]+D.offsetHeight,B=F[0];return new YAHOO.util.Region(C,E,A,B);};YAHOO.util.Point=function(A,B){if(YAHOO.lang.isArray(A)){B=A[1];A=A[0];}YAHOO.util.Point.superclass.constructor.call(this,B,A,B,A);};YAHOO.extend(YAHOO.util.Point,YAHOO.util.Region);(function(){var B=YAHOO.util,A="clientTop",F="clientLeft",J="parentNode",K="right",W="hasLayout",I="px",U="opacity",L="auto",D="borderLeftWidth",G="borderTopWidth",P="borderRightWidth",V="borderBottomWidth",S="visible",Q="transparent",N="height",E="width",H="style",T="currentStyle",R=/^width|height$/,O=/^(\d[.\d]*)+(em|ex|px|gd|rem|vw|vh|vm|ch|mm|cm|in|pt|pc|deg|rad|ms|s|hz|khz|%){1}?/i,M={get:function(X,Z){var Y="",a=X[T][Z];if(Z===U){Y=B.Dom.getStyle(X,U);}else{if(!a||(a.indexOf&&a.indexOf(I)>-1)){Y=a;}else{if(B.Dom.IE_COMPUTED[Z]){Y=B.Dom.IE_COMPUTED[Z](X,Z);}else{if(O.test(a)){Y=B.Dom.IE.ComputedStyle.getPixel(X,Z);}else{Y=a;}}}}return Y;},getOffset:function(Z,e){var 
b=Z[T][e],X=e.charAt(0).toUpperCase()+e.substr(1),c="offset"+X,Y="pixel"+X,a="",d;if(b==L){d=Z[c];if(d===undefined){a=0;}a=d;if(R.test(e)){Z[H][e]=d;if(Z[c]>d){a=d-(Z[c]-d);}Z[H][e]=L;}}else{if(!Z[H][Y]&&!Z[H][e]){Z[H][e]=b;}a=Z[H][Y];}return a+I;},getBorderWidth:function(X,Z){var Y=null;if(!X[T][W]){X[H].zoom=1;}switch(Z){case G:Y=X[A];break;case V:Y=X.offsetHeight-X.clientHeight-X[A];break;case D:Y=X[F];break;case P:Y=X.offsetWidth-X.clientWidth-X[F];break;}return Y+I;},getPixel:function(Y,X){var a=null,b=Y[T][K],Z=Y[T][X];Y[H][K]=Z;a=Y[H].pixelRight;Y[H][K]=b;return a+I;},getMargin:function(Y,X){var Z;if(Y[T][X]==L){Z=0+I;}else{Z=B.Dom.IE.ComputedStyle.getPixel(Y,X);}return Z;},getVisibility:function(Y,X){var Z;while((Z=Y[T])&&Z[X]=="inherit"){Y=Y[J];}return(Z)?Z[X]:S;},getColor:function(Y,X){return B.Dom.Color.toRGB(Y[T][X])||Q;},getBorderColor:function(Y,X){var Z=Y[T],a=Z[X]||Z.color;return B.Dom.Color.toRGB(B.Dom.Color.toHex(a));}},C={};C.top=C.right=C.bottom=C.left=C[E]=C[N]=M.getOffset;C.color=M.getColor;C[G]=C[P]=C[V]=C[D]=M.getBorderWidth;C.marginTop=C.marginRight=C.marginBottom=C.marginLeft=M.getMargin;C.visibility=M.getVisibility;C.borderColor=C.borderTopColor=C.borderRightColor=C.borderBottomColor=C.borderLeftColor=M.getBorderColor;B.Dom.IE_COMPUTED=C;B.Dom.IE_ComputedStyle=M;})();(function(){var C="toString",A=parseInt,B=RegExp,D=YAHOO.util;D.Dom.Color={KEYWORDS:{black:"000",silver:"c0c0c0",gray:"808080",white:"fff",maroon:"800000",red:"f00",purple:"800080",fuchsia:"f0f",green:"008000",lime:"0f0",olive:"808000",yellow:"ff0",navy:"000080",blue:"00f",teal:"008080",aqua:"0ff"},re_RGB:/^rgb\(([0-9]+)\s*,\s*([0-9]+)\s*,\s*([0-9]+)\)$/i,re_hex:/^#?([0-9A-F]{2})([0-9A-F]{2})([0-9A-F]{2})$/i,re_hex3:/([0-9A-F])/gi,toRGB:function(E){if(!D.Dom.Color.re_RGB.test(E)){E=D.Dom.Color.toHex(E);}if(D.Dom.Color.re_hex.exec(E)){E="rgb("+[A(B.$1,16),A(B.$2,16),A(B.$3,16)].join(", ")+")";}return E;},toHex:function(H){H=D.Dom.Color.KEYWORDS[H]||H;if(D.Dom.Color.re_RGB.exec(H)){var G=(B.$1.length===1)?"0"+B.$1:Number(B.$1),F=(B.$2.length===1)?"0"+B.$2:Number(B.$2),E=(B.$3.length===1)?"0"+B.$3:Number(B.$3);H=[G[C](16),F[C](16),E[C](16)].join("");}if(H.length<6){H=H.replace(D.Dom.Color.re_hex3,"$1$1");}if(H!=="transparent"&&H.indexOf("#")<0){H="#"+H;}return H.toLowerCase();}};}());YAHOO.register("dom",YAHOO.util.Dom,{version:"2.8.0r4",build:"2446"});/* +Copyright (c) 2009, Yahoo! Inc. All rights reserved. 
+Code licensed under the BSD License: +http://developer.yahoo.net/yui/license.txt +version: 2.8.0r4 +*/ +(function(){YAHOO.util.Config=function(D){if(D){this.init(D);}};var B=YAHOO.lang,C=YAHOO.util.CustomEvent,A=YAHOO.util.Config;A.CONFIG_CHANGED_EVENT="configChanged";A.BOOLEAN_TYPE="boolean";A.prototype={owner:null,queueInProgress:false,config:null,initialConfig:null,eventQueue:null,configChangedEvent:null,init:function(D){this.owner=D;this.configChangedEvent=this.createEvent(A.CONFIG_CHANGED_EVENT);this.configChangedEvent.signature=C.LIST;this.queueInProgress=false;this.config={};this.initialConfig={};this.eventQueue=[];},checkBoolean:function(D){return(typeof D==A.BOOLEAN_TYPE);},checkNumber:function(D){return(!isNaN(D));},fireEvent:function(D,F){var E=this.config[D];if(E&&E.event){E.event.fire(F);}},addProperty:function(E,D){E=E.toLowerCase();this.config[E]=D;D.event=this.createEvent(E,{scope:this.owner});D.event.signature=C.LIST;D.key=E;if(D.handler){D.event.subscribe(D.handler,this.owner);}this.setProperty(E,D.value,true);if(!D.suppressEvent){this.queueProperty(E,D.value);}},getConfig:function(){var D={},F=this.config,G,E;for(G in F){if(B.hasOwnProperty(F,G)){E=F[G];if(E&&E.event){D[G]=E.value;}}}return D;},getProperty:function(D){var E=this.config[D.toLowerCase()];if(E&&E.event){return E.value;}else{return undefined;}},resetProperty:function(D){D=D.toLowerCase();var E=this.config[D];if(E&&E.event){if(this.initialConfig[D]&&!B.isUndefined(this.initialConfig[D])){this.setProperty(D,this.initialConfig[D]);return true;}}else{return false;}},setProperty:function(E,G,D){var F;E=E.toLowerCase();if(this.queueInProgress&&!D){this.queueProperty(E,G);return true;}else{F=this.config[E];if(F&&F.event){if(F.validator&&!F.validator(G)){return false;}else{F.value=G;if(!D){this.fireEvent(E,G);this.configChangedEvent.fire([E,G]);}return true;}}else{return false;}}},queueProperty:function(S,P){S=S.toLowerCase();var R=this.config[S],K=false,J,G,H,I,O,Q,F,M,N,D,L,T,E;if(R&&R.event){if(!B.isUndefined(P)&&R.validator&&!R.validator(P)){return false;}else{if(!B.isUndefined(P)){R.value=P;}else{P=R.value;}K=false;J=this.eventQueue.length;for(L=0;L0){G=F-1;do{D=E.subscribers[G];if(D&&D.obj==I&&D.fn==H){return true;}}while(G--);}return false;};YAHOO.lang.augmentProto(A,YAHOO.util.EventProvider);}());(function(){YAHOO.widget.Module=function(R,Q){if(R){this.init(R,Q);}else{}};var F=YAHOO.util.Dom,D=YAHOO.util.Config,N=YAHOO.util.Event,M=YAHOO.util.CustomEvent,G=YAHOO.widget.Module,I=YAHOO.env.ua,H,P,O,E,A={"BEFORE_INIT":"beforeInit","INIT":"init","APPEND":"append","BEFORE_RENDER":"beforeRender","RENDER":"render","CHANGE_HEADER":"changeHeader","CHANGE_BODY":"changeBody","CHANGE_FOOTER":"changeFooter","CHANGE_CONTENT":"changeContent","DESTROY":"destroy","BEFORE_SHOW":"beforeShow","SHOW":"show","BEFORE_HIDE":"beforeHide","HIDE":"hide"},J={"VISIBLE":{key:"visible",value:true,validator:YAHOO.lang.isBoolean},"EFFECT":{key:"effect",suppressEvent:true,supercedes:["visible"]},"MONITOR_RESIZE":{key:"monitorresize",value:true},"APPEND_TO_DOCUMENT_BODY":{key:"appendtodocumentbody",value:false}};G.IMG_ROOT=null;G.IMG_ROOT_SSL=null;G.CSS_MODULE="yui-module";G.CSS_HEADER="hd";G.CSS_BODY="bd";G.CSS_FOOTER="ft";G.RESIZE_MONITOR_SECURE_URL="javascript:false;";G.RESIZE_MONITOR_BUFFER=1;G.textResizeEvent=new M("textResize");G.forceDocumentRedraw=function(){var Q=document.documentElement;if(Q){Q.className+=" ";Q.className=YAHOO.lang.trim(Q.className);}};function L(){if(!H){H=document.createElement("div");H.innerHTML=('
    '+'
    ');P=H.firstChild;O=P.nextSibling;E=O.nextSibling;}return H;}function K(){if(!P){L();}return(P.cloneNode(false));}function B(){if(!O){L();}return(O.cloneNode(false));}function C(){if(!E){L();}return(E.cloneNode(false));}G.prototype={constructor:G,element:null,header:null,body:null,footer:null,id:null,imageRoot:G.IMG_ROOT,initEvents:function(){var Q=M.LIST;
+this.beforeInitEvent=this.createEvent(A.BEFORE_INIT);this.beforeInitEvent.signature=Q;this.initEvent=this.createEvent(A.INIT);this.initEvent.signature=Q;this.appendEvent=this.createEvent(A.APPEND);this.appendEvent.signature=Q;this.beforeRenderEvent=this.createEvent(A.BEFORE_RENDER);this.beforeRenderEvent.signature=Q;this.renderEvent=this.createEvent(A.RENDER);this.renderEvent.signature=Q;this.changeHeaderEvent=this.createEvent(A.CHANGE_HEADER);this.changeHeaderEvent.signature=Q;this.changeBodyEvent=this.createEvent(A.CHANGE_BODY);this.changeBodyEvent.signature=Q;this.changeFooterEvent=this.createEvent(A.CHANGE_FOOTER);this.changeFooterEvent.signature=Q;this.changeContentEvent=this.createEvent(A.CHANGE_CONTENT);this.changeContentEvent.signature=Q;this.destroyEvent=this.createEvent(A.DESTROY);this.destroyEvent.signature=Q;this.beforeShowEvent=this.createEvent(A.BEFORE_SHOW);this.beforeShowEvent.signature=Q;this.showEvent=this.createEvent(A.SHOW);this.showEvent.signature=Q;this.beforeHideEvent=this.createEvent(A.BEFORE_HIDE);this.beforeHideEvent.signature=Q;this.hideEvent=this.createEvent(A.HIDE);this.hideEvent.signature=Q;},platform:function(){var Q=navigator.userAgent.toLowerCase();if(Q.indexOf("windows")!=-1||Q.indexOf("win32")!=-1){return"windows";}else{if(Q.indexOf("macintosh")!=-1){return"mac";}else{return false;}}}(),browser:function(){var Q=navigator.userAgent.toLowerCase();if(Q.indexOf("opera")!=-1){return"opera";}else{if(Q.indexOf("msie 7")!=-1){return"ie7";}else{if(Q.indexOf("msie")!=-1){return"ie";}else{if(Q.indexOf("safari")!=-1){return"safari";}else{if(Q.indexOf("gecko")!=-1){return"gecko";}else{return false;}}}}}}(),isSecure:function(){if(window.location.href.toLowerCase().indexOf("https")===0){return true;}else{return false;}}(),initDefaultConfig:function(){this.cfg.addProperty(J.VISIBLE.key,{handler:this.configVisible,value:J.VISIBLE.value,validator:J.VISIBLE.validator});this.cfg.addProperty(J.EFFECT.key,{suppressEvent:J.EFFECT.suppressEvent,supercedes:J.EFFECT.supercedes});this.cfg.addProperty(J.MONITOR_RESIZE.key,{handler:this.configMonitorResize,value:J.MONITOR_RESIZE.value});this.cfg.addProperty(J.APPEND_TO_DOCUMENT_BODY.key,{value:J.APPEND_TO_DOCUMENT_BODY.value});},init:function(V,U){var S,W;this.initEvents();this.beforeInitEvent.fire(G);this.cfg=new D(this);if(this.isSecure){this.imageRoot=G.IMG_ROOT_SSL;}if(typeof V=="string"){S=V;V=document.getElementById(V);if(!V){V=(L()).cloneNode(false);V.id=S;}}this.id=F.generateId(V);this.element=V;W=this.element.firstChild;if(W){var R=false,Q=false,T=false;do{if(1==W.nodeType){if(!R&&F.hasClass(W,G.CSS_HEADER)){this.header=W;R=true;}else{if(!Q&&F.hasClass(W,G.CSS_BODY)){this.body=W;Q=true;}else{if(!T&&F.hasClass(W,G.CSS_FOOTER)){this.footer=W;T=true;}}}}}while((W=W.nextSibling));}this.initDefaultConfig();F.addClass(this.element,G.CSS_MODULE);if(U){this.cfg.applyConfig(U,true);}if(!D.alreadySubscribed(this.renderEvent,this.cfg.fireQueue,this.cfg)){this.renderEvent.subscribe(this.cfg.fireQueue,this.cfg,true);}this.initEvent.fire(G);},initResizeMonitor:function(){var R=(I.gecko&&this.platform=="windows");if(R){var 
Q=this;setTimeout(function(){Q._initResizeMonitor();},0);}else{this._initResizeMonitor();}},_initResizeMonitor:function(){var Q,S,U;function W(){G.textResizeEvent.fire();}if(!I.opera){S=F.get("_yuiResizeMonitor");var V=this._supportsCWResize();if(!S){S=document.createElement("iframe");if(this.isSecure&&G.RESIZE_MONITOR_SECURE_URL&&I.ie){S.src=G.RESIZE_MONITOR_SECURE_URL;}if(!V){U=["