| OLD | NEW |
| 1 # Copyright 2008 Google Inc. | 1 # Copyright 2008 Google Inc. |
| 2 # | 2 # |
| 3 # Licensed under the Apache License, Version 2.0 (the "License"); | 3 # Licensed under the Apache License, Version 2.0 (the "License"); |
| 4 # you may not use this file except in compliance with the License. | 4 # you may not use this file except in compliance with the License. |
| 5 # You may obtain a copy of the License at | 5 # You may obtain a copy of the License at |
| 6 # | 6 # |
| 7 # http://www.apache.org/licenses/LICENSE-2.0 | 7 # http://www.apache.org/licenses/LICENSE-2.0 |
| 8 # | 8 # |
| 9 # Unless required by applicable law or agreed to in writing, software | 9 # Unless required by applicable law or agreed to in writing, software |
| 10 # distributed under the License is distributed on an "AS IS" BASIS, | 10 # distributed under the License is distributed on an "AS IS" BASIS, |
| 11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | 11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| 12 # See the License for the specific language governing permissions and | 12 # See the License for the specific language governing permissions and |
| 13 # limitations under the License. | 13 # limitations under the License. |
| 14 | 14 |
| 15 """Views for Rietveld.""" | 15 """Views for Rietveld.""" |
| 16 | 16 |
| 17 | 17 |
| 18 import binascii | 18 import binascii |
| 19 import copy |
| 19 import datetime | 20 import datetime |
| 20 import email # see incoming_mail() | 21 import email # see incoming_mail() |
| 21 import email.utils | 22 import email.utils |
| 23 import difflib |
| 22 import itertools | 24 import itertools |
| 23 import json | 25 import json |
| 24 import logging | 26 import logging |
| 25 import md5 | 27 import md5 |
| 26 import mimetypes | 28 import mimetypes |
| 27 import os | 29 import os |
| 28 import random | 30 import random |
| 29 import re | 31 import re |
| 30 import tarfile | 32 import tarfile |
| 31 import tempfile | 33 import tempfile |
| 32 import urllib | 34 import urllib |
| 33 from cStringIO import StringIO | 35 from cStringIO import StringIO |
| 34 from xml.etree import ElementTree | 36 from xml.etree import ElementTree |
| 37 from upload import COMMIT_MSG |
| 35 | 38 |
| 36 from google.appengine.api import mail | 39 from google.appengine.api import mail |
| 37 from google.appengine.api import memcache | 40 from google.appengine.api import memcache |
| 38 from google.appengine.api import taskqueue | 41 from google.appengine.api import taskqueue |
| 39 from google.appengine.api import users | 42 from google.appengine.api import users |
| 40 from google.appengine.api import urlfetch | 43 from google.appengine.api import urlfetch |
| 41 from google.appengine.api import xmpp | 44 from google.appengine.api import xmpp |
| 42 from google.appengine.ext import db | 45 from google.appengine.ext import db |
| 43 from google.appengine.runtime import DeadlineExceededError | 46 from google.appengine.runtime import DeadlineExceededError |
| 44 from google.appengine.runtime import apiproxy_errors | 47 from google.appengine.runtime import apiproxy_errors |
| (...skipping 1691 matching lines...) | | Loading... |
| 1736 | 1739 |
| 1737 ps_key = db.Key.from_path( | 1740 ps_key = db.Key.from_path( |
| 1738 models.PatchSet.kind(), | 1741 models.PatchSet.kind(), |
| 1739 db.allocate_ids(db.Key.from_path(models.PatchSet.kind(), 1, | 1742 db.allocate_ids(db.Key.from_path(models.PatchSet.kind(), 1, |
| 1740 parent=issue.key()), 1)[0], | 1743 parent=issue.key()), 1)[0], |
| 1741 parent=issue.key()) | 1744 parent=issue.key()) |
| 1742 | 1745 |
| 1743 patchset = models.PatchSet(issue=issue, data=data, url=url, key=ps_key) | 1746 patchset = models.PatchSet(issue=issue, data=data, url=url, key=ps_key) |
| 1744 patchset.put() | 1747 patchset.put() |
| 1745 | 1748 |
| 1749 commit_patch = _create_commit_message_patch(patchset, |
| 1750 patchset.issue.description) |
| 1751 commit_patch.put() |
| 1752 |
| 1746 if not separate_patches: | 1753 if not separate_patches: |
| 1747 try: | 1754 try: |
| 1748 patches = engine.ParsePatchSet(patchset) | 1755 patches = engine.ParsePatchSet(patchset) |
| 1749 except: | 1756 except: |
| 1750 # catch all exceptions happening in engine.ParsePatchSet, | 1757 # catch all exceptions happening in engine.ParsePatchSet, |
| 1751 # engine.SplitPatch. With malformed diffs a variety of exceptions could | 1758 # engine.SplitPatch. With malformed diffs a variety of exceptions could |
| 1752 # happen there. | 1759 # happen there. |
| 1753 logging.exception('Exception during patch parsing') | 1760 logging.exception('Exception during patch parsing') |
| 1754 patches = [] | 1761 patches = [] |
| 1755 if not patches: | 1762 if not patches: |
| (...skipping 66 matching lines...) | | Loading... |
@xsrf_required
def add(request):
  """/<issue>/add - Add a new PatchSet to an existing Issue."""
  issue = request.issue
  issue_id = issue.key().id()
  form = AddForm(request.POST, request.FILES)
  if _add_patchset_from_form(request, issue, form):
    # Patchset created successfully: bounce back to the issue page.
    return HttpResponseRedirect(reverse(show, args=[issue_id]))
  # Validation failed; re-render the issue page with the form errors.
  return show(request, issue_id, form)
| 1830 | 1837 |
| 1831 | 1838 |
def _create_commit_message_patch(patchset, description):
  """Builds (without saving) a Patch that carries the issue description.

  The description is rendered as a unified diff against /dev/null so that
  it appears in the patchset as the special COMMIT_MSG pseudo-file.

  Args:
    patchset: models.PatchSet the new patch belongs to.
    description: str, the issue description to embed.

  Returns:
    An unsaved models.Patch; the caller is responsible for calling put().
  """
  # splitlines(True) keeps the line endings so difflib emits a valid diff.
  # ''.join avoids the quadratic cost of += string concatenation in a loop.
  diff = "".join(difflib.unified_diff(
      [], description.splitlines(True),
      fromfile="/dev/null", tofile=COMMIT_MSG))

  # Pre-allocate an id under the patchset so the key is set at construction
  # time (same pattern used for PatchSet keys elsewhere in this file).
  patch_key = db.Key.from_path(
      models.Patch.kind(),
      db.allocate_ids(db.Key.from_path(models.Patch.kind(), 1,
                                       parent=patchset.key()), 1)[0],
      parent=patchset.key())
  return models.Patch(patchset=patchset, text=utils.to_dbtext(diff),
                      filename=COMMIT_MSG, key=patch_key,
                      no_base_file=True)
| 1855 |
| 1856 |
| 1832 def _add_patchset_from_form(request, issue, form, message_key='message', | 1857 def _add_patchset_from_form(request, issue, form, message_key='message', |
| 1833 emails_add_only=False): | 1858 emails_add_only=False): |
| 1834 """Helper for add() and upload().""" | 1859 """Helper for add() and upload().""" |
| 1835 if form.is_valid(): | 1860 if form.is_valid(): |
| 1836 data_url = _get_data_url(form) | 1861 data_url = _get_data_url(form) |
| 1837 if not form.is_valid(): | 1862 if not form.is_valid(): |
| 1838 return None | 1863 return None |
| 1839 account = models.Account.get_account_for_user(request.user) | 1864 account = models.Account.get_account_for_user(request.user) |
| 1840 if account.blocked: | 1865 if account.blocked: |
| 1841 return None | 1866 return None |
| 1842 if not issue.edit_allowed: | 1867 if not issue.edit_allowed: |
| 1843 # This check is done at each call site but check again as a safety measure. | 1868 # This check is done at each call site but check again as a safety measure. |
| 1844 return None | 1869 return None |
| 1845 data, url, separate_patches = data_url | 1870 data, url, separate_patches = data_url |
| 1846 message = form.cleaned_data[message_key] | 1871 message = form.cleaned_data[message_key] |
| 1847 ps_key = db.Key.from_path( | 1872 ps_key = db.Key.from_path( |
| 1848 models.PatchSet.kind(), | 1873 models.PatchSet.kind(), |
| 1849 db.allocate_ids(db.Key.from_path(models.PatchSet.kind(), 1, | 1874 db.allocate_ids(db.Key.from_path(models.PatchSet.kind(), 1, |
| 1850 parent=issue.key()), 1)[0], | 1875 parent=issue.key()), 1)[0], |
| 1851 parent=issue.key()) | 1876 parent=issue.key()) |
| 1852 patchset = models.PatchSet(issue=issue, message=message, data=data, url=url, | 1877 patchset = models.PatchSet(issue=issue, message=message, data=data, url=url, |
| 1853 key=ps_key) | 1878 key=ps_key) |
| 1854 patchset.put() | 1879 patchset.put() |
| 1855 | 1880 |
| 1881 commit_patch = _create_commit_message_patch(patchset, |
| 1882 patchset.issue.description) |
| 1883 commit_patch.put() |
| 1884 |
| 1856 if not separate_patches: | 1885 if not separate_patches: |
| 1857 try: | 1886 try: |
| 1858 patches = engine.ParsePatchSet(patchset) | 1887 patches = engine.ParsePatchSet(patchset) |
| 1859 except: | 1888 except: |
| 1860 logging.exception('Exception during patchset parsing') | 1889 logging.exception('Exception during patchset parsing') |
| 1861 patches = [] | 1890 patches = [] |
| 1862 if not patches: | 1891 if not patches: |
| 1863 patchset.delete() | 1892 patchset.delete() |
| 1864 errkey = url and 'url' or 'data' | 1893 errkey = url and 'url' or 'data' |
| 1865 form.errors[errkey] = ['Patch set contains no recognizable patches'] | 1894 form.errors[errkey] = ['Patch set contains no recognizable patches'] |
| (...skipping 376 matching lines...) | | Loading... |
| 2242 | 2271 |
| 2243 if form.is_valid() and not issue.local_base: | 2272 if form.is_valid() and not issue.local_base: |
| 2244 base = form.get_base() | 2273 base = form.get_base() |
| 2245 | 2274 |
| 2246 if not form.is_valid(): | 2275 if not form.is_valid(): |
| 2247 return respond(request, 'edit.html', {'issue': issue, 'form': form}) | 2276 return respond(request, 'edit.html', {'issue': issue, 'form': form}) |
| 2248 cleaned_data = form.cleaned_data | 2277 cleaned_data = form.cleaned_data |
| 2249 | 2278 |
| 2250 was_closed = issue.closed | 2279 was_closed = issue.closed |
| 2251 issue.subject = cleaned_data['subject'] | 2280 issue.subject = cleaned_data['subject'] |
| 2281 old_description = issue.description |
| 2252 issue.description = cleaned_data['description'] | 2282 issue.description = cleaned_data['description'] |
| 2283 · |
| 2284 # If the description was updated, it would get out of sync with the special |
| 2285 # COMMIT_MSG file in the last patchset. So we clone the last patchset, |
| 2286 # modulo a new COMMIT_MSG patch.· |
| 2287 if issue.description != old_description: |
| 2288 |
| 2289 def clone_without_key(obj, **extra_args): |
| 2290 """ Clones an object, without copying the key """ |
| 2291 klass = obj.__class__ |
| 2292 props = {} |
| 2293 for k, v in klass.properties().iteritems(): |
| 2294 if not (type(v) == db.DateTimeProperty and |
| 2295 (getattr(v, 'auto_now') or getattr(v, 'auto_now_add'))): |
| 2296 if type(v) == db.ReferenceProperty: |
| 2297 value = getattr(klass, k).get_value_for_datastore(obj) |
| 2298 else: |
| 2299 value = v.__get__(obj, klass) |
| 2300 props[k] = value |
| 2301 props.update(extra_args)···· |
| 2302 return klass(**props)···· |
| 2303 |
| 2304 # Create a new patchset from the last one. |
| 2305 patchsets = list(issue.patchset_set.order('created')) |
| 2306 last_patchset = patchsets[-1] |
| 2307 new_patchset_msg = 'Auto-generated patchset by description update.' |
| 2308 new_patchset = clone_without_key(last_patchset, parent=issue, |
| 2309 message=new_patchset_msg) |
| 2310 new_patchset.put() |
| 2311 · |
| 2312 # Add the new commit message patch. |
| 2313 commit_patch = _create_commit_message_patch(new_patchset, issue.description) |
| 2314 commit_patch.put() |
| 2315 |
| 2316 # And copy all the patches over from last patchset. |
| 2317 for patch in list(last_patchset.patch_set): |
| 2318 # Skip the old commit message, since we just created a new one. |
| 2319 if patch.filename == COMMIT_MSG: |
| 2320 continue |
| 2321 new_patch = clone_without_key(patch, parent=new_patchset, |
| 2322 patchset=new_patchset) |
| 2323 new_patch.put() |
| 2324 |
| 2253 issue.closed = cleaned_data['closed'] | 2325 issue.closed = cleaned_data['closed'] |
| 2254 issue.private = cleaned_data.get('private', False) | 2326 issue.private = cleaned_data.get('private', False) |
| 2255 base_changed = (issue.base != base) | 2327 base_changed = (issue.base != base) |
| 2256 issue.base = base | 2328 issue.base = base |
| 2257 issue.reviewers = reviewers | 2329 issue.reviewers = reviewers |
| 2258 issue.cc = cc | 2330 issue.cc = cc |
| 2259 if base_changed: | 2331 if base_changed: |
| 2260 for patchset in issue.patchset_set: | 2332 for patchset in issue.patchset_set: |
| 2261 db.run_in_transaction(_delete_cached_contents, list(patchset.patch_set)) | 2333 db.run_in_transaction(_delete_cached_contents, list(patchset.patch_set)) |
| 2262 issue.calculate_updates_for() | 2334 issue.calculate_updates_for() |
| (...skipping 173 matching lines...) | | Loading... |
| 2436 info = tarfile.TarInfo(prefix + patch.filename) | 2508 info = tarfile.TarInfo(prefix + patch.filename) |
| 2437 info.size = len(data) | 2509 info.size = len(data) |
| 2438 # TODO(adonovan): set SYMTYPE/0755 when Rietveld supports symlinks. | 2510 # TODO(adonovan): set SYMTYPE/0755 when Rietveld supports symlinks. |
| 2439 info.type = tarfile.REGTYPE | 2511 info.type = tarfile.REGTYPE |
| 2440 info.mode = 0644 | 2512 info.mode = 0644 |
| 2441 delta = request.patchset.modified - datetime.datetime(1970, 1, 1) # datetim
e->time_t | 2513 delta = request.patchset.modified - datetime.datetime(1970, 1, 1) # datetim
e->time_t |
| 2442 info.mtime = int(delta.days * 86400 + delta.seconds) | 2514 info.mtime = int(delta.days * 86400 + delta.seconds) |
| 2443 tar.addfile(info, fileobj=StringIO(data)) | 2515 tar.addfile(info, fileobj=StringIO(data)) |
| 2444 | 2516 |
| 2445 for patch in patches: | 2517 for patch in patches: |
| 2518 # Don't include special commit message in tarball. |
| 2519 if patch.filename == COMMIT_MSG: |
| 2520 continue |
| 2446 if not patch.no_base_file: | 2521 if not patch.no_base_file: |
| 2447 try: | 2522 try: |
| 2448 add_entry('a/', patch.get_content()) # before | 2523 add_entry('a/', patch.get_content()) # before |
| 2449 except FetchError: # I/O problem? | 2524 except FetchError: # I/O problem? |
| 2450 logging.exception('tarball: patch(%s, %s).get_content failed' % | 2525 logging.exception('tarball: patch(%s, %s).get_content failed' % |
| 2451 (patch.key().id(), patch.filename())) | 2526 (patch.key().id(), patch.filename())) |
| 2452 try: | 2527 try: |
| 2453 add_entry('b/', patch.get_patched_content()) # after | 2528 add_entry('b/', patch.get_patched_content()) # after |
| 2454 except FetchError: # file deletion? I/O problem? | 2529 except FetchError: # file deletion? I/O problem? |
| 2455 logging.exception('tarball: patch(%s, %s).get_patched_content failed' % | 2530 logging.exception('tarball: patch(%s, %s).get_patched_content failed' % |
| (...skipping 2040 matching lines...) | | Loading... |
| 4496 if form.is_valid(): | 4571 if form.is_valid(): |
| 4497 client_id = form.cleaned_data['client_id'] | 4572 client_id = form.cleaned_data['client_id'] |
| 4498 client_secret = form.cleaned_data['client_secret'] | 4573 client_secret = form.cleaned_data['client_secret'] |
| 4499 additional_client_ids = form.cleaned_data['additional_client_ids'] | 4574 additional_client_ids = form.cleaned_data['additional_client_ids'] |
| 4500 auth_utils.SecretKey.set_config(client_id, client_secret, | 4575 auth_utils.SecretKey.set_config(client_id, client_secret, |
| 4501 additional_client_ids) | 4576 additional_client_ids) |
| 4502 return HttpResponseRedirect(reverse(set_client_id_and_secret)) | 4577 return HttpResponseRedirect(reverse(set_client_id_and_secret)) |
| 4503 else: | 4578 else: |
| 4504 form = ClientIDAndSecretForm() | 4579 form = ClientIDAndSecretForm() |
| 4505 return respond(request, 'set_client_id_and_secret.html', {'form': form}) | 4580 return respond(request, 'set_client_id_and_secret.html', {'form': form}) |
| OLD | NEW |