Automatically exported from code.google.com/p/planningalerts
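The file below is the scraper for the Royal Borough of Kensington and Chelsea: it posts a week-ending date to the borough's weekly results page, parses the results table with BeautifulSoup, and returns the applications as XML via the shared PlanningUtils helpers. Running the file directly prints the XML for the week containing 11 June 2008.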

import urllib2
import urllib
import urlparse
import datetime, time
import cgi

from BeautifulSoup import BeautifulSoup

from PlanningUtils import PlanningApplication, \
    PlanningAuthorityResults, \
    getPostcodeFromText

date_format = "%d/%m/%Y"

class KensingtonParser:
    def __init__(self, *args):
        self.authority_name = "The Royal Borough of Kensington and Chelsea"
        self.authority_short_name = "Kensington and Chelsea"
        self.base_url = "http://www.rbkc.gov.uk/Planning/scripts/weeklyresults.asp"

        self._results = PlanningAuthorityResults(self.authority_name, self.authority_short_name)

    def getResultsByDayMonthYear(self, day, month, year):
        search_day = datetime.date(year, month, day)

        # We want the Friday of the week being searched for
        # (Friday is at the end of the results week).
        friday = search_day - datetime.timedelta(search_day.weekday()) + datetime.timedelta(4)

        # Not using urllib.urlencode as it insists on turning the "+" into "%2B"
        post_data = "WeekEndDate=%d%%2F%d%%2F%d&order=Received+Date&submit=search" % (friday.day, friday.month, friday.year)

        # Now get the search page
        response = urllib2.urlopen(self.base_url, post_data)
        soup = BeautifulSoup(response.read())

        trs = soup.find("table", summary="Planning Application search results table").findAll("tr")[1:]

        for tr in trs:
            application = PlanningApplication()

            tds = tr.findAll("td")

            # Not sure why these are entities. We'll convert them back.
            application.council_reference = tds[0].a.contents[1].strip().replace("&#47;", "/")
            application.info_url = urlparse.urljoin(self.base_url, tds[0].a['href'])
            application.comment_url = application.info_url

            application.date_received = datetime.datetime(*(time.strptime(tds[1].string.strip(), date_format)[0:6]))
            application.address = tds[2].string.strip()
            application.postcode = getPostcodeFromText(application.address)
            application.description = tds[3].string.strip()

            self._results.addApplication(application)

        return self._results

    def getResults(self, day, month, year):
        return self.getResultsByDayMonthYear(int(day), int(month), int(year)).displayXML()

if __name__ == '__main__':
    parser = KensingtonParser()
    print parser.getResults(11, 6, 2008)
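PlanningUtils is not included in this file. The sketch below is a hypothetical stand-in inferred only from how the scraper uses it (attribute assignments on PlanningApplication, addApplication/displayXML on PlanningAuthorityResults, and getPostcodeFromText on an address string); the real module in the planningalerts export may differ in names and output format.

# Minimal, hypothetical sketch of the PlanningUtils interface assumed above.
# The actual module in the repository may differ.
import re
from xml.sax.saxutils import escape

postcode_regex = re.compile(r"[A-Z]{1,2}\d[A-Z\d]? ?\d[A-Z]{2}")

def getPostcodeFromText(text):
    # Return the first thing that looks like a UK postcode, or None.
    match = postcode_regex.search(text or "")
    return match.group() if match else None

class PlanningApplication:
    def __init__(self):
        self.council_reference = None
        self.info_url = None
        self.comment_url = None
        self.date_received = None
        self.address = None
        self.postcode = None
        self.description = None

class PlanningAuthorityResults:
    def __init__(self, authority_name, authority_short_name):
        self.authority_name = authority_name
        self.authority_short_name = authority_short_name
        self.applications = []

    def addApplication(self, application):
        self.applications.append(application)

    def displayXML(self):
        # Serialise the collected applications as a simple XML document.
        lines = ['<planning authority="%s">' % escape(self.authority_name)]
        for app in self.applications:
            lines.append("  <application>")
            lines.append("    <council_reference>%s</council_reference>" % escape(app.council_reference or ""))
            lines.append("    <address>%s</address>" % escape(app.address or ""))
            lines.append("    <description>%s</description>" % escape(app.description or ""))
            lines.append("    <info_url>%s</info_url>" % escape(app.info_url or ""))
            if app.date_received is not None:
                lines.append("    <date_received>%s</date_received>" % app.date_received.strftime("%Y-%m-%d"))
            lines.append("  </application>")
        lines.append("</planning>")
        return "\n".join(lines)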