Automatically exported from code.google.com/p/planningalerts

Exmoor.py

import urllib2
import urllib
import urlparse
import datetime, time
import cgi

from BeautifulSoup import BeautifulSoup

from PlanningUtils import PlanningApplication, \
    PlanningAuthorityResults, \
    getPostcodeFromText

# Date format used in the weekly list URL and in the "received" column of the results table.
search_date_format = "%d+%b+%Y"
received_date_format = "%d %b %Y"

# Scraper for the Exmoor National Park weekly planning list.
class ExmoorParser:
    def __init__(self, *args):
        self.authority_name = "Exmoor National Park"
        self.authority_short_name = "Exmoor"
        self.base_url = "http://www.exmoor-nationalpark.gov.uk/planning_weekly_list.htm?weeklylist=%s"

        self._results = PlanningAuthorityResults(self.authority_name, self.authority_short_name)

    def getResultsByDayMonthYear(self, day, month, year):
        # Fetch the weekly list page for the given date.
        search_day = datetime.date(year, month, day)

        response = urllib2.urlopen(self.base_url % (search_day.strftime(search_date_format)))
        soup = BeautifulSoup(response.read())

        # The first <tr> contains headers
        trs = soup.table.findAll("tr")[1:]

        for tr in trs:
            application = PlanningApplication()

            tds = tr.findAll("td")

            application.date_received = datetime.datetime.strptime(tds[0].string, received_date_format).date()
            application.info_url = urllib.unquote(urllib.quote_plus(urlparse.urljoin(self.base_url, tds[1].a['href'])))
            application.council_reference = tds[1].a.string.strip()

            application.address = tds[2].a.string.strip()
            application.postcode = getPostcodeFromText(application.address)

            # Now fetch the info url to get the description and comment link.
            info_response = urllib.urlopen(application.info_url)
            info_soup = BeautifulSoup(info_response.read())

            application.description = info_soup.find(text="Proposal:").findNext("td").string.strip()

            try:
                application.comment_url = urlparse.urljoin(self.base_url, info_soup.find(text="Comment").parent['href'])
            except:
                # Not every application has a comment link.
                application.comment_url = "No Comments"

            self._results.addApplication(application)

        return self._results

    def getResults(self, day, month, year):
        return self.getResultsByDayMonthYear(int(day), int(month), int(year)).displayXML()

if __name__ == '__main__':
    parser = ExmoorParser()
    print parser.getResults(1, 8, 2008)
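For reference, below is a minimal sketch (Python 2, matching the style of the file above) of how the parser might be driven over several list dates rather than a single day. It assumes only the ExmoorParser interface shown in Exmoor.py; the week_of_results helper name is illustrative and not part of the original code.

import datetime

def week_of_results(end_day, end_month, end_year):
    # Print the results XML for each of the seven days ending on the given date.
    # A fresh ExmoorParser is created per day because self._results persists
    # for the lifetime of a parser, so reusing one would accumulate applications.
    end_date = datetime.date(end_year, end_month, end_day)
    for offset in range(7):
        d = end_date - datetime.timedelta(days=offset)
        print ExmoorParser().getResults(d.day, d.month, d.year)

# Example call, mirroring the date used in the file's __main__ block:
# week_of_results(1, 8, 2008)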