# frozen_string_literal: true

# Morph.io scraper: searches every UK planning authority for applications
# matching street-furniture/communication-hub keywords and saves them to
# the local 'applications' SQLite table, keyed on authority + reference.
#
# Environment:
#   MORPH_DAYS - number of validated days to look back (integer string).

require 'uk_planning_scraper'
require 'scraperwiki'

# Keyword searches targeting InLink / BT kiosk / communication hub applications.
KEYWORD_SEARCHES = ['inlink', 'bt phone kiosk', 'communication hub'].freeze

authorities = UKPlanningScraper::Authority.all

authorities.each_with_index do |authority, i|
  puts "#{i + 1} of #{authorities.size}: #{authority.name}"

  KEYWORD_SEARCHES.each_with_index do |search, j|
    puts "Scrape #{j + 1} of #{KEYWORD_SEARCHES.size}: keywords: #{search}"

    begin
      applications = authority
                     .validated_days(ENV['MORPH_DAYS'].to_i)
                     .keywords(search)
                     .scrape

      saved = 0
      applications.each do |application|
        # The backend keyword search is weak: an 'inlink' search also matches
        # e.g. 'chainlink' fencing, so filter those false positives out here.
        # `to_s` guards against a missing/nil description, which previously
        # raised NoMethodError and aborted the rest of this search's saves.
        next if application[:description].to_s.match?(/chainlink/i)

        ScraperWiki.save_sqlite(
          [:authority_name, :council_reference],
          application,
          'applications'
        )
        saved += 1
      end
      # Report the number actually saved, not the raw scrape count
      # (filtered false positives are excluded).
      puts "#{authority.name}: #{saved} of #{applications.size} application(s) saved."
    rescue StandardError => e
      # Best-effort: log and continue with the next keyword search/authority.
      puts "Error: #{e}"
    end
  end
end