# Scrape planning applications relating to InLink/BT phone kiosks from every
# UK planning authority supported by the uk_planning_scraper gem, and save
# them to the local SQLite database (morph.io convention).

require 'uk_planning_scraper'
require 'scraperwiki'

auths = UKPlanningScraper::Authority.all

# One search per keyword phrase, each limited to applications validated in
# the last MORPH_DAYS days (set as an environment variable, e.g. on morph.io).
scrapes = [
  { validated_days: ENV['MORPH_DAYS'].to_i, keywords: 'inlink' },
  { validated_days: ENV['MORPH_DAYS'].to_i, keywords: 'bt phone kiosk' }
]

auths.each_with_index do |auth, i|
  puts "#{i + 1} of #{auths.size}: #{auth.name}"

  scrapes.each_with_index do |scrape, j|
    puts "Scrape #{j + 1} of #{scrapes.size}: keywords: #{scrape[:keywords]}"

    begin
      apps = auth.scrape(scrape)
      saved = 0

      apps.each do |app|
        # The back-end keyword search is weak, so filter out false positives
        # such as "chainlink" fencing. to_s guards against nil descriptions.
        next if app[:description].to_s.match?(/chainlink/i)

        ScraperWiki.save_sqlite([:authority_name, :council_reference], app, 'applications')
        saved += 1
      end

      puts "#{auth.name}: #{saved} of #{apps.size} application(s) saved."
    rescue StandardError => e
      puts "Error: #{e}"
    end
  end
end