Get Friweb
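A Ruby 1.8 crawler for the friweb.com.br Nova Friburgo directory: it walks the lodging, restaurant, tourist-service and sightseeing listings page by page, extracts each partner's name and mailto: e-mail address, and writes the sorted results to resultado.txt.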
#!/usr/bin/ruby
# encoding: utf-8
# author: Rafael Polo
# created_at 12.Apr.2010

$KCODE = 'u'

require 'open-uri'
require 'timeout'
require 'rubygems'
require 'hpricot'
require 'htmlentities'
require 'iconv'
require 'active_support'
class GetFriweb
  # class-level state: collected output lines and the base URL of the listings
  @dados = []
  @friweb_url = "http://www.friweb.com.br/novafriburgo/"

  # captures the address inside a mailto: link
  REGEX_EMAIL = /href="mailto\:([A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Za-z]{2,4})"/
  def self.crawleia
    hospedagem  = {:url => @friweb_url + "hoteis_e_pousadas_nova_friburgo,,$.html",   :pages => 8,  :nome => "Hospedagem"}
    gastronomia = {:url => @friweb_url + "restaurantes_nova_friburgo,,$.html",        :pages => 9,  :nome => "Gastronomia"}
    servicos    = {:url => @friweb_url + "servicos_turisticos_nova_friburgo,,$.html", :pages => 19, :nome => "Serviços"}
    turismo     = {:url => @friweb_url + "pontos_turisticos_nova_friburgo,,$.html",   :pages => 3,  :nome => "Turismo"}
    categorias  = [hospedagem, gastronomia, servicos, turismo]

    categorias.each do |categoria|
      nome = "# " + categoria[:nome].to_s + "\n"
      puts nome
      @dados << nome
      # the listing URLs use "$" as the page-number placeholder
      categoria[:pages].times do |x|
        pega_parceiro(categoria[:url].gsub("$", (x + 1).to_s))
      end
    end

    # save the sorted results
    File.open('resultado.txt', 'w') do |f|
      @dados.sort!.each do |dado|
        f.puts dado
      end
    end
  end
  def self.pega_parceiro(url)
    coder = HTMLEntities.new
    page = load_url(url)
    return unless page

    doc = parse(page)
    doc.search("//td[@width=430]").each do |r|
      # partner name, HTML-decoded and capitalized word by word
      nome = coder.decode((r/"b").inner_html)
      nome.gsub!(/\w+/) { |word| word.mb_chars.capitalize }
      puts nome

      # follow the partner's detail page and scan it for a mailto: address
      parceiro_page = load_url(@friweb_url + (r/"a").attr('href'))
      next unless parceiro_page
      doc_in = parse(parceiro_page)
      mail = ""
      doc_in.search("//td[@width=310]").each do |cell|
        found = cell.inner_html.scan(REGEX_EMAIL).flatten
        mail = found.join(", ") unless found.empty?
        puts mail
      end
      @dados << nome + " - " + mail
    end
  end
  def self.parse(page)
    Hpricot(Iconv.conv('utf-8', page.charset, page))
  end
  def self.load_url(url)
    response = ""
    begin
      Timeout.timeout(8) do
        uri = URI.parse(URI.encode(url))
        response = uri.read if uri
      end
    rescue Exception => error
      response = false
      puts "Error: #{error}\n\n"
    end
    response
  end
end

GetFriweb.crawleia
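Usage note (a sketch of how to run it, not part of the original gist): save the script under any name, e.g. the hypothetical get_friweb.rb, and run it with a Ruby 1.8 interpreter that has the hpricot, htmlentities and activesupport gems installed (ruby get_friweb.rb). It prints progress to stdout and writes the sorted "# Category" headers and "Name - email" lines to resultado.txt in the current directory.

For reference, a minimal modernized sketch of the same extraction step, assuming Nokogiri and Ruby 2.5+ in place of the unmaintained Hpricot/Iconv stack; the selectors and the e-mail pattern mirror the original code, while partners, BASE and EMAIL are hypothetical names introduced here:

# Modernized sketch (not from the original gist): Nokogiri instead of Hpricot/Iconv
require 'open-uri'
require 'nokogiri'

# same capture group as REGEX_EMAIL in the gist
EMAIL = /href="mailto:([A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Za-z]{2,4})"/
BASE  = "http://www.friweb.com.br/novafriburgo/"

# one listing page -> ["Name - email", ...], mirroring pega_parceiro
def partners(listing_url)
  doc = Nokogiri::HTML(URI.open(listing_url).read)
  doc.xpath('//td[@width="430"]').map do |td|
    name = td.at('b') ? td.at('b').text.strip : ''
    href = td.at('a') ? td.at('a')['href'] : nil
    mail = href ? URI.open(BASE + href).read.scan(EMAIL).flatten.first.to_s : ''
    "#{name} - #{mail}"
  end
end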