Skip to content

Instantly share code, notes, and snippets.

@davidmoremad
Last active January 21, 2019 16:46
Show Gist options
  • Select an option

  • Save davidmoremad/79cad1a33a13af99fad4fbf2bb0b0873 to your computer and use it in GitHub Desktop.

Select an option

Save davidmoremad/79cad1a33a13af99fad4fbf2bb0b0873 to your computer and use it in GitHub Desktop.
Buscador de series y películas en Megadede.com
#!/usr/local/bin/python3
# -*- coding: utf-8 -*-
# Interactive CLI to search series/movies on megadede.com and print
# their detail info plus episode/watch links, scraped with BeautifulSoup.
import requests
from bs4 import BeautifulSoup
from textwrap import wrap
from urllib.parse import quote

# Base URL of the site being scraped.
MEGADEDE_URL = "https://www.megadede.com/"
# NOTE(review): hard-coded session cookie copied from a logged-in browser.
# It will expire; refresh this value when requests start failing. Keeping a
# live session token in source is also a credential-leak risk — consider
# loading it from an environment variable instead.
MEGADEDE_COOKIE = '__cfduid=db0202dcec05a598362d8e5c72b10df4a1548017684; PHPSESSID=0ip95uqu80t07v85e1rnu9v8i2; popshown2=1; _ga=GA1.2.522276160.1548017685; _gid=GA1.2.166267043.1548017685; megadede-sess=eyJpdiI6InM3bndyNEpuUDNOYjNVOUtaOUhcLzJBPT0iLCJ2YWx1ZSI6IjBPXC9xSkFRUm8rMnM4c0k3djIrTWFYbEQzNW0wVVBCempXMlwveGFiOWxVanhpUFl1TDBXR0REcmN0RkZERDFVeiIsIm1hYyI6IjM1YjA2ZDZlYjFiNGRkYmZmNjdiYzEyYzQ1Mjk0Mzk0ZDIwYTY0OGY4YzdlNDQ2MTM2NTA1ODUxMjA0NTJkNzkifQ%3D%3D; _gat=1; XSRF-TOKEN=eyJpdiI6Im9PbGpoRUoxWjY2QlpnbWRkVXRGTEE9PSIsInZhbHVlIjoiTVQ4T2RybGJIdks2TWcwTTFFZWVyKzlsalp2XC9BNWZSUXE0UHZzUUFkaG8wUVNcL1NuODRjQTVNZCtcL21CV2w2TGZSeHQyMnF5aTdMdTFFU1dNcWprNVE9PSIsIm1hYyI6IjE4ZGUzYjFiMGQzNTgzMGUzMjExZDEzNTI2ZmIzNmQwMjMzZDY0Y2RhMTk3YWVkNTQ0MGJmMjE2MDVlOWZjMGEifQ%3D%3D; cakephp_session=eyJpdiI6IkJYVXByMDkrYkx4TGljVlJUXC9uaFJBPT0iLCJ2YWx1ZSI6Im1cL2xvZ29neFhLVytkVitvQXFDSVFzdEttOUlNcXpqMUxaaUtKdmY2Q0Q5TEI2b0V3N0dFeGxvK0RpUmg5ZTNUaGE2K0Vlb3ZHaTlcLzNKXC9ZUCtnZXVRPT0iLCJtYWMiOiJjNWI3NTM1OTRhNzkxYzI3Mjc2NWZjNWY4N2FlNTFiMTIyOTgyYWQxOTA0Yjc4M2JmYjZkZWJmOTEyZjdmYzhjIn0%3D'
class bcolors:
    """ANSI escape sequences used to colour terminal output."""

    # foreground colours
    header = '\033[95m'
    info = '\033[94m'
    success = '\033[92m'
    warning = '\033[93m'
    danger = '\033[91m'
    gray = '\033[90m'
    # text attributes
    bold = '\033[1m'
    underline = '\033[4m'
    # restores the terminal's default style
    reset = '\033[0m'


# shared instance used by every print statement in this script
color = bcolors()
def cookie_str_to_dict(cookie_string):
    """Parse a raw ``Cookie`` header string into a name -> value dict.

    Args:
        cookie_string: cookies in HTTP-header form, e.g. ``"a=1; b=2"``.

    Returns:
        dict mapping cookie names to their values.
    """
    cookie = {}
    for item in cookie_string.split(';'):
        if not item.strip():
            continue  # tolerate a trailing ';' or empty segment
        # Split only on the FIRST '=': values (base64, URL-encoded blobs)
        # may themselves contain '=' and must not be truncated.
        key, _, value = item.partition('=')
        # Strip the space left over from the '; ' separator so cookie
        # names are sent correctly (was ' PHPSESSID' before the fix).
        cookie[key.strip()] = value.strip()
    return cookie
def load_page(url):
    """GET *url* with the hard-coded session cookie and return parsed HTML.

    Terminates the whole script (exit status 1) when the site looks down:
    a non-200 response, a missing/empty ``<title>``, or a title of 'error'.

    Args:
        url: absolute URL to fetch.

    Returns:
        BeautifulSoup document for the response body.
    """
    cookies = cookie_str_to_dict(MEGADEDE_COOKIE)
    # timeout so a dead server fails fast instead of hanging forever
    r = requests.get(url, cookies=cookies, timeout=30)
    soup = BeautifulSoup(r.text, 'html.parser')
    # soup.title is None when the page has no <title> (typical of error
    # pages) — guard before dereferencing .text to avoid AttributeError.
    title = soup.title.text.strip() if soup.title else ''
    if r.status_code != 200 or not title or title.lower() == 'error':
        print('%s[!]%s %sMegadede%s está caido. Inténtalo más tarde.' % (color.danger, color.reset, color.bold, color.reset))
        exit(1)  # was exit(0): a failure must not report success
    return soup
def search(query):
    """Search Megadede for *query*.

    Args:
        query: free-text search string (URL-quoted before the request).

    Returns:
        list of dicts with keys 'cover', 'title', 'type' and 'link',
        one per result on the search page.
    """
    page = load_page(MEGADEDE_URL + 'search/' + quote(query))
    found = []
    for media in page.find_all('div', {'class': 'media-container'}):
        cover = media.find('div', {'class': 'media-cover'}).find('img')['src']
        title = media.find('div', {'class': 'media-title'}).text.strip()
        kind = media.find('div', {'class': 'media-sub'}).text.strip()
        link = media.find('a', href=True)['href']
        found.append({'cover': cover, 'title': title, 'type': kind, 'link': link})
    return found
def choose_result(items):
    """Print the numbered result list and let the user pick one entry.

    Args:
        items: list of result dicts as returned by ``search``.

    Returns:
        The chosen item dict, or None when the user pressed ENTER or the
        input is not a valid 1-based result number (previously a raw
        ValueError/IndexError crash, or a wrong item via negative index).
    """
    for number, item in enumerate(items, start=1):
        print('%3s | %8s - ' % (number, item['type']), end="")
        print(color.warning + item['title'] + color.reset)
    print(color.success + '\n[$]' + color.reset + ' ¿Lo tienes? Introduce el NUMERO. ¿No? ENTER y busca de nuevo:', end="")
    option = input()
    print()
    if not option:
        return None
    try:
        index = int(option) - 1
    except ValueError:
        return None  # non-numeric input -> treat as "search again"
    # reject out-of-range numbers (and negatives, which would silently
    # index from the end of the list)
    return items[index] if 0 <= index < len(items) else None
def show_info(item):
    """Fetch *item*'s detail page and print its metadata plus either the
    per-episode links (series) or the single watch link (movie).

    Args:
        item: result dict from ``search`` — uses its 'link', 'title'
              and 'type' keys.
    """
    soup = load_page(item['link'])
    # Scrape the header metadata of the detail page. NOTE(review): these
    # chained .find() calls assume the page layout; any of them returns
    # None (and crashes) if the site markup changes.
    rate = soup.find('div', {'class': 'item-vote'}).find('span', {'class': 'value'}).text.strip()
    date = soup.find_all('div', {'class': 'item-info-container'})[0].find('div', {'class': 'mini-content'}).text.strip()
    dura = soup.find_all('div', {'class': 'item-info-container'})[1].find('div', {'class': 'mini-content'}).text.strip()
    gnre = soup.find('div', {'class': 'genre-container'}).ul.li.get_text().strip()
    desc = soup.find('div', {'class': 'expandable'}).get_text().strip()
    print('-' * 80)
    print(color.header, '\n#', item['title'], color.reset)
    print('\nPuntuacion: %s \t\t\t Fecha: %s' % (rate, date))
    print('Genero: %-16s \t\t Duración: %s\n' % (gnre, dura))
    # wrap the description to 80 columns, one print per line
    [print(x) for x in wrap(desc, 80)]
    print()

    def serie():
        # Closes over `soup`: prints one block per season with every
        # episode's title, link and seen/unseen status.
        temps = soup.find_all('ul', {'class': 'episode-container'})
        for temp in temps:
            print(temp.find('li', {'class': 'season-header'}).contents[0].strip())
            for cap in temp.find_all('a', {'class': 'episode'}):
                cap_titl = ' '.join(cap.div.text.split())
                # data-href starts with '/', drop it before joining to base
                cap_link = MEGADEDE_URL + cap['data-href'].strip()[1:]
                cap_seen = 'Visto' if 'seen' in cap['class'] else 'No visto'
                print(' - %s' % (cap_titl))
                print(color.info, ' %s (%s)' % (cap_link + color.gray, cap_seen), color.reset)

    def pelicula():
        # Single watch link for a movie (same leading-'/' trim as above).
        film_link = MEGADEDE_URL + soup.find('div', {'class': 'peli-link'}).button['data-href'].strip()[1:]
        print('\t%s' % color.info + film_link + color.reset)
        return 0

    # dispatch on the media type string scraped by ``search``
    serie() if 'serie' in item['type'] else pelicula()
    print('-' * 80)
if __name__ == '__main__':
    # Interactive loop: prompt for a query, show the matches, let the
    # user pick one and print its details; ENTER restarts the search.
    while True:
        print(color.success + '\n[$] ' + color.reset + 'Buscador: ', end="")
        query = input()
        print()
        results = search(query)
        if not results:
            print('No hay resultados para esta búsqueda')
            continue
        option = choose_result(results)
        if option:
            show_info(option)
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment