Skip to content

Instantly share code, notes, and snippets.

View iKlotho's full-sized avatar
🏠
Working from home

Umut Kahriman iKlotho

🏠
Working from home
  • İstanbul, Turkey
View GitHub Profile
# -*- coding: iso-8859-1 -*-
import requests, time, re, sys, json, urllib2
from downloadDM import downloadDM
from BeautifulSoup import BeautifulSoup
class lequipeParse(downloadDM):
    """Video scraper for video.lequipe.fr built on the downloadDM helper.

    NOTE(review): captured from a gist preview -- indentation was lost and
    the class body is likely truncated after this constructor.
    """

    def __init__(self):
        # Listing page the (unseen) scraping methods presumably start from.
        self.base_url = "http://video.lequipe.fr/morevideos/48/1"
import requests
import time
from BeautifulSoup import BeautifulSoup
# Module-wide HTTP session shared by the code below.
s = requests.Session()
class TwitterBot:
    """Twitter bot; only the beginning of __init__ is visible in this
    capture (gist preview)."""

    def __init__(self, id, pasw, twit):
        # NOTE(review): presumably the login-form POST target -- the code
        # that uses it is not captured here.
        self.url = "https://twitter.com/sessions"
@iKlotho
iKlotho / tckimlikdogrulama.py
Last active April 25, 2025 17:13
TC kimlik no sorgulama python
# Validate a Turkish national ID (TC kimlik no) against a name via the
# official NVI KPSPublic SOAP service.
import xml.etree.ElementTree as ET
import requests
# WSDL endpoint of the government identity-verification web service.
url = "https://tckimlik.nvi.gov.tr/Service/KPSPublic.asmx?WSDL"
headers = {"content-type": "text/xml"}
# Change this
tc_no = "XXXXXXXXXXXX"  # placeholder: the ID number to check
ad = "NAME"  # first name
soyad = "SURNAME"  # surname
# NOTE(review): gist preview -- the SOAP request that sends these values
# is not captured here.
@iKlotho
iKlotho / mtdbscrap.py
Created June 24, 2015 23:15
mtdb movie script
# Scrape paginated movie listings from mtdb.info and store them in SQLite.
import requests
import json
from BeautifulSoup import *
import sqlite3
# Paginated JSON endpoint; the page number is concatenated between the two
# halves (url_1 ends with "page=", url_2 carries the remaining parameters).
url_1 = "http://mtdb.info/titles/paginate?_token=ufJeUG1TzWzTQ2LZAfLm9bdqCFTzpbErfOdBP5wA&perPage=18&page="
url_2 = "&order=titleAsc&type=movie&minRating=&maxRating="
conn = sqlite3.connect('movie.db')  # local database file
c = conn.cursor()
# NOTE(review): the CREATE TABLE statement that follows this span is
# truncated in the capture.
c.execute('''CREATE TABLE movies
@iKlotho
iKlotho / pyTranslateConsole.py
Created June 5, 2015 14:40
Python Console Translate
#-*- coding: utf-8 -*-
import requests
while True:
entry = raw_input("[+] Cevirelecek Kelime -> ")
url = 'http://translate.google.com/translate_a/t?client=t&text='+entry+'&hl=en&sl=en&tl=tr'
r = requests.post(url)
resp = r.content
print '[-]' + resp.split('],')[1].replace("[","").replace("]","")
print
@iKlotho
iKlotho / vine.py
Last active January 7, 2024 11:08
vine.co download popular vines
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import requests
import urllib
import json
#url = 'https://vine.co/api/timelines/popular?size=30'
#url = 'https://vine.co/api/timelines/promoted'
@iKlotho
iKlotho / problem4.py
Last active January 28, 2020 18:19
blog.svpino.com Solution to Problem 4 Python
#Write a function that given a list of non negative integers, arranges them such that they form the largest possible number.
#For example, given [50, 2, 1, 9], the largest formed number is 95021
# question url https://blog.svpino.com/2015/05/07/five-programming-problems-every-software-engineer-should-be-able-to-solve-in-less-than-1-hour
# Sample input for the largest-concatenation problem described above.
numbers = [5, 51, 56, 50]
# NOTE(review): presumably accumulates the answer as a string -- the rest
# of the script (including the use of check()) is not captured here.
result = ''
def check(number):
if len(str(number)) == 1:
@iKlotho
iKlotho / useragents.py
Created May 8, 2015 11:58
User Agents
import random
uagent = ['Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.5; en-US; rv:1.9.1b3) Gecko/20090305 Firefox/3.1b3 GTB5',
'Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.5; ko; rv:1.9.1b2) Gecko/20081201 Firefox/3.1b2',
'Mozilla/5.0 (X11; U; SunOS sun4u; en-US; rv:1.9b5) Gecko/2008032620 Firefox/3.0b5',
'Mozilla/5.0 (X11; U; Linux x86_64; en-US; rv:1.8.1.12) Gecko/20080214 Firefox/2.0.0.12',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; cs; rv:1.9.0.8) Gecko/2009032609 Firefox/3.0.8',
'Mozilla/5.0 (X11; U; OpenBSD i386; en-US; rv:1.8.0.5) Gecko/20060819 Firefox/1.5.0.5',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; es-ES; rv:1.8.0.3) Gecko/20060426 Firefox/1.5.0.3',
'Mozilla/5.0 (Windows; U; WinNT4.0; en-US; rv:1.7.9) Gecko/20050711 Firefox/1.0.5',
'Mozilla/5.0 (Windows; Windows NT 6.1; rv:2.0b2) Gecko/20100720 Firefox/4.0b2',
@iKlotho
iKlotho / YoutubeAnnotationsScraper.py
Created May 7, 2015 19:11
Youtube annotations Scraper
# Fetch a YouTube channel's video-grid AJAX continuation page and parse it.
import requests
from bs4 import BeautifulSoup
# set userid to youtube channel id
# script will get all the annotations from user's videos
url = 'https://www.youtube.com/user/USERID/videos/browse_ajax?action_continuation=1&continuation=4qmFsgI8EhhVQ0VrOTBTOGl1SHQwZVV6Yks4TmtwX2caIEVnWjJhV1JsYjNNZ0FEQUJPQUZnQVdvQWVnRXl1QUVB'
r = requests.get(url)
# NOTE(review): no parser passed to BeautifulSoup; bs4 picks a default and
# warns about it on newer versions.
soup = BeautifulSoup(r.text)
@iKlotho
iKlotho / webscrap.py
Created April 1, 2015 15:24
Getting data from web and store them into sqlite db
# Scrape chicagoreader.com and store category/winner data in a local
# SQLite database (Python 2: uses urllib2).
from bs4 import BeautifulSoup
from urllib2 import urlopen
import sqlite3
BASE_URL = "http://www.chicagoreader.com"
conn = sqlite3.connect('webscrap.db') # creating database
c = conn.cursor()
# NOTE(review): the table is named 'stocks' but its columns hold category/
# winner data -- the name looks left over from an example; renaming it
# would change the on-disk schema, so it is only flagged here.
c.execute('''CREATE TABLE stocks
(category_url text, category text, winner text, runners_up text)''')