Skip to content

Instantly share code, notes, and snippets.

View xurenlu's full-sized avatar
🎆
holiday

黄滚 xurenlu

🎆
holiday
View GitHub Profile
@xurenlu
xurenlu / main.m
Last active August 29, 2015 14:07
Mac上检测蓝牙设置连接情况的obj-C 代码
//
// main.m
// xtt
//
// Created by r on 14-10-9.
// Copyright (c) 2014年 r. All rights reserved.
//
#import <Foundation/Foundation.h>
#import <Cocoa/Cocoa.h>
@xurenlu
xurenlu / scrapy.py
Created September 1, 2014 14:32
scrapy example
class MySpider(BaseSpider):
name = 'myspider'
start_urls = (
'http://example.com/page1',
'http://example.com/page2',
)
def parse(self, response):
# collect `item_urls`
for item_url in item_urls:
@xurenlu
xurenlu / url_load_testing.py
Created October 31, 2013 05:58
load a URL and check the loading status of all its resources
from threading import Thread,Condition,currentThread
import time,sys,os,signal
import random
from subprocess import *
from BaseHTTPServer import *
from urlparse import urlparse
from urlparse import parse_qs
import urllib
import json
from ghost import Ghost
@xurenlu
xurenlu / preadability.php
Created October 6, 2013 16:46
preadability
<?php
/** vim:set foldmethod=marker: */
/**
* @author renlu<helloasp@hotmail.com>
* @version 1.0.0
* @mail xurenlu@gmail.com
* @homepage http://www.162cm.com/
* */
//require "./phpQuery-onefile.php";
mb_internal_encoding("UTF-8");
@xurenlu
xurenlu / preadability.php
Created October 6, 2013 16:46
preadability
<?php
/** vim:set foldmethod=marker: */
/**
* @author renlu<helloasp@hotmail.com>
* @version 1.0.0
* @mail xurenlu@gmail.com
* @homepage http://www.162cm.com/
* */
//require "./phpQuery-onefile.php";
mb_internal_encoding("UTF-8");
@xurenlu
xurenlu / proxy.php
Created September 10, 2013 08:33
proxy.php
<?php
//update: support
$url= empty($_GET["url"])?$_SERVER["QUERY_STRING"]:$_GET["url"];
if(preg_match('/^http:\/\/.*/',$url)==false) die();
echo curl_fetch($url);
function curl_fetch($url, $timeout=30){
global $CONFIG,$MODULES;
@xurenlu
xurenlu / downloader.py
Created August 26, 2013 16:00
scrapy downloader middleware
from scrapy.http import Request, FormRequest, HtmlResponse
import gtk
import webkit
import jswebkit
import settings
class WebkitDownloader( object ):
def process_request( self, request, spider ):
if spider.name in settings.WEBKIT_DOWNLOADER:
@xurenlu
xurenlu / downloader.py
Created August 26, 2013 16:00
scrapy downloader middleware
from scrapy.http import Request, FormRequest, HtmlResponse
import gtk
import webkit
import jswebkit
import settings
class WebkitDownloader( object ):
def process_request( self, request, spider ):
if spider.name in settings.WEBKIT_DOWNLOADER:
@xurenlu
xurenlu / cookie2.py
Created August 26, 2013 15:46
cookie2.py
import os, cookielib, urllib2
cj = cookielib.MozillaCookieJar()
cj.load("./ck2")
opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
urllib2.install_opener(opener)
params = {'domain':"www.renren.com",'origURL':"http://www.renren.com/PLogin.do",'email':"xurenlu@126.com", 'password':"842519"}
req = urllib2.Request(
'http://www.renren.com/PLogin.do'
#
)
@xurenlu
xurenlu / cookie1.py
Created August 26, 2013 15:45
cookie1.py
from cookielib import CookieJar as _CookieJar, DefaultCookiePolicy
#from scrapy.utils.httpobj import urlparse_cached
def sqlite2cookie():#filename
from cStringIO import StringIO
#from pysqlite2 import dbapi2 as sqlite
import sqlite3
import cookielib