Skip to content

Instantly share code, notes, and snippets.

@joest67
Created April 28, 2025 05:52
Show Gist options
  • Select an option

  • Save joest67/7954d261660c06c91e40e692010ef63f to your computer and use it in GitHub Desktop.

Select an option

Save joest67/7954d261660c06c91e40e692010ef63f to your computer and use it in GitHub Desktop.
#!/usr/bin/env python
# coding: utf-8
import re
import Cookie
import datetime
import time
import requests
from pycookiecheat import chrome_cookies
from bs4 import BeautifulSoup
# Endpoint that returns the current bid list / auction state for a room.
QUERY_AUCTION_URL = 'http://m.fang.vanke.com/ActivityTarget/GetAuction'
# Endpoint that places a bid ("AddPrice") on a room.
AUCTION_URL = 'http://m.fang.vanke.com/ActivityTarget/AddPrice'
# Site identifier posted with every bid; 0 appears to be the default site -- TODO confirm.
SITE = 0
def echo_now(msg=''):
    """Print the current timestamp centered in a 50-char '=' banner.

    :param msg: optional label shown in parentheses before the timestamp;
        when empty, only the timestamp is printed.
    :returns: None (writes to stdout).
    """
    if msg:
        # e.g. "=====(label)2018-01-12 19:00:01.000000====="
        print(u'({}){}'.format(msg, datetime.datetime.now()).center(50, '='))
    else:
        print(str(datetime.datetime.now()).center(50, '='))
def loads_cookies():
    """Load this site's cookies from the local Chrome profile.

    pycookiecheat reads (and decrypts) cookies straight out of Chrome's
    cookie store, so no manual login or cookie copying is needed.

    :returns: dict mapping cookie name -> value for m.fang.vanke.com.
    """
    # NOTE(review): the original had unreachable code after this return that
    # parsed an undefined `cookies_str` via Cookie.SimpleCookie -- removed,
    # since it could never run and would raise NameError if it did.
    return chrome_cookies('http://m.fang.vanke.com')
# Spoof a mobile Chrome-on-iOS user agent so the mobile site
# (m.fang.vanke.com) serves the same pages a phone would see.
fake_headers = {
'user-agent': 'Mozilla/5.0 (iPhone; CPU iPhone OS 10_3 like Mac OS X) AppleWebKit/602.1.50 (KHTML, like Gecko) CriOS/56.0.2924.75 Mobile/14E5239e Safari/602.1',
}
def _parse_room_detail(room_id, site):
    """Fetch a room's auction page and scrape the CSRF token and price.

    :param room_id: numeric id of the room/lot.
    :param site: site identifier (not used by this page; kept so all
        room functions share the same signature).
    :returns: dict with 'token' (the __RequestVerificationToken value)
        and, when the listing shows a total-price row, 'price'.
    """
    def _is_target_info(tag):
        # Match the element whose class list contains 'target_info' --
        # the container holding the room's attribute rows.
        if not tag.has_attr('class'):
            return False
        classes = tag.attrs.get('class')
        return bool(classes) and 'target_info' in classes

    detail_url = 'http://m.fang.vanke.com/ActivityTarget/Auction?id={0}'.format(room_id)
    print(detail_url)
    resp = requests.get(detail_url, cookies=loads_cookies(), headers=fake_headers)
    soup = BeautifulSoup(resp.text, 'html.parser')
    # The anti-forgery token lives in a hidden <input>; it must be echoed
    # back in the bid POST or the server rejects the request.
    # (next(...) replaces py2-only filter(...)[0], which breaks on py3.)
    token = next(
        tag for tag in soup.find_all('input')
        if tag.attrs.get('name') == '__RequestVerificationToken'
    ).attrs.get('value')
    ret = dict(token=token)
    detail = soup.find_all(_is_target_info)[0]
    print(u'房间信息'.center(50, '-'))
    for li in detail.find_all('li'):
        # Each <li> holds label/value <div>s; strip whitespace, drop empties,
        # and expect exactly a (label, value) pair.
        cells = [div.contents[0].strip(' \r\n\t') for div in li.find_all('div')]
        k, v = [c for c in cells if c]
        print(u'{} {}'.format(k, v))
        if u'总价' in k:
            # "总价" (total price) row, e.g. "1234567 元" -> keep the number.
            ret['price'] = v.split(' ')[0]
    return ret
def _parse_auction(html_doc):
    """Parse the bid-list HTML fragment returned by GetAuction.

    :param html_doc: HTML string with one <li> per bid; the first <li>
        carries a 'successful' attribute.
    :returns: True when the first bid is marked successful.
    """
    soup = BeautifulSoup(html_doc, 'html.parser')
    # The server marks the winning state as a literal successful="True"
    # attribute on the first <li>.  (Dropped the redundant str('True').)
    success = soup.li.attrs['successful'] == 'True'
    # The py2 original printed each cell with a trailing comma (space-
    # separated, one line); join once for the same effect on py2 and py3.
    cells = [item.contents[0].strip('\n\t\r ') for item in soup.find_all('span')]
    print(u' '.join(cells))
    return success
def check_if_auction(room_id, site):
    """Query the bid list for a room and report the auction outcome.

    :param room_id: numeric id of the room/lot.
    :param site: site identifier (not sent to this endpoint; kept so all
        room functions share the same signature).
    :returns: None when nobody has bid yet, otherwise the boolean result
        of _parse_auction (True when the latest bid succeeded).
    """
    params = dict(
        pageSize=10,
        pageIndex=1,
        sortDirection=1,
        sortField='l.CreateDate',
        id=room_id,
    )
    resp = requests.post(QUERY_AUCTION_URL, data=params, cookies=loads_cookies(),
                         headers=fake_headers)
    data = resp.json()  # parse the body once instead of twice
    # success=False means no auction/bids exist yet for this room.
    if not data['success']:
        return None
    return _parse_auction(data['html'])
# TODO: decide whether each order needs manual confirmation
NEED_CONFIRM = True
# Sale start time as a naive local-time string; countdown() compares it
# against datetime.datetime.now().
# Previous sale: START_TIME_FMT = '2017-12-21 19:00:00'
START_TIME_FMT = '2018-01-12 19:00:01'
START_TIME = datetime.datetime.strptime(START_TIME_FMT, '%Y-%m-%d %H:%M:%S')
def countdown():
    """Report whether we are still waiting for the sale to start.

    Prints the current time on every poll so progress is visible.

    :returns: True while now is at or before START_TIME, False once the
        start moment has passed.
    """
    now = datetime.datetime.now()
    print(now)
    # Equivalent to the original if/return-False/return-True ladder.
    return now <= START_TIME
# Number of bid (AddPrice) requests sent so far; auction() caps this at 3.
AUCTION_TIMES = 0
# Dry-run flag -- NOTE(review): defined but never read anywhere in this
# file; auction() currently returns unconditionally before sending the
# bid, presumably the testing switch this flag was meant to control.
TEST = True
def auction(room_id, site, idx=0):
    """Place one bid on a room, then check whether anyone has won it.

    Exits the process on success, on server rate-limiting, or after the
    request cap is hit; otherwise returns so the caller can move on to
    the next room.

    :param room_id: numeric id of the room/lot to bid on.
    :param site: site identifier forwarded to the AddPrice endpoint.
    :param idx: retry counter for the "nobody has bid yet" case.
    """
    extra_params = _parse_room_detail(room_id, site)
    params = {
        'id': str(room_id),
        'site': str(site),
        # CSRF token scraped from the detail page; required by the server.
        '__RequestVerificationToken': extra_params['token'],
        'price': extra_params['price'],
    }
    echo_now(u'开始抢购')
    if TEST:
        # Dry-run guard.  The original had an unconditional bare `return`
        # here (leaving everything below dead) while the module-level TEST
        # flag went unused; tying the two together keeps today's behavior
        # (TEST is True) and makes the switch explicit.  Set TEST = False
        # to actually send the bid.
        return
    global AUCTION_TIMES
    AUCTION_TIMES += 1
    # Hard cap: only ever send 3 bid requests, then fall back to the phone.
    if AUCTION_TIMES > 3:
        print(u'请求超过次数 {}, 用手机抢'.format(AUCTION_TIMES))
        exit(1)
    resp = requests.post(AUCTION_URL, data=params, cookies=loads_cookies(),
                         headers=fake_headers)
    print(resp.text)
    data = resp.json()
    # Server-side rate limit: give up immediately once throttled.
    if data.get('Message', {}).get('text', '') == u'您点击太快了,休息一下吧。':
        print(u'请求到太快了,被封锁了,失败')
        exit(1)
    echo_now(u'下单请求结束')
    echo_now(u'检查抢购结果')
    auction_success = check_if_auction(room_id, site)
    if auction_success is None:
        # No bids recorded yet: retry once, then move on to the next room.
        if idx > 1:
            print(u'还没人抢到,换下一个')
        else:
            print(u'还没人抢到,重试一次')
            return auction(room_id, site, idx + 1)
    elif auction_success:
        print(u'抢到了')
        exit(0)
    else:
        print(u'没抢到')
    echo_now(u'开始下一个请求')
    if NEED_CONFIRM:
        key = raw_input('回车继续/其他取消:')
        if key != '':
            return
        else:
            exit(0)
def auction_by_id():
    """Bid on each configured room, in priority order.

    Room ids are paired with their human-readable unit labels
    (building-unit) purely for reference; only the id is used.
    """
    ordered_rooms = [
        (2961040, '10-402'),
        (2961036, '10-202'),
    ]
    for room_id, _label in ordered_rooms:
        auction(room_id, SITE)
# Guard the entry point so importing this module does not fire bids.
if __name__ == '__main__':
    echo_now()
    auction_by_id()
    echo_now()
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment