Skip to content

Instantly share code, notes, and snippets.

View anuvrat's full-sized avatar

Anuvrat Singh anuvrat

View GitHub Profile
__author__ = 'anuvrat'
def tower_of_hanoi_revisited(num):
    """Print the minimum number of moves for the 'Tower of Hanoi revisited'
    variant with *num* discs.

    Each disc added triples the previous move count and adds two:
    moves(n) = 3 * moves(n - 1) + 2, moves(0) = 0, i.e. 3**n - 1.
    Prints the result; returns None.
    """
    val = 0
    while num > 0:
        val = 3 * val + 2
        num -= 1
    # Call form of print is valid in both Python 2 and Python 3
    # (the original used the Python-2-only statement form `print val`).
    print(val)
__author__ = 'anuvrat'
def knapsack(weight):
k = [MAX] * (weight + 1)
for w, n in ((x, y) for y in xrange(0, len(coins)) for x in xrange(1, weight + 1)):
if w == coins[n][1]:
k[w] = min(k[w], coins[n][0])
elif w > coins[n][1] and k[w - coins[n][1]] != MAX:
#include <iostream>
#include <algorithm>
#include <limits.h>
using namespace std;
int wMax = 10005;
int cMax = 505;
void knapsack(int weight, int n, int p[], int w[]) {
from sys import stdin
__author__ = 'anuvrat'
def read(pos):
prev_min = max_value
while pos > 0:
prev_min = min(bi_tree[pos], prev_min)
pos -= (pos & (-1 * pos))
#include <stdio.h>
#include <algorithm>
#include <limits.h>
using namespace std;
// We'll use a Binary Indexed tree to store the ranks of contestants in the third competition.
int biTree[100002];
// A structure to store the ranks of each contestant in the 3 competitions.
__author__ = 'anuvrat'
def get_num(n):
    """Parse *n* as a base-10 integer.

    Returns the parsed int, or -1 when *n* is not a valid integer
    literal (ValueError). Any other exception propagates unchanged.
    """
    try:
        value = int(n)
    except ValueError:
        return -1
    return value
if __name__ == '__main__':
__author__ = 'anuvrat'
if __name__ == '__main__':
test_cases = int(raw_input())
for _ in range(test_cases):
sum_diff = 0
n = int(raw_input())
a = 1
2015-01-16 22:15:14+0530 [scrapy] INFO: Scrapy 0.24.4 started (bot: soccerstats)
2015-01-16 22:15:14+0530 [scrapy] INFO: Optional features available: ssl, http11, boto
2015-01-16 22:15:14+0530 [scrapy] INFO: Overridden settings: {'NEWSPIDER_MODULE': 'soccerstats.spiders', 'FEED_URI': 'ratings.json', 'LOG_LEVEL': 'INFO', 'CONCURRENT_REQUESTS_PER_DOMAIN': 16, 'CONCURRENT_REQUESTS': 32, 'SPIDER_MODULES': ['soccerstats.spiders'], 'BOT_NAME': 'soccerstats', 'CONCURRENT_ITEMS': 200, 'ROBOTSTXT_OBEY': True, 'COOKIES_ENABLED': False, 'USER_AGENT': 'Anuvrat Singh (+http://singhanuvrat.com)', 'FEED_FORMAT': 'json', 'LOG_FILE': 'application.log'}
2015-01-16 22:15:14+0530 [scrapy] INFO: Enabled extensions: FeedExporter, LogStats, TelnetConsole, CloseSpider, WebService, CoreStats, SpiderState
2015-01-16 22:15:14+0530 [scrapy] INFO: Enabled downloader middlewares: RobotsTxtMiddleware, HttpAuthMiddleware, DownloadTimeoutMiddleware, UserAgentMiddleware, RetryMiddleware, DefaultHeadersMiddleware, MetaRefreshMiddleware, HttpCo
# -*- coding: utf-8 -*-
"""
Spider to crawl WhoScored web pages.
"""
import re
from scrapy.contrib.spiders import CrawlSpider, Rule
from scrapy.utils.project import get_project_settings
# -*- coding: utf-8 -*-
"""
Link extractors for WhoScored web pages.
"""
import ast
from scrapy.contrib.linkextractors import LinkExtractor
from scrapy.link import Link