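# Property-based test (Hypothesis + Redis + protobuf): Pages messages pushed
# onto a Redis task queue should come back from flow.tail_queue classified,
# either all of the queued pages or at least min_messages of them.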
# Standard imports reconstructed from usage in this snippet; ss, pages_pb2,
# flow, ProtoBufPagesLens, RedisTaskQueue, RedisOutQueue and ListStrategy come
# from the project and the 2015-era Hypothesis internals, whose import lines
# are not part of this gist.
from collections import namedtuple
from itertools import izip

import redis
import toolz
import hypothesis as hs


def strategy(*args, **kwargs):
    return ss.StrategyTable().strategy(*args, **kwargs)


PagesTuple = namedtuple("PagesTuple", ("url", "url_hash", "description",
                                       "title", "classified"))
def make_pages(tuples):
    pages = pages_pb2.Pages()
    for tup in tuples:
        # copy each field of the tuple onto a freshly added protobuf page
        page = pages.pages.add()
        page.url = tup.url
        page.url_hash = tup.url_hash
        page.description = tup.description
        page.title = tup.title
        page.classified = tup.classified
    return pages
def test_tail_redis_batch_queue_qc(classifier_path):
    pst = strategy(PagesTuple(unicode, unicode, unicode, unicode,
                              hs.descriptors.Just(0)))
    lens = ProtoBufPagesLens()
    red = redis.StrictRedis(db=2)
    task_queue = RedisTaskQueue(red, lens, RedisOutQueue(red, lens))

    @hs.given(items=ListStrategy([ListStrategy([pst])], average_length=5),
              verifier_settings=hs.settings.Settings(timeout=10))
    def fun(items):
        red.flushdb()
        min_messages = 100
        # make sure the list is not empty and contains no empty pages
        hs.assume(len(items) > 0)
        queue_items = [make_pages(pages) for pages in items if len(pages) > 0]
        hs.assume(len(queue_items) > 0)
        total_pages = sum(len(i) for i in items)
        for item in queue_items:
            task_queue.put_task(item)
        print("about to tail, ", len(items))
        print([len(i) for i in items])
        flow.tail_queue(classifier_path, task_queue, no_loop=True,
                        get_loop_timeout=0.1, min_messages=min_messages)
        out_pages = task_queue.out_queue.get()
        # the result must contain either all of the queued pages or at least
        # min_messages pages
        assert (total_pages == len(out_pages.pages)
                or min_messages <= len(out_pages.pages))
        # pages survive serialisation and have been through the classifier
        for orig, new in izip(toolz.concat(item.pages for item in queue_items),
                              out_pages.pages):
            assert orig.title == new.title
            assert new.classified in (new.CLASSIFIED, new.UNCLASSIFIABLE)
        print(total_pages, len(out_pages.pages))

    fun()
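# Running this test requires a local Redis server (database 2 is flushed on
# every Hypothesis example) and a classifier_path argument supplied by the
# test harness, presumably as a pytest fixture.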