Created
April 30, 2015 16:42
-
-
Save IceDragon200/70570b22658805286812 to your computer and use it in GitHub Desktop.
Derp
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
require 'thread'
class Worker
  # Sentinel object pushed onto the input queue to request shutdown.
  # Object.new compares by identity, so real payloads can never collide with it.
  STOP = Object.new

  # @!attribute in
  #   @return [Queue] input queue/channel
  attr_accessor :in

  # @!attribute out
  #   @return [Queue] output queue/channel (nil until wired up)
  attr_accessor :out

  def initialize
    @in = Queue.new
    @out = nil
  end

  # Process one unit of data; subclasses override this.
  # Push results to +out+ when done — but check it for nil first.
  def work(data)
  end

  # Consume the input queue, dispatching each item to #work,
  # until the STOP sentinel arrives.
  def main
    until (item = @in.pop).equal?(STOP)
      work(item)
    end
  end

  # Start the worker loop on a background thread.
  def run
    @t = Thread.new { main }
  end

  # Block until the worker thread finishes, then forget it.
  def await
    thread = @t
    thread.join if thread && thread.alive?
    @t = nil
  end

  # Asynchronous stop: enqueue the sentinel and return immediately.
  def stop
    @in << STOP
  end

  # Synchronous stop: request shutdown, then wait for the thread to exit.
  def stop!
    stop
    await
  end
end
# Pipeline stage 1: scrapes raw job data and forwards results downstream.
class Scraper < Worker
  # @param data [Object] raw job data to scrape
  # @return [void]
  def work(data)
    # derp — real scraping goes here
    scrape_result = {}
    # Worker#work's contract says +out+ may be nil (unwired pipeline);
    # the original pushed unconditionally and would raise NoMethodError.
    out << scrape_result if out
  end
end
# Pipeline stage 2: processes scraped data and forwards results downstream.
class Processor < Worker
  # @param data [Object] scraped data to process
  # @return [void]
  def work(data)
    # derp — real processing goes here
    processed_result = {}
    # Worker#work's contract says +out+ may be nil (unwired pipeline);
    # the original pushed unconditionally and would raise NoMethodError.
    out << processed_result if out
  end
end
# Pipeline stage 3: downloads from processed/scraped data and emits a result.
class Downloader < Worker
  # @param data [Object] processed data describing what to download
  # @return [void]
  def work(data)
    # do your downloading from the scraped data
    # ... Download here ...
    download_result = { data: data, status: :downloaded }
    # Worker#work's contract says +out+ may be nil (unwired pipeline);
    # the original pushed unconditionally and would raise NoMethodError.
    out << download_result if out
  end
end
# --- Wire up the pipeline: scraper -> processor -> downloader -> results ---
results = [] # NOTE: plain Array, not thread-safe — wrap it in an Atom or similar.
scraper = Scraper.new
processor = Processor.new
downloader = Downloader.new

# Chain each worker's output to the next worker's input queue;
# the tail stage dumps into the results array (Array also responds to <<).
scraper.out = processor.in
processor.out = downloader.in
downloader.out = results

# Start all three stages.
scraper.run
processor.run
downloader.run

# Feed a job into the head of the pipeline. More jobs can be pushed at any
# time and will flow through the stages.
job = { some_data: 1 }
scraper.in << job

# Always shut down in the order the stages were started, so each stage has
# drained its queue before its consumer stops.
scraper.stop!
processor.stop!
downloader.stop!
# voila :3
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment