Skip to content

Instantly share code, notes, and snippets.

@peace899
Last active August 15, 2023 19:59
Show Gist options
  • Save peace899/65bac645edc6856b729ef04107f51c88 to your computer and use it in GitHub Desktop.
Save peace899/65bac645edc6856b729ef04107f51c88 to your computer and use it in GitHub Desktop.
Async file watchdog using watchfiles.awatch that processes detected files in batches.
import asyncio
from watchfiles import awatch
from pathlib import Path
from .helpers import Iterator, AsyncIter
async def produce():
    """Watch `watch_path` for filesystem changes and enqueue each changed path.

    Runs forever: every change reported by watchfiles is printed and pushed
    onto the module-level `queue` for the consumer to batch-process.
    """
    async for changes in awatch(watch_path):
        for change_type, path in changes:
            print(f" {change_type.name}: {path}")
            # put_nowait() never blocks, so shipping it to a thread-pool
            # executor was pointless — and unsafe, since asyncio.Queue is
            # not thread-safe.  Await the native coroutine put() instead.
            await queue.put(path)
async def consume(batch_size):
    """Drain the module-level `queue` in batches of up to `batch_size` paths.

    Each batch pulled from the queue is handed to `process_batch`.
    """
    batches = Iterator(queue, batch_size)
    async for events in batches:
        print("Got a batch of events:", events)
        await process_batch(events)
async def process_batch(batch):
    """Handle every event in `batch` sequentially, sleeping 4 s before each one."""
    async for item in AsyncIter(batch):
        await asyncio.sleep(4)
        print("Got an event!", item)
# Directory to watch for filesystem changes.
watch_path = Path('.')
queue = asyncio.Queue()
#specify number of files/items from the queue to process at a time.
#Did this to save memory on project
batch_size = 3
loop = asyncio.get_event_loop()
asyncio.ensure_future(produce())
asyncio.ensure_future(consume(batch_size))
try:
    loop.run_forever()
except KeyboardInterrupt:
    # Ctrl-C is the expected way to stop the watcher; swallow it quietly.
    pass
finally:
    # Close the loop on every exit path, not only on KeyboardInterrupt
    # (the original closed it inside the except clause only).
    loop.close()
import asyncio
class AsyncIter:
    """Adapter exposing a plain synchronous iterable through `async for`.

    Each item is yielded as-is; only the iteration protocol changes.
    """

    def __init__(self, items):
        # The underlying synchronous iterable.
        self.items = items

    def __aiter__(self):
        # PEP 525: __aiter__ should be a regular method returning an async
        # iterator.  The original declared __aiter__ itself as an async
        # generator, which works but is the deprecated pre-3.5.2 shape.
        return self._agen()

    async def _agen(self):
        # Async generator yielding each wrapped item in order.
        for item in self.items:
            yield item
class Iterator(object):
    """Async iterator yielding items from an asyncio.Queue in batches.

    With batch_size == 1 each step yields a single item; otherwise each step
    yields a list of up to batch_size items.  A step always waits for at
    least one item, then flushes whatever else is immediately available, so
    partial batches are delivered instead of blocking for a full batch.
    """

    def __init__(self, queue, batch_size):
        self.queue = queue
        self.batch_size = batch_size

    def __aiter__(self):
        return self

    async def __anext__(self):
        if self.batch_size == 1:
            # Single-item mode: yield the bare item, not a 1-element list.
            return await self.queue.get()
        # BUG FIX: the original awaited queue.get() batch_size times and
        # caught asyncio.QueueEmpty — but the *blocking* get() never raises
        # QueueEmpty (only get_nowait() does), so the except was dead code
        # and a partial batch would block forever waiting to fill up.
        # Block only for the first item, then drain without waiting.
        batch = [await self.queue.get()]
        while len(batch) < self.batch_size:
            try:
                batch.append(self.queue.get_nowait())
            except asyncio.QueueEmpty:
                break
        return batch
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment