# NOTE(review): "Newer" / "Older" here look like pagination-link residue from
# the page this code was extracted from — they are not Python (bare names would
# raise NameError at import). Commented out; safe to delete.
# Newer
# Older
from funkwhale_network import crawler, settings
from funkwhale_network.db import DB
# NOTE(review): fragment of an async poll(ctx, domain) job — its `async def`
# header is outside this chunk. Reuses the shared aiohttp session placed in
# the worker context by startup() below, then runs a single crawler check
# against the given domain and returns its result.
session: ClientSession = ctx["session"]  # created once in startup(); ClientSession import not visible in this chunk
return await crawler.check(session=session, domain=domain)
# NOTE(review): body of an async update_all(ctx) job — its `async def` header
# is outside this chunk, and the original indentation was lost in extraction;
# the nesting below is reconstructed from the control-flow keywords (confirm
# against the upstream file).
# Fetch every known domain, then poll each one; a failure on one domain is
# logged and skipped so it cannot abort the whole sweep.
async with DB() as db:
    domains = await db.get_all_domains()
print(domains)
for check in domains:
    domain = check["name"]
    print(f"Checking domain {domain}")
    try:
        # poll() is defined earlier in this file (outside this chunk's view).
        await poll(ctx, domain)
    except Exception as exception:
        # Best-effort sweep: log with traceback and continue with the next domain.
        logger.exception(f"couldn't load all information: {exception}")
async def startup(ctx):
    """Worker startup hook: create one shared HTTP client session for all jobs.

    The session is stored under ``ctx["session"]`` and is read back by the
    poll job above; creating it once here avoids a new connection pool per job.
    """
    http_session = ClientSession()
    ctx["session"] = http_session
# Worker scheduling/shutdown configuration, read by the arq worker runner —
# presumably attributes of a WorkerSettings class whose header is outside this
# chunk; verify against the full file.
# Schedule update_all as a cron job; run_at_startup=True also fires it once
# immediately. NOTE(review): hour=None / unique=True are arq cron options —
# confirm their exact semantics (hour unconstrained; no overlapping runs)
# against the arq version this project pins.
cron_jobs = [cron(update_all, run_at_startup=True, hour=None, unique=True)]
# Grace period (seconds) before the worker forces shutdown — TODO confirm units/consumer.
shutdown_delay = 5
# Per-job timeout in seconds. NOTE(review): check this name matches what the
# installed arq version actually reads (newer arq uses `job_timeout`).
timeout_seconds = 15