import logging
import traceback

from aiohttp import ClientSession
from arq.cron import cron

from funkwhale_network import crawler, settings
from funkwhale_network.db import DB
async def poll(ctx, domain):
    """Run a single crawler check against one domain, reusing the worker's session."""
    session: ClientSession = ctx["session"]
    return await crawler.check(session=session, domain=domain)
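# A minimal usage sketch, not part of the original module: poll expects the
# shared ClientSession that startup() (below) stores in ctx, so a one-off call
# from a script could look like this (the domain name is just an example):
#
#   async with ClientSession() as session:
#       await poll({"session": session}, "demo.funkwhale.audio")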
async def update_all(ctx):
    """Poll every known domain, logging failures without aborting the sweep."""
    async with DB() as db:
        domains = await db.get_all_domains()
    logging.debug("Domains to check: %s", domains)
    for check in domains:
        domain = check["name"]
        logging.info("Checking domain %s", domain)
        try:
            await poll(ctx, domain)
        except Exception:
            # A failed check must not stop the remaining domains from being polled.
            logging.error("... couldn't load all information for %s", domain)
            logging.error(traceback.format_exc())
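# A minimal sketch of triggering one full sweep outside the arq worker, e.g.
# from a one-off script (asyncio.run and the inline ctx dict are assumptions,
# not part of the original module):
#
#   import asyncio
#
#   async def main():
#       async with ClientSession() as session:
#           await update_all({"session": session})
#
#   asyncio.run(main())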
async def startup(ctx):
    """Create the shared aiohttp ClientSession used by every check in this worker."""
    ctx["session"] = ClientSession()
# Worker configuration: with no other time constraints, minute=None makes arq's
# cron fire update_all once per minute (at second 0).
cron_jobs = [cron(update_all, minute=None)]

max_concurrent_tasks = 20  # maximum number of checks running concurrently
shutdown_delay = 5  # seconds to wait for running jobs when shutting down
timeout_seconds = 15  # per-job timeout, in seconds
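# These module-level names match the attribute-style worker configuration of
# older arq releases (around 0.15). A minimal sketch of the equivalent setup on
# arq >= 0.16, where options live on a WorkerSettings class (an assumption
# about the arq version in use, not confirmed by the source):
#
#   class WorkerSettings:
#       cron_jobs = [cron(update_all, minute=None)]
#       on_startup = startup
#       max_jobs = 20      # replaces max_concurrent_tasks
#       job_timeout = 15   # replaces timeout_seconds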