
Apply patches from the server

Merged: Georg Krause requested to merge server into main
13 files changed: +415 −342 (file shown below: +9 −10)
@@ -2,7 +2,6 @@ import aiohttp
 import asyncio
 import click
 import logging.config
-import arq.worker
 import functools
 import ssl
 import sys
@@ -139,8 +138,8 @@ def server():
"""
Start web server.
"""
from . import server
from . import settings
from funkwhale_network import server
from funkwhale_network import settings
server.start(port=settings.PORT)
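
Note: switching from the relative "from . import server" to the absolute "from funkwhale_network import server" keeps the command working however the CLI module is loaded (installed entry point, python -m, or direct execution). As a sketch, the command body now reads as below; the click decorator is an assumption, since the diff only shows the function body:

@cli.command()  # assumed decorator, not shown in the diff
def server():
    """
    Start web server.
    """
    # imported inside the function because the command name shadows the module name
    from funkwhale_network import server
    from funkwhale_network import settings

    server.start(port=settings.PORT)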
@@ -167,9 +166,7 @@ async def poll(domain):
     pool = await db.get_pool(settings.DB_DSN)
     if not domain:
         click.echo("Polling all domains…")
-        crawler = worker.Crawler(
-            redis_settings=arq.RedisSettings(**settings.REDIS_CONFIG)
-        )
+        crawler = worker.Crawler()
         return await crawler.poll_all()
     try:
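
Note: worker.Crawler() is now constructed without the redis_settings keyword, so the crawler presumably obtains its connection configuration from the worker module itself rather than from arq.RedisSettings; that detail is outside this diff. A sketch of the call site as it reads after the change:

async def poll(domain):
    pool = await db.get_pool(settings.DB_DSN)
    if not domain:
        click.echo("Polling all domains…")
        # the crawler no longer receives per-call Redis settings
        crawler = worker.Crawler()
        return await crawler.poll_all()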
@@ -308,13 +305,15 @@ def aggregate_crawl_results(domains_info):
 @click.option("-v", "--verbose", is_flag=True)
 @click.option("--check", is_flag=True)
 def start(*, check, verbose):
-    worker = arq.worker.import_string("funkwhale_network.worker", "Worker")
-    logging.config.dictConfig(worker.logging_config(verbose))
+    #worker = arq.worker.import_string("funkwhale_network.worker", "Worker")
+    #logging.config.dictConfig(worker.logging_config(verbose))
     if check:
-        exit(worker.check_health())
+        pass
+        #exit(worker.check_health())
     else:
-        arq.RunWorkerProcess("funkwhale_network.worker", "Worker", burst=False)
+        pass
+        #arq.RunWorkerProcess("funkwhale_network.worker", "Worker", burst=False)


 def main():
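
Note: after this hunk the start command keeps its options but no longer imports, health-checks, or launches the arq worker; both branches reduce to pass, so --check and -v/--verbose are accepted and ignored, presumably while the arq dependency is being phased out. The resulting body, with the disabled calls left as comments, reads:

@click.option("-v", "--verbose", is_flag=True)
@click.option("--check", is_flag=True)
def start(*, check, verbose):
    #worker = arq.worker.import_string("funkwhale_network.worker", "Worker")
    #logging.config.dictConfig(worker.logging_config(verbose))
    if check:
        pass
        #exit(worker.check_health())
    else:
        pass
        #arq.RunWorkerProcess("funkwhale_network.worker", "Worker", burst=False)

In this state the command returns None on both paths, so invoking it (with or without --check) exits with status 0 and performs no work.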