diff --git a/Dockerfile b/Dockerfile index c2950031840e093562563d71c79474b9b2d2b266..0c27dcdf762cbee5dfef9d58b67e53f15bed48dd 100644 --- a/Dockerfile +++ b/Dockerfile @@ -6,7 +6,6 @@ RUN \ postgresql-contrib \ postgresql-dev \ python3-dev \ - py3-psycopg2 \ gcc \ musl-dev diff --git a/docker-compose.yml b/docker-compose.yml index f940e7befa1818567f2fab7ac45f564c64a2bba3..464b49ae1a2cd7dd43b66169c121fc071c0c74c7 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -10,28 +10,28 @@ services: - POSTGRES_DB=${POSTGRES_DB-funkwhale_network} volumes: - ./data/postgres/database:/var/lib/postgresql/data + networks: + - default grafana: env_file: .env restart: ${RESTART_POLICY-unless-stopped} image: ${GRAFANA_IMAGE-grafana/grafana:6.2.0-beta2} - environment: - - "GF_SERVER_ROOT_URL=${GF_SERVER_ROOT_URL-%(protocol)s://%(domain)s/dashboards}" - - "GF_SERVER_DOMAIN=${GF_SERVER_DOMAIN}" - - "GF_SECURITY_ADMIN_PASSWORD=${GF_SECURITY_ADMIN_PASSWORD}" - - "GF_DEFAULT_INSTANCE_NAME=Funkwhale Network" - - "GF_AUTH_ANONYMOUS_ENABLED=true" depends_on: - db - ports: - - "${GF_EXTERNAL_PORT-3000}:3000" volumes: - - grafana:/var/lib/grafana + - ./data/grafana:/var/lib/grafana + networks: + - web + - default + redis: restart: ${RESTART_POLICY-unless-stopped} image: redis:5-alpine volumes: - ./data/redis:/data + networks: + - default api: restart: ${RESTART_POLICY-unless-stopped} @@ -46,11 +46,12 @@ services: - REDIS_DB=${REDIS_DB-0} volumes: - ".:/app/" - ports: - - "${APP_EXTERNAL_PORT-8000}:${APP_PORT-8000}" depends_on: - db - redis + networks: + - web + - default worker: restart: ${RESTART_POLICY-unless-stopped} @@ -68,6 +69,14 @@ services: depends_on: - db - redis + networks: + - default volumes: grafana: + +networks: + default: + web: + external: + name: nginx-proxy-manager_default diff --git 
a/funkwhale_network/cli.py b/funkwhale_network/cli.py index 377d33d2af9dd5716e17580963c6927f30613e8b..be72b99d05d7919db3ca238bf6d8afa6b138c042 100644 --- a/funkwhale_network/cli.py +++ b/funkwhale_network/cli.py @@ -7,7 +7,7 @@ import functools import ssl import sys -from . import output +from funkwhale_network import output SSL_PROTOCOLS = (asyncio.sslproto.SSLProtocol,) try: @@ -77,8 +77,8 @@ def async_command(f): def conn_command(f): async def wrapper(*args, **kwargs): - from . import db - from . import settings + from funkwhale_network import db + from funkwhale_network import settings pool = await db.get_pool(settings.DB_DSN) try: @@ -117,7 +117,7 @@ async def migrate(conn): """ Create database tables. """ - from . import db + from funkwhale_network import db await db.create(conn) @@ -129,7 +129,7 @@ async def clear(conn): """ Drop database tables. """ - from . import db + from funkwhale_network import db await db.clear(conn) @@ -139,8 +139,8 @@ def server(): """ Start web server. """ - from . import server - from . import settings + from funkwhale_network import server + from funkwhale_network import settings server.start(port=settings.PORT) @@ -159,10 +159,10 @@ async def poll(domain): """ Retrieve and store data for the specified domains. """ - from . import crawler - from . import db - from . import settings - from . import worker + from funkwhale_network import crawler + from funkwhale_network import db + from funkwhale_network import settings + from funkwhale_network import worker pool = await db.get_pool(settings.DB_DSN) if not domain: @@ -197,8 +197,8 @@ async def crawl(domain, use_public, detail, passes, sort): """ Crawl the network starting from the given domain(s). """ - from . import crawler - from . import settings + from funkwhale_network import crawler + from funkwhale_network import settings kwargs = crawler.get_session_kwargs() async with aiohttp.ClientSession(**kwargs) as session: diff --git a/funkwhale_network/crawler.py b/funkwhale_network/crawler.py index 5800182f76f3d97f1a3f08877c4ceb074c07a4ce..301613cdc2c7b37e8aee8dab9bfadb68d92fbb6f 100644 --- a/funkwhale_network/crawler.py +++ b/funkwhale_network/crawler.py @@ -2,11 +2,11 @@ import aiohttp import asyncio import sys -from . 
import db -from . import exceptions -from . import settings -from . import serializers -from . import schemas +from funkwhale_network import db +from funkwhale_network import exceptions +from funkwhale_network import settings +from funkwhale_network import serializers +from funkwhale_network import schemas def get_session_kwargs(): diff --git a/funkwhale_network/middlewares.py b/funkwhale_network/middlewares.py index d1429c5fcff3d7124a6e583d115d11819f2b4e53..a50661f998016724997dea69de440e125720220c 100644 --- a/funkwhale_network/middlewares.py +++ b/funkwhale_network/middlewares.py @@ -1,7 +1,7 @@ from aiohttp import web -from . import db -from . import settings +from funkwhale_network import db +from funkwhale_network import settings @web.middleware diff --git a/funkwhale_network/serializers.py b/funkwhale_network/serializers.py index eaa0ba29a60ad0ade9ae705d246bc4e4a162377d..ac3d46367161c95f39547e586c6ac9a229b91928 100644 --- a/funkwhale_network/serializers.py +++ b/funkwhale_network/serializers.py @@ -1,6 +1,6 @@ import semver -from . import db +from funkwhale_network import db @db.dict_cursor diff --git a/funkwhale_network/settings.py b/funkwhale_network/settings.py index 644faaf9e1fbfbe539e480e4cd3745c3c9b50baa..d0869dd0e7bc84bb3c7362c1177a06b4f80d8502 100644 --- a/funkwhale_network/settings.py +++ b/funkwhale_network/settings.py @@ -16,7 +16,7 @@ CRAWLER_USER_AGENT = env( ) CRAWLER_TIMEOUT = env.int("CRAWLER_TIMEOUT", default=5) -from . import middlewares +from funkwhale_network import middlewares MIDDLEWARES = [middlewares.conn_middleware] diff --git a/funkwhale_network/worker.py b/funkwhale_network/worker.py index 7eff5d94f189529657f91360afe22642a7e9e16e..54cf19be56e3ecf861fbe7763f32d9ef3a3592c7 100644 --- a/funkwhale_network/worker.py +++ b/funkwhale_network/worker.py @@ -1,9 +1,9 @@ import aiohttp import arq -from . import crawler -from . import db -from . 
import settings +from funkwhale_network import crawler +from funkwhale_network import db +from funkwhale_network import settings class Crawler(arq.Actor):