Skip to content
Snippets Groups Projects
Verified Commit 00f84797 authored by Georg Krause's avatar Georg Krause
Browse files

fix: Make imports actually work

parent 6d35d92e
No related branches found
No related tags found
1 merge request!43Apply patches from the server
Pipeline #26090 failed
@@ -6,7 +6,7 @@ import functools
 import ssl
 import sys
-import output
+from . import output
 SSL_PROTOCOLS = (asyncio.sslproto.SSLProtocol,)
 try:
@@ -76,8 +76,8 @@ def async_command(f):
 def conn_command(f):
     async def wrapper(*args, **kwargs):
-        import db
-        import settings
+        from . import db
+        from . import settings
         pool = await db.get_pool(settings.DB_DSN)
         try:
@@ -116,7 +116,7 @@ async def migrate(conn):
     """
     Create database tables.
     """
-    import db
+    from . import db
     await db.create(conn)
@@ -128,7 +128,7 @@ async def clear(conn):
     """
     Drop database tables.
     """
-    import db
+    from . import db
     await db.clear(conn)
@@ -158,10 +158,10 @@ async def poll(domain):
     """
     Retrieve and store data for the specified domains.
     """
-    import crawler
-    import db
-    import settings
-    import worker
+    from . import crawler
+    from . import db
+    from . import settings
+    from . import worker
     pool = await db.get_pool(settings.DB_DSN)
     if not domain:
@@ -194,8 +194,8 @@ async def crawl(domain, use_public, detail, passes, sort):
     """
     Crawl the network starting from the given domain(s).
    """
-    import crawler
-    import settings
+    from . import crawler
+    from . import settings
     kwargs = crawler.get_session_kwargs()
     async with aiohttp.ClientSession(**kwargs) as session:
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment