Skip to content
Snippets Groups Projects
Verified Commit 6f00efa3 authored by Eliot Berriot's avatar Eliot Berriot
Browse files

Added option to disable caching in single lookup mode

parent c8c64f36
No related branches found
No related tags found
No related merge requests found
......@@ -33,6 +33,18 @@ class Dummy(Backend):
self._cache[key] = value
class Noop(Backend):
    """
    Cache backend that stores nothing: every lookup is a miss and
    every write is silently discarded.

    Used when a request opts out of caching (e.g. ``?nocache``).
    """

    async def get(self, key):
        # Always behave as a cache miss.
        raise self.NotFound(key)

    async def set(self, key, value, expire=None):
        # Drop the value on the floor.
        return
class Redis(Backend):
def __init__(self, params):
    # Backend configuration (presumably Redis connection settings) —
    # stored verbatim for later use; verify usage against the rest of
    # the class, which is not visible in this hunk.
    self.params = params
......
......@@ -2,6 +2,7 @@ import asyncio
import aiohttp.client
import json
import ssl
import urllib.parse
from channels.generic.http import AsyncHttpConsumer
......@@ -11,7 +12,6 @@ from . import exceptions
from . import sources
from . import serializers
aiohttp_timeout = aiohttp.ClientTimeout(total=10)
......@@ -93,9 +93,17 @@ class SearchSingleConsumer(AsyncHttpConsumer):
source = sources.registry._data[lookup_type]
except KeyError:
await json_response(self, 400, {"detail": "Invalid lookup"})
params = urllib.parse.parse_qs(
self.scope["query_string"].decode(), keep_blank_values=True
)
if "nocache" in params:
c = cache.Noop()
else:
c = cache.get_default()
try:
async with aiohttp.client.ClientSession(timeout=aiohttp_timeout) as session:
data = await source.get(lookup, session, cache=cache.get_default())
data = await source.get(lookup, session, cache=c)
profile = sources.result_to_retribute_profile(lookup_type, lookup, data)
except (exceptions.SearchError, aiohttp.ClientError) as e:
await json_response(self, 400, {"detail": e.message})
......
......@@ -2,6 +2,7 @@ import json
import aiohttp
from channels.testing import HttpCommunicator
from retribute_api import cache
from retribute_api import providers
from retribute_api.search import consumers
from retribute_api.search import exceptions
......@@ -33,6 +34,31 @@ async def test_search_consumer_success(
assert response["body"] == json.dumps(expected, indent=2, sort_keys=True).encode()
async def test_search_consumer_no_cache(
    loop, application, mocker, coroutine_mock, dummycache
):
    """
    A single-lookup request carrying ``?nocache`` must hand the source a
    ``cache.Noop`` backend instead of the default cache, and still return
    the serialized profile unchanged.
    """
    payload = {"dummy": "json"}
    source_get = mocker.patch.object(sources.Webfinger, "get", coroutine_mock())
    to_profile = mocker.patch.object(
        sources, "result_to_retribute_profile", return_value=payload
    )

    communicator = HttpCommunicator(
        application, "GET", "/v1/search/webfinger:test@user.domain?nocache"
    )
    response = await communicator.get_response()

    # The lookup value is forwarded and a no-op cache is injected.
    assert source_get.call_args[0][0] == "test@user.domain"
    assert isinstance(source_get.call_args[1]["cache"], cache.Noop)
    to_profile.assert_called_once_with(
        "webfinger", "test@user.domain", source_get.return_value
    )

    # The HTTP response itself is unaffected by the cache choice.
    assert response["status"] == 200
    assert response["headers"] == [
        (b"Content-Type", b"application/json"),
        (b"Access-Control-Allow-Origin", b"*"),
    ]
    assert response["body"] == json.dumps(payload, indent=2, sort_keys=True).encode()
async def test_search_multiple(loop, application, mocker, coroutine_mock, dummycache):
get = mocker.patch.object(
sources.Webfinger,
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment