Skip to content
Snippets Groups Projects
test_crawler.py 8.38 KiB
Newer Older
  • Learn to ignore specific revisions
    import aiohttp
    import marshmallow
    import psycopg2
    import pytest

    from funkwhale_network import crawler, serializers
    
    async def test_fetch_nodeinfo(session, responses):
        """fetch_nodeinfo discovers the nodeinfo URL via .well-known and returns its payload."""
        target = "test.domain"
        nodeinfo_payload = {"hello": "world"}
        # The .well-known discovery document points at the schema-2.0 endpoint.
        discovery_doc = {
            "links": [
                {
                    "rel": "http://nodeinfo.diaspora.software/ns/schema/2.0",
                    "href": "https://test.domain/nodeinfo/2.0/",
                }
            ]
        }
        responses.get(
            "https://test.domain/.well-known/nodeinfo", payload=discovery_doc
        )
        responses.get("https://test.domain/nodeinfo/2.0/", payload=nodeinfo_payload)

        assert await crawler.fetch_nodeinfo(session, target) == nodeinfo_payload
    
    
    
    async def test_check(db_conn, populated_db, session, mocker, coroutine_mock):
        """check() chains fetch_nodeinfo -> clean_nodeinfo -> clean_check -> save_check."""
        mocked_fetch = mocker.patch.object(
            crawler, "fetch_nodeinfo", coroutine_mock(return_value={"hello": "world"})
        )
        mocked_clean_nodeinfo = mocker.patch.object(
            crawler, "clean_nodeinfo", return_value={"cleaned": "nodeinfo"}
        )
        mocked_clean_check = mocker.patch.object(
            crawler, "clean_check", return_value={"cleaned": "check"}
        )
        mocked_save = mocker.patch.object(crawler, "save_check", coroutine_mock())

        await crawler.check(db_conn, session, "test.domain")

        # Each stage receives the previous stage's output.
        mocked_fetch.assert_called_once_with(session, "test.domain")
        mocked_clean_nodeinfo.assert_called_once_with({"hello": "world"})
        mocked_clean_check.assert_called_once_with(
            {"up": True, "domain": "test.domain"}, {"cleaned": "nodeinfo"}
        )
        mocked_save.assert_called_once_with(db_conn, {"cleaned": "check"})
    
    
    async def test_check_nodeinfo_connection_error(
        populated_db, db_conn, session, mocker, coroutine_mock
    ):
        """A client error while fetching nodeinfo records the domain as down."""
        mocked_fetch = mocker.patch.object(
            crawler,
            "fetch_nodeinfo",
            coroutine_mock(side_effect=aiohttp.client_exceptions.ClientError),
        )
        mocked_save = mocker.patch.object(crawler, "save_check", coroutine_mock())

        await crawler.check(db_conn, session, "test.domain")

        mocked_fetch.assert_called_once_with(session, "test.domain")
        # No nodeinfo data is available, so only the up=False marker is persisted.
        mocked_save.assert_called_once_with(
            db_conn, {"domain": "test.domain", "up": False}
        )
    
    
    def test_clean_nodeinfo(populated_db):
        """clean_nodeinfo parses the semver string, truncates music hours, and
        drops fields the schema does not keep (federationNeedsApproval, favorites)."""
        raw = {
            "version": "2.0",
            "software": {"name": "funkwhale", "version": "0.18-dev+git.b575999e"},
            "openRegistrations": False,
            "usage": {"users": {"total": 78, "activeHalfyear": 42, "activeMonth": 23}},
            "metadata": {
                "private": False,
                "library": {
                    "federationEnabled": True,
                    "federationNeedsApproval": True,
                    "anonymousCanListen": True,
                    "tracks": {"total": 98552},
                    "artists": {"total": 9831},
                    "albums": {"total": 10872},
                    "music": {"hours": 7650.678055555555},
                },
                "usage": {
                    "favorites": {"tracks": {"total": 1683}},
                    "listenings": {"total": 50294},
                },
            },
        }
        cleaned = {
            "software": {
                "name": "funkwhale",
                # "0.18-dev+git.b575999e" split into semver components.
                "version": {
                    "major": 0,
                    "minor": 18,
                    "patch": 0,
                    "prerelease": "dev",
                    "build": "git.b575999e",
                },
            },
            "openRegistrations": False,
            "usage": {"users": {"total": 78, "activeHalfyear": 42, "activeMonth": 23}},
            "metadata": {
                "private": False,
                "library": {
                    "federationEnabled": True,
                    "anonymousCanListen": True,
                    "tracks": {"total": 98552},
                    "artists": {"total": 9831},
                    "albums": {"total": 10872},
                    # Fractional hours are truncated to an integer.
                    "music": {"hours": 7650},
                },
                "usage": {"listenings": {"total": 50294}},
            },
        }

        assert crawler.clean_nodeinfo(raw) == cleaned
    
    
    def test_clean_nodeinfo_raises_on_validation_failure():
        """An empty payload does not satisfy the nodeinfo schema and must raise.

        Fixes: ``pytest`` was used without being imported (NameError at
        runtime), and the local ``payload`` was defined but a fresh ``{}``
        literal was passed instead — pass the named payload.
        """
        payload = {}
        with pytest.raises(marshmallow.ValidationError):
            crawler.clean_nodeinfo(payload)
    
    
    
    def test_clean_check_result():
        """clean_check flattens the cleaned nodeinfo into the checks-table row shape.

        Fix: the test was declared ``async`` but contains no ``await``; without
        an asyncio plugin configured for it, the coroutine is never awaited and
        the assertions silently never run. A plain function is always executed.
        """
        # "https" is accepted in the check dict but not propagated to the row.
        check = {"up": True, "https": True, "domain": "test.domain"}
        data = {
            "software": {
                "name": "funkwhale",
                "version": {
                    "major": 0,
                    "minor": 18,
                    "patch": 0,
                    "prerelease": "dev",
                    "build": "git.b575999e",
                },
            },
            "openRegistrations": False,
            "usage": {"users": {"total": 78, "activeHalfyear": 42, "activeMonth": 23}},
            "metadata": {
                "private": False,
                "library": {
                    "federationEnabled": True,
                    "anonymousCanListen": True,
                    "tracks": {"total": 98552},
                    "artists": {"total": 9831},
                    "albums": {"total": 10872},
                    "music": {"hours": 7650},
                },
                "usage": {"listenings": {"total": 50294}, "downloads": {"total": 91273}},
            },
        }

        # One flat key per column; nested camelCase becomes snake_case prefixes.
        expected = {
            "domain": "test.domain",
            "up": True,
            "open_registrations": False,
            "federation_enabled": True,
            "anonymous_can_listen": True,
            "private": False,
            "usage_users_total": 78,
            "usage_users_active_half_year": 42,
            "usage_users_active_month": 23,
            "usage_listenings_total": 50294,
            "usage_downloads_total": 91273,
            "library_tracks_total": 98552,
            "library_albums_total": 10872,
            "library_artists_total": 9831,
            "library_music_hours": 7650,
            "software_name": "funkwhale",
            "software_version_major": 0,
            "software_version_minor": 18,
            "software_version_patch": 0,
            "software_prerelease": "dev",
            "software_build": "git.b575999e",
        }

        assert crawler.clean_check(check, data) == expected
    
    
    
    async def test_save_check(populated_db, db_conn, factories):
        """save_check persists a check row and upserts domain metadata.

        Exercises the round trip: insert a check via crawler.save_check, then
        read it back from the ``checks`` table and verify the domain row was
        updated. Relies on the populated_db/factories fixtures for schema and
        seed data (not visible here).
        """

        # Seed a prior public check for the domain — presumably ``.c`` is the
        # factory's async create shortcut; verify against the factories fixture.
        await factories["Check"].c(domain="test.domain", private=False)

        # Ensure the domain row exists before saving a check against it.
        await serializers.create_domain(db_conn, {"name": "test.domain"})
        data = {
            "domain": "test.domain",

            "up": True,
            "open_registrations": False,
            "federation_enabled": True,
            "anonymous_can_listen": True,
            "private": False,
            "usage_users_total": 78,
            "usage_users_active_half_year": 42,
            "usage_users_active_month": 23,
            "usage_listenings_total": 50294,

            "usage_downloads_total": 7092,

            "library_tracks_total": 98552,
            "library_albums_total": 10872,
            "library_artists_total": 9831,
            "library_music_hours": 7650,
            "software_name": "funkwhale",
            "software_version_major": 0,
            "software_version_minor": 18,
            "software_version_patch": 0,
            "software_prerelease": "dev",
            "software_build": "git.b575999e",
        }


        # Newest first, so fetchone() below returns the check just saved.
        sql = "SELECT * from checks ORDER BY time DESC"

        result = await crawler.save_check(db_conn, data)


        async with db_conn.cursor(
            cursor_factory=psycopg2.extras.RealDictCursor
        ) as db_cursor:
            await db_cursor.execute(sql)
            row = await db_cursor.fetchone()
            # ``time`` is set by save_check; copy it into the input dict so the
            # full-dict equality comparisons below can pass.
            data["time"] = result["time"]
            assert data == result
            assert row == data
            await db_cursor.execute(
                "SELECT * FROM domains WHERE name = %s", ["test.domain"]
            )
            domain = await db_cursor.fetchone()


        # node_name presumably comes from the populated_db fixture's seed data
        # or from save_check's domain update — confirm against the fixture.
        assert domain["node_name"] == "Test Domain"
    
    
    
    async def test_private_domain_delete_past_checks(
        populated_db, db_cursor, db_conn, factories
    ):
        """Saving a check for a domain marked private purges all of its checks."""
        # Start with one existing public check for the domain.
        await factories["Check"].c(domain="test.domain", private=False)

        private_check = {
            "domain": "test.domain",
            "node_name": "Test Domain",
            "up": True,
            "open_registrations": False,
            "federation_enabled": True,
            "anonymous_can_listen": True,
            "private": True,
            "software_name": "funkwhale",
            "software_version_major": 0,
            "software_version_minor": 18,
            "software_version_patch": 0,
            "software_prerelease": "dev",
            "software_build": "git.b575999e",
        }

        # A private check is not persisted...
        assert await crawler.save_check(db_conn, private_check) is None

        # ...and previously stored checks for the domain are deleted.
        async with db_conn.cursor() as db_cursor:
            await db_cursor.execute("SELECT * from checks")
            remaining = await db_cursor.fetchall()

        assert remaining == []