v0.7.4 Release Candidate (#89)
* Improved unit tests
* Defect fix for golf when team events returned
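For context on the golf fix: the wildcard competitor match previously excluded team-type competitors only for tennis, so a '*' search key could match the wrong competitor when golf team events were returned. The sketch below is a hypothetical, self-contained illustration of the guard after this change (function and parameter names are assumptions, not the component's actual API); the real one-line change is in the custom_components/teamtracker/event.py diff further down.

# Hypothetical sketch of the wildcard guard after the v0.7.4 change; names are
# illustrative, the real logic lives in async_find_search_key() in event.py.
def wildcard_matches(search_key: str, competitor_type: str, sport_path: str) -> bool:
    """Return True if a '*' search key should match this competitor."""
    if search_key != "*":
        return False
    # Team-type competitors in tennis and golf represent team events, which the
    # wildcard must not match (golf was added to the exclusion in this release).
    return competitor_type == "athlete" or sport_path not in ("tennis", "golf")

print(wildcard_matches("*", "team", "golf"))     # False after the fix
print(wildcard_matches("*", "athlete", "golf"))  # True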
vasqued2 authored Apr 30, 2023
1 parent 43515fe commit 5dd33ef
Showing 14 changed files with 2,615 additions and 85 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/pylint.yaml
@@ -11,7 +11,7 @@ jobs:
runs-on: ubuntu-latest
strategy:
matrix:
python-version: ["3.9"]
python-version: ["3.10"]
steps:
- uses: actions/checkout@v3
- name: Set up Python ${{ matrix.python-version }}
2 changes: 1 addition & 1 deletion .github/workflows/pytest.yaml
@@ -11,7 +11,7 @@ jobs:
runs-on: ubuntu-latest
strategy:
matrix:
python-version: ['3.9', '3.10']
python-version: ['3.9', '3.10', '3.11']

steps:
- uses: actions/checkout@v3
72 changes: 43 additions & 29 deletions custom_components/teamtracker/__init__.py
@@ -248,9 +248,11 @@ async def async_update_game_data(self, config, hass) -> dict:
"last_update"
] = DEFAULT_LAST_UPDATE # set to fixed time for compares
values["kickoff_in"] = DEFAULT_KICKOFF_IN
with open(path, "w", encoding="utf-8") as convert_file:
convert_file.write(json.dumps(values, indent=4))

try:
with open(path, "w", encoding="utf-8") as convert_file:
convert_file.write(json.dumps(values, indent=4))
except:
_LOGGER.debug("%s: Error creating results file '%s'", sensor_name, path)
return values

#
@@ -293,17 +295,23 @@ async def async_call_api(self, config, hass, lang) -> dict:

if file_override:
_LOGGER.debug("%s: Overriding API for '%s'", sensor_name, team_id)
async with aiofiles.open("/share/tt/test.json", mode="r") as f:
file_path = "/share/tt/test.json"
if not os.path.exists(file_path):
file_path = "tests/tt/all.json"
async with aiofiles.open(file_path, mode="r") as f:
contents = await f.read()
data = json.loads(contents)
else:
async with aiohttp.ClientSession() as session:
async with session.get(url, headers=headers) as r:
_LOGGER.debug(
"%s: Getting state for '%s' from %s", sensor_name, team_id, url
)
if r.status == 200:
data = await r.json()
try:
async with session.get(url, headers=headers) as r:
_LOGGER.debug(
"%s: Getting state for '%s' from %s", sensor_name, team_id, url
)
if r.status == 200:
data = await r.json()
except:
data = None

num_events = 0
if data is not None:
@@ -342,15 +350,18 @@ async def async_call_api(self, config, hass, lang) -> dict:
url = URL_HEAD + sport_path + "/" + league_path + URL_TAIL + url_parms

async with aiohttp.ClientSession() as session:
async with session.get(url, headers=headers) as r:
_LOGGER.debug(
"%s: Getting state without date constraint for '%s' from %s",
sensor_name,
team_id,
url,
)
if r.status == 200:
data = await r.json()
try:
async with session.get(url, headers=headers) as r:
_LOGGER.debug(
"%s: Getting state without date constraint for '%s' from %s",
sensor_name,
team_id,
url,
)
if r.status == 200:
data = await r.json()
except:
data = None

num_events = 0
if data is not None:
@@ -391,18 +402,21 @@ async def async_call_api(self, config, hass, lang) -> dict:
url = URL_HEAD + sport_path + "/" + league_path + URL_TAIL + url_parms

async with aiohttp.ClientSession() as session:
async with session.get(url, headers=headers) as r:
_LOGGER.debug(
"%s: Getting state without language for '%s' from %s",
sensor_name,
team_id,
url,
)
if r.status == 200:
data = await r.json()

try:
async with session.get(url, headers=headers) as r:
_LOGGER.debug(
"%s: Getting state without language for '%s' from %s",
sensor_name,
team_id,
url,
)
if r.status == 200:
data = await r.json()
except:
data = None

return data, file_override


async def async_update_values(self, config, hass, data, lang) -> dict:
"""Return values based on the data passed into method"""
2 changes: 1 addition & 1 deletion custom_components/teamtracker/const.py
@@ -77,7 +77,7 @@

# Misc
TEAM_ID = ""
VERSION = "v0.7.3"
VERSION = "v0.7.4"
ISSUE_URL = "https://github.com/vasqued2/ha-teamtracker"
DOMAIN = "teamtracker"
PLATFORM = "sensor"
2 changes: 1 addition & 1 deletion custom_components/teamtracker/event.py
@@ -175,7 +175,7 @@ async def async_find_search_key(
):
"""Check if there is a match on wildcard, team_abbreviation, event_name, or athlete_name"""

if search_key == "*" and (competitor["type"] == "athlete" or sport_path != "tennis"):
if search_key == "*" and (competitor["type"] == "athlete" or sport_path not in ["tennis", "golf"]):
_LOGGER.debug(
"%s: Found competitor using wildcard '%s'; parsing data.",
sensor_name,
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -17,7 +17,7 @@ forced_separate = [
combine_as_imports = true

[tool.pylint.MAIN]
py-version = "3.9"
py-version = "3.10"
ignore = [
"tests",
]
26 changes: 26 additions & 0 deletions tests/const.py
@@ -8,6 +8,14 @@
"conference_id": "9999",
}

CONFIG_DATA2 = {
"league_id": "NCAAF",
"team_id": "OSU",
"name": "test_tt_all_test02",
"timeout": 120,
"conference_id": "5",
}

TEST_DATA = [
{
"sensor_name": "test_tt_all_test01",
@@ -177,6 +185,24 @@
"league": "XXX",
"team_abbr": "IND",
},
{
"sensor_name": "test_tt_all_test29",
"sport": "racing",
"league": "F1",
"team_abbr": "SAINTZ",
},
{
"sensor_name": "test_tt_all_test30",
"sport": "racing",
"league": "F1",
"team_abbr": "VERSTAPPEN",
},
{
"sensor_name": "test_tt_all_test31",
"sport": "racing",
"league": "F1",
"team_abbr": "STROLLZ",
},
]

MULTIGAME_DATA = [
53 changes: 17 additions & 36 deletions tests/test_init.py
@@ -1,29 +1,27 @@
"""Tests for init."""
import pytest
""" Tests for TeamTracker """

import pytest
from pytest_homeassistant_custom_component.common import MockConfigEntry

from custom_components.teamtracker.const import DOMAIN
from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN
from tests.const import CONFIG_DATA, CONFIG_DATA2

from tests.const import CONFIG_DATA


@pytest.fixture(autouse=True)
@pytest.fixture(autouse=False)
def expected_lingering_timers() -> bool:
"""Temporary ability to bypass test failures due to lingering timers.
""" Temporary ability to bypass test failures due to lingering timers.
Parametrize to True to bypass the pytest failure.
@pytest.mark.parametrize("expected_lingering_timers", [True])
This should be removed when all lingering timers have been cleaned up.
"""
return False


@pytest.mark.parametrize("expected_lingering_timers", [True])
#@pytest.mark.parametrize("expected_lingering_timers", [True])
async def test_setup_entry(
hass,
):
"""Test settting up entities."""
""" test setup """

entry = MockConfigEntry(
domain=DOMAIN,
title="team_tracker",
@@ -38,14 +36,18 @@ async def test_setup_entry(
entries = hass.config_entries.async_entries(DOMAIN)
assert len(entries) == 1

assert await entry.async_unload(hass)
await hass.async_block_till_done()

@pytest.mark.parametrize("expected_lingering_timers", [True])

#@pytest.mark.parametrize("expected_lingering_timers", [True])
async def test_unload_entry(hass):
"""Test unloading entities."""
""" test unload """

entry = MockConfigEntry(
domain=DOMAIN,
title="team_tracker",
data=CONFIG_DATA,
data=CONFIG_DATA2,
)

entry.add_to_hass(hass)
@@ -65,26 +67,5 @@ async def test_unload_entry(hass):
await hass.async_block_till_done()
assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 0


# async def test_import(hass):
# """Test importing a config."""
# entry = MockConfigEntry(
# domain=DOMAIN,
# title="NFL",
# data=CONFIG_DATA,
# )
# await async_setup_component(hass, "persistent_notification", {})
# with patch(
# "custom_components.teamtracker.async_setup_entry",
# return_value=True,
# ) as mock_setup_entry:

# ent_reg = async_get(hass)
# ent_entry = ent_reg.async_get_or_create(
# "sensor", DOMAIN, unique_id="replaceable_unique_id", config_entry=entry
# )
# entity_id = ent_entry.entity_id
# entry.add_to_hass(hass)
# await hass.config_entries.async_setup(entry.entry_id)
# assert entry.unique_id is None
# assert ent_reg.async_get(entity_id).unique_id == entry.entry_id
assert await entry.async_unload(hass)
await hass.async_block_till_done()
29 changes: 14 additions & 15 deletions tests/test_sensor.py
@@ -1,27 +1,25 @@
"""Test NFL Sensor"""
""" Test TeamTracker Sensor """

import pytest

from pytest_homeassistant_custom_component.common import MockConfigEntry

from custom_components.teamtracker.const import DOMAIN

#from custom_components.teamtracker.sensor import TeamTrackerScoresSensor
from tests.const import CONFIG_DATA


@pytest.fixture(autouse=True)
@pytest.fixture(autouse=False)
def expected_lingering_timers() -> bool:
"""Temporary ability to bypass test failures due to lingering timers.
Parametrize to True to bypass the pytest failure.
@pytest.mark.parametrize("expected_lingering_timers", [True])
This should be removed when all lingering timers have been cleaned up.
"""" Temporary ability to bypass test failures due to lingering timers.
Parametrize to True to bypass the pytest failure.
@pytest.mark.parametrize("expected_lingering_timers", [True])
This should be removed when all lingering timers have been cleaned up.
"""
return False

@pytest.mark.parametrize("expected_lingering_timers", [True])

#@pytest.mark.parametrize("expected_lingering_timers", [True])
async def test_sensor(hass, mocker):
""" Make sure sensor gets added """
""" test sensor """

entry = MockConfigEntry(
domain=DOMAIN,
@@ -31,11 +29,12 @@ async def test_sensor(hass, mocker):

mocker.patch("locale.getlocale", return_value=("en", 0))

# contents = "{}"
# mocker.patch('aiofiles.open', return_value=mocker.mock_open(read_data=contents).return_value)

entry.add_to_hass(hass)
assert await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()

assert "teamtracker" in hass.config.components

assert await entry.async_unload(hass)
await hass.async_block_till_done()

27 changes: 27 additions & 0 deletions tests/tt/README.txt
@@ -1,5 +1,32 @@
How to regression test.

Use test_event.py to validate that the sensor is returning the expected results across all supported sports and game situations.

The all.json file contains the json response for the simulated API call for the TEST_DATA sensors defined in const.py.
The expected result for each sensor defined in TEST_DATA is in the results directory.
The test_event.py file will test each sensor in TEST_DATA and confirm it matches the expected result in that directory.
The test will fail if there are differences.
If the expected results are supposed to be different, the expected results file should be updated so the test will pass.
The expected results files can be updated manually.
The expected results files can also be regenerated in the /share/tt/results directory:
Add the yaml to your config to create the sensor.
Copy the all.json file to /share/tt/test.json
Create an empty /share/tt/results directory
Restart HA
The integration will create a new version of the expected results file. Compare it to the prior version to ensure no unexpected changes were introduced.
Update the expected results in the test/results directory.

Use test_multigame.py to validate that the sensor pulls the correct competition, covering situations like doubleheaders in baseball.

The multigame.json file contains the json response for the simulated API call for the MULTIGAME_DATA sensors defined in const.py.
The test_multigame.py file will test each sensor in MULTIGAME_DATA and confirm it matches expected results.
The test will fail if the sensor returns the wrong competition.
The test only validates that the right competition is returned. It does not do the deep comparison of each value that the prior test does.

Legacy tests

The additional files are legacy manual tests that can be run for even deeper tests of specific sports if desired.

The .json files contain the json for the simulated API call.
The .yaml files contain the yaml to create the sensors for the corresponding .json file.
The -dump.txt files contain the expected output from the sensor dump.
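As a rough illustration of the regression comparison described above (a hedged sketch, not the actual test_event.py code; the results-file layout and helper names are assumptions), the check amounts to loading a sensor's stored expected-results JSON and diffing it against the sensor's current attributes:

# Hedged sketch of an expected-results comparison; paths and helper names are assumptions.
import json
from pathlib import Path

def load_expected(results_dir: str, sensor_name: str) -> dict:
    """Load the stored expected-results file for one sensor."""
    path = Path(results_dir) / f"{sensor_name}.json"
    with open(path, encoding="utf-8") as f:
        return json.load(f)

def diff_results(actual: dict, expected: dict) -> list:
    """Return (key, actual, expected) tuples for every attribute that differs."""
    keys = set(actual) | set(expected)
    return [
        (key, actual.get(key), expected.get(key))
        for key in sorted(keys)
        if actual.get(key) != expected.get(key)
    ]

if __name__ == "__main__":
    expected = {"sport": "golf", "league": "PGA", "team_abbr": "RAHM"}
    actual = {"sport": "golf", "league": "PGA", "team_abbr": "MCILROY"}
    for mismatch in diff_results(actual, expected):
        print("mismatch:", mismatch)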