Add CI job which runs recorder tests on MariaDB (#80586)

Co-authored-by: Franck Nijhof <git@frenck.dev>
Erik Montnemery authored on 2022-10-19 13:04:28 +02:00; committed by GitHub
parent c4bbc439a5
commit f4951a4f31
9 changed files with 250 additions and 26 deletions

View file

@@ -861,6 +861,109 @@ jobs:
run: |
./script/check_dirty
pytest-mariadb:
runs-on: ubuntu-20.04
services:
mariadb:
image: mariadb:10.9.3
ports:
- 3306:3306
env:
MYSQL_ROOT_PASSWORD: password
options: --health-cmd="mysqladmin ping -uroot -ppassword" --health-interval=5s --health-timeout=2s --health-retries=3
if: |
(github.event_name != 'push' || github.event.repository.full_name == 'home-assistant/core')
&& github.event.inputs.lint-only != 'true'
&& (needs.info.outputs.test_full_suite == 'true' || needs.info.outputs.tests_glob)
needs:
- info
- base
- gen-requirements-all
- hassfest
- lint-black
- lint-other
- lint-isort
- mypy
strategy:
fail-fast: false
matrix:
python-version: ${{ fromJson(needs.info.outputs.python_versions) }}
name: >-
Run tests Python ${{ matrix.python-version }} (mariadb)
steps:
- name: Install additional OS dependencies
run: |
sudo apt-get update
sudo apt-get -y install \
bluez \
ffmpeg \
libmariadb-dev-compat
- name: Check out code from GitHub
uses: actions/checkout@v3.1.0
- name: Set up Python ${{ matrix.python-version }}
id: python
uses: actions/setup-python@v4.3.0
with:
python-version: ${{ matrix.python-version }}
check-latest: true
- name: Restore full Python ${{ matrix.python-version }} virtual environment
id: cache-venv
uses: actions/cache@v3.0.11
with:
path: venv
key: ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
needs.info.outputs.python_cache_key }}
- name: Fail job if Python cache restore failed
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |
echo "Failed to restore Python virtual environment from cache"
exit 1
- name: Register Python problem matcher
run: |
echo "::add-matcher::.github/workflows/matchers/python.json"
- name: Install Pytest Annotation plugin
run: |
. venv/bin/activate
# Ideally this should be part of our dependencies
# However this plugin is fairly new and doesn't run correctly
# on a non-GitHub environment.
pip install pytest-github-actions-annotate-failures==0.1.3
- name: Register pytest slow test problem matcher
run: |
echo "::add-matcher::.github/workflows/matchers/pytest-slow.json"
- name: Install SQL Python libraries
run: |
. venv/bin/activate
pip install mysqlclient sqlalchemy_utils
- name: Run pytest (partially)
timeout-minutes: 10
shell: bash
run: |
. venv/bin/activate
python --version
python3 -X dev -m pytest \
-qq \
--timeout=9 \
-n 1 \
--cov="homeassistant.components.recorder" \
--cov-report=xml \
--cov-report=term-missing \
-o console_output_style=count \
--durations=0 \
--durations-min=10 \
-p no:sugar \
--dburl=mysql://root:password@127.0.0.1/homeassistant-test \
tests/components/recorder
- name: Upload coverage artifact
uses: actions/upload-artifact@v3.1.0
with:
name: coverage-${{ matrix.python-version }}-mariadb
path: coverage.xml
- name: Check dirty
run: |
./script/check_dirty
coverage:
name: Upload test coverage to Codecov
runs-on: ubuntu-20.04

View file

@@ -925,11 +925,11 @@ def assert_setup_component(count, domain=None):
SetupRecorderInstanceT = Callable[..., Awaitable[recorder.Recorder]]
def init_recorder_component(hass, add_config=None):
def init_recorder_component(hass, add_config=None, db_url="sqlite://"):
"""Initialize the recorder."""
config = dict(add_config) if add_config else {}
if recorder.CONF_DB_URL not in config:
config[recorder.CONF_DB_URL] = "sqlite://" # In memory DB
config[recorder.CONF_DB_URL] = db_url
if recorder.CONF_COMMIT_INTERVAL not in config:
config[recorder.CONF_COMMIT_INTERVAL] = 0
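For illustration, a minimal usage sketch of the widened helper: the default stays the in-memory SQLite database, while callers can pass the URL selected for the run. The MariaDB URL mirrors the CI service container above, and the tests.common import path is inferred from the import shown later in this commit; both are assumptions for illustration only.

    # Hypothetical call sites; `hass` is the usual test instance and the URL mirrors
    # the root/password service container and homeassistant-test database in the CI job.
    from tests.common import init_recorder_component

    init_recorder_component(hass)  # unchanged default: sqlite:// (in-memory)
    init_recorder_component(
        hass, db_url="mysql://root:password@127.0.0.1/homeassistant-test"
    )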

View file

@@ -254,6 +254,9 @@ def test_state_changes_during_period_descending(hass_recorder):
start = dt_util.utcnow()
point = start + timedelta(seconds=1)
point2 = start + timedelta(seconds=1, microseconds=2)
point3 = start + timedelta(seconds=1, microseconds=3)
point4 = start + timedelta(seconds=1, microseconds=4)
end = point + timedelta(seconds=1)
with patch(
@@ -265,12 +268,19 @@ def test_state_changes_during_period_descending(hass_recorder):
with patch(
"homeassistant.components.recorder.core.dt_util.utcnow", return_value=point
):
states = [
set_state("idle"),
set_state("Netflix"),
set_state("Plex"),
set_state("YouTube"),
]
states = [set_state("idle")]
with patch(
"homeassistant.components.recorder.core.dt_util.utcnow", return_value=point2
):
states.append(set_state("Netflix"))
with patch(
"homeassistant.components.recorder.core.dt_util.utcnow", return_value=point3
):
states.append(set_state("Plex"))
with patch(
"homeassistant.components.recorder.core.dt_util.utcnow", return_value=point4
):
states.append(set_state("YouTube"))
with patch(
"homeassistant.components.recorder.core.dt_util.utcnow", return_value=end
@@ -652,8 +662,13 @@ def record_states(hass) -> tuple[datetime, datetime, dict[str, list[State]]]:
async def test_state_changes_during_period_query_during_migration_to_schema_25(
async_setup_recorder_instance: SetupRecorderInstanceT,
hass: ha.HomeAssistant,
recorder_db_url: str,
):
"""Test we can query data prior to schema 25 and during migration to schema 25."""
if recorder_db_url.startswith("mysql://"):
# This test doesn't run on MySQL / MariaDB; we can't drop table state_attributes
return
instance = await async_setup_recorder_instance(hass, {})
start = dt_util.utcnow()
@@ -702,8 +717,13 @@ async def test_state_changes_during_period_query_during_migration_to_schema_25(
async def test_get_states_query_during_migration_to_schema_25(
async_setup_recorder_instance: SetupRecorderInstanceT,
hass: ha.HomeAssistant,
recorder_db_url: str,
):
"""Test we can query data prior to schema 25 and during migration to schema 25."""
if recorder_db_url.startswith("mysql://"):
# This test doesn't run on MySQL / MariaDB; we can't drop table state_attributes
return
instance = await async_setup_recorder_instance(hass, {})
start = dt_util.utcnow()
@@ -748,8 +768,13 @@ async def test_get_states_query_during_migration_to_schema_25(
async def test_get_states_query_during_migration_to_schema_25_multiple_entities(
async_setup_recorder_instance: SetupRecorderInstanceT,
hass: ha.HomeAssistant,
recorder_db_url: str,
):
"""Test we can query data prior to schema 25 and during migration to schema 25."""
if recorder_db_url.startswith("mysql://"):
# This test doesn't run on MySQL / MariaDB; we can't drop table state_attributes
return
instance = await async_setup_recorder_instance(hass, {})
start = dt_util.utcnow()

View file

@@ -1450,8 +1450,12 @@ async def test_database_lock_and_overflow(
assert not instance.unlock_database()
async def test_database_lock_timeout(recorder_mock, hass):
async def test_database_lock_timeout(recorder_mock, hass, recorder_db_url):
"""Test locking database timeout when recorder stopped."""
if recorder_db_url.startswith("mysql://"):
# This test is specific for SQLite: Locking is not implemented for other engines
return
hass.bus.async_fire(EVENT_HOMEASSISTANT_STOP)
instance = get_instance(hass)
@@ -1517,8 +1521,13 @@ async def test_database_connection_keep_alive_disabled_on_sqlite(
async_setup_recorder_instance: SetupRecorderInstanceT,
hass: HomeAssistant,
caplog: pytest.LogCaptureFixture,
recorder_db_url: str,
):
"""Test we do not do keep alive for sqlite."""
if recorder_db_url.startswith("mysql://"):
# This test is specific for SQLite, keepalive runs on other engines
return
instance = await async_setup_recorder_instance(hass)
hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED)
await instance.async_recorder_ready.wait()

View file

@@ -135,9 +135,16 @@ async def test_purge_old_states(
async def test_purge_old_states_encouters_database_corruption(
async_setup_recorder_instance: SetupRecorderInstanceT, hass: HomeAssistant
async_setup_recorder_instance: SetupRecorderInstanceT,
hass: HomeAssistant,
recorder_db_url: str,
):
"""Test database image image is malformed while deleting old states."""
if recorder_db_url.startswith("mysql://"):
# This test is specific for SQLite, wiping the database on error only happens
# with SQLite.
return
await async_setup_recorder_instance(hass)
await _add_test_states(hass)
@@ -364,7 +371,7 @@ async def test_purge_method(
assert recorder_runs.count() == 7
runs_before_purge = recorder_runs.all()
statistics_runs = session.query(StatisticsRuns)
statistics_runs = session.query(StatisticsRuns).order_by(StatisticsRuns.run_id)
assert statistics_runs.count() == 7
statistic_runs_before_purge = statistics_runs.all()
@@ -431,7 +438,10 @@ async def test_purge_method(
await hass.services.async_call("recorder", "purge", service_data=service_data)
await hass.async_block_till_done()
await async_wait_purge_done(hass)
assert "Vacuuming SQL DB to free space" in caplog.text
assert (
"Vacuuming SQL DB to free space" in caplog.text
or "Optimizing SQL DB to free space" in caplog.text
)
@pytest.mark.parametrize("use_sqlite", (True, False), indirect=True)

View file

@@ -14,8 +14,12 @@ from .common import async_wait_recording_done
from tests.common import SetupRecorderInstanceT, get_system_health_info
async def test_recorder_system_health(recorder_mock, hass):
async def test_recorder_system_health(recorder_mock, hass, recorder_db_url):
"""Test recorder system health."""
if recorder_db_url.startswith("mysql://"):
# This test is specific for SQLite
return
assert await async_setup_component(hass, "system_health", {})
await async_wait_recording_done(hass)
info = await get_system_health_info(hass, "recorder")
@@ -85,9 +89,15 @@ async def test_recorder_system_health_db_url_missing_host(
async def test_recorder_system_health_crashed_recorder_runs_table(
async_setup_recorder_instance: SetupRecorderInstanceT, hass: HomeAssistant
async_setup_recorder_instance: SetupRecorderInstanceT,
hass: HomeAssistant,
recorder_db_url: str,
):
"""Test recorder system health with crashed recorder runs table."""
if recorder_db_url.startswith("mysql://"):
# This test is specific for SQLite
return
with patch("homeassistant.components.recorder.run_history.RunHistory.load_from_db"):
assert await async_setup_component(hass, "system_health", {})
instance = await async_setup_recorder_instance(hass)

View file

@@ -40,8 +40,14 @@ def test_session_scope_not_setup(hass_recorder):
pass
def test_recorder_bad_commit(hass_recorder):
def test_recorder_bad_commit(hass_recorder, recorder_db_url):
"""Bad _commit should retry 3 times."""
if recorder_db_url.startswith("mysql://"):
# This test is specific for SQLite: mysql does not raise an OperationalError
# which triggers retries for the bad query below, it raises ProgrammingError
# on which we give up
return
hass = hass_recorder()
def work(session):
@@ -542,8 +548,12 @@ def test_warn_unsupported_dialect(caplog, dialect, message):
assert message in caplog.text
def test_basic_sanity_check(hass_recorder):
def test_basic_sanity_check(hass_recorder, recorder_db_url):
"""Test the basic sanity checks with a missing table."""
if recorder_db_url.startswith("mysql://"):
# This test is specific for SQLite
return
hass = hass_recorder()
cursor = util.get_instance(hass).engine.raw_connection().cursor()
@@ -556,8 +566,12 @@ def test_basic_sanity_check(hass_recorder):
util.basic_sanity_check(cursor)
def test_combined_checks(hass_recorder, caplog):
def test_combined_checks(hass_recorder, caplog, recorder_db_url):
"""Run Checks on the open database."""
if recorder_db_url.startswith("mysql://"):
# This test is specific for SQLite
return
hass = hass_recorder()
instance = util.get_instance(hass)
instance.db_retry_wait = 0
@@ -635,8 +649,12 @@ def test_end_incomplete_runs(hass_recorder, caplog):
assert "Ended unfinished session" in caplog.text
def test_periodic_db_cleanups(hass_recorder):
def test_periodic_db_cleanups(hass_recorder, recorder_db_url):
"""Test periodic db cleanups."""
if recorder_db_url.startswith("mysql://"):
# This test is specific for SQLite
return
hass = hass_recorder()
with patch.object(util.get_instance(hass).engine, "connect") as connect_mock:
util.periodic_db_cleanups(util.get_instance(hass))

View file

@@ -1329,9 +1329,13 @@ async def test_backup_start_no_recorder(
async def test_backup_start_timeout(
recorder_mock, hass, hass_ws_client, hass_supervisor_access_token
recorder_mock, hass, hass_ws_client, hass_supervisor_access_token, recorder_db_url
):
"""Test getting backup start when recorder is not present."""
if recorder_db_url.startswith("mysql://"):
# This test is specific for SQLite: Locking is not implemented for other engines
return
client = await hass_ws_client(hass, hass_supervisor_access_token)
# Ensure there are no queued events
@@ -1366,9 +1370,13 @@ async def test_backup_end(
async def test_backup_end_without_start(
recorder_mock, hass, hass_ws_client, hass_supervisor_access_token
recorder_mock, hass, hass_ws_client, hass_supervisor_access_token, recorder_db_url
):
"""Test backup start."""
if recorder_db_url.startswith("mysql://"):
# This test is specific for SQLite: Locking is not implemented for other engines
return
client = await hass_ws_client(hass, hass_supervisor_access_token)
# Ensure there are no queued events

View file

@@ -79,6 +79,11 @@ asyncio.set_event_loop_policy(runner.HassEventLoopPolicy(False))
asyncio.set_event_loop_policy = lambda policy: None
def pytest_addoption(parser):
"""Register custom pytest options."""
parser.addoption("--dburl", action="store", default="sqlite://")
def pytest_configure(config):
"""Register marker for tests that log exceptions."""
config.addinivalue_line(
@@ -108,8 +113,19 @@ def pytest_runtest_setup():
def adapt_datetime(val):
return val.isoformat(" ")
# Setup HAFakeDatetime converter for sqlite3
sqlite3.register_adapter(HAFakeDatetime, adapt_datetime)
# Setup HAFakeDatetime converter for MySQLdb (mysqlclient)
try:
import MySQLdb.converters as MySQLdb_converters
except ImportError:
pass
else:
MySQLdb_converters.conversions[
HAFakeDatetime
] = MySQLdb_converters.DateTime2literal
def ha_datetime_to_fakedatetime(datetime):
"""Convert datetime to FakeDatetime.
@@ -865,7 +881,29 @@ def recorder_config():
@pytest.fixture
def hass_recorder(enable_nightly_purge, enable_statistics, hass_storage):
def recorder_db_url(pytestconfig):
"""Prepare a default database for tests and return a connection URL."""
db_url: str = pytestconfig.getoption("dburl")
if db_url.startswith("mysql://"):
import sqlalchemy_utils
charset = "utf8mb4' COLLATE = 'utf8mb4_unicode_ci"
assert not sqlalchemy_utils.database_exists(db_url)
sqlalchemy_utils.create_database(db_url, encoding=charset)
elif db_url.startswith("postgresql://"):
pass
yield db_url
if db_url.startswith("mysql://"):
sqlalchemy_utils.drop_database(db_url)
@pytest.fixture
def hass_recorder(
recorder_db_url,
enable_nightly_purge,
enable_statistics,
hass_storage,
):
"""Home Assistant fixture with in-memory recorder."""
original_tz = dt_util.DEFAULT_TIME_ZONE
@@ -884,7 +922,7 @@ def hass_recorder(enable_nightly_purge, enable_statistics, hass_storage):
def setup_recorder(config=None):
"""Set up with params."""
init_recorder_component(hass, config)
init_recorder_component(hass, config, recorder_db_url)
hass.start()
hass.block_till_done()
hass.data[recorder.DATA_INSTANCE].block_till_done()
@@ -897,11 +935,11 @@ def hass_recorder(enable_nightly_purge, enable_statistics, hass_storage):
dt_util.DEFAULT_TIME_ZONE = original_tz
async def _async_init_recorder_component(hass, add_config=None):
async def _async_init_recorder_component(hass, add_config=None, db_url=None):
"""Initialize the recorder asynchronously."""
config = dict(add_config) if add_config else {}
if recorder.CONF_DB_URL not in config:
config[recorder.CONF_DB_URL] = "sqlite://" # In memory DB
config[recorder.CONF_DB_URL] = db_url
if recorder.CONF_COMMIT_INTERVAL not in config:
config[recorder.CONF_COMMIT_INTERVAL] = 0
@@ -920,7 +958,10 @@ async def _async_init_recorder_component(hass, add_config=None):
@pytest.fixture
async def async_setup_recorder_instance(
hass_fixture_setup, enable_nightly_purge, enable_statistics
recorder_db_url,
hass_fixture_setup,
enable_nightly_purge,
enable_statistics,
) -> AsyncGenerator[SetupRecorderInstanceT, None]:
"""Yield callable to setup recorder instance."""
assert not hass_fixture_setup
@@ -941,7 +982,7 @@ async def async_setup_recorder_instance(
hass: HomeAssistant, config: ConfigType | None = None
) -> recorder.Recorder:
"""Setup and return recorder instance.""" # noqa: D401
await _async_init_recorder_component(hass, config)
await _async_init_recorder_component(hass, config, recorder_db_url)
await hass.async_block_till_done()
instance = hass.data[recorder.DATA_INSTANCE]
# The recorder's worker is not started until Home Assistant is running
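Taken together, these conftest changes mean recorder tests receive whatever database --dburl selects, and tests that only make sense on SQLite opt out explicitly. A minimal sketch of the pattern a new recorder test would follow; the test name and body are hypothetical, while the imports, fixtures, and skip guard mirror those shown above:

    from homeassistant.core import HomeAssistant

    from tests.common import SetupRecorderInstanceT


    async def test_example_sqlite_only_behavior(
        async_setup_recorder_instance: SetupRecorderInstanceT,
        hass: HomeAssistant,
        recorder_db_url: str,
    ):
        """Hypothetical test for behavior that only exists on SQLite."""
        if recorder_db_url.startswith("mysql://"):
            # Skip on MariaDB/MySQL runs, mirroring the guards added in this commit
            return
        instance = await async_setup_recorder_instance(hass)
        # SQLite-specific assertions against `instance` would go here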