Home Assistant Git Exporter

This commit is contained in:
root
2024-05-31 13:07:35 +02:00
parent 64a0536537
commit 60abdd866c
275 changed files with 71113 additions and 1 deletions

View File

@@ -0,0 +1,452 @@
"""https://github.com/dummylabs/thewatchman§"""
from datetime import timedelta
import logging
import os
import time
import json
import voluptuous as vol
from homeassistant.loader import async_get_integration
from homeassistant.helpers import config_validation as cv
from homeassistant.components import persistent_notification
from homeassistant.util import dt as dt_util
from homeassistant.helpers.event import async_track_point_in_utc_time
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.typing import HomeAssistantType
from homeassistant.config_entries import ConfigEntry, SOURCE_IMPORT
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.const import (
EVENT_HOMEASSISTANT_STARTED,
EVENT_SERVICE_REGISTERED,
EVENT_SERVICE_REMOVED,
EVENT_STATE_CHANGED,
EVENT_CALL_SERVICE,
STATE_UNKNOWN,
)
from .coordinator import WatchmanCoordinator
from .utils import (
is_service,
report,
parse,
table_renderer,
text_renderer,
get_config,
get_report_path,
)
from .const import (
DOMAIN,
DOMAIN_DATA,
DEFAULT_HEADER,
CONF_IGNORED_FILES,
CONF_HEADER,
CONF_REPORT_PATH,
CONF_IGNORED_ITEMS,
CONF_SERVICE_NAME,
CONF_SERVICE_DATA,
CONF_SERVICE_DATA2,
CONF_INCLUDED_FOLDERS,
CONF_CHECK_LOVELACE,
CONF_IGNORED_STATES,
CONF_CHUNK_SIZE,
CONF_CREATE_FILE,
CONF_SEND_NOTIFICATION,
CONF_PARSE_CONFIG,
CONF_COLUMNS_WIDTH,
CONF_STARTUP_DELAY,
CONF_FRIENDLY_NAMES,
CONF_ALLOWED_SERVICE_PARAMS,
CONF_TEST_MODE,
EVENT_AUTOMATION_RELOADED,
EVENT_SCENE_RELOADED,
TRACKED_EVENT_DOMAINS,
MONITORED_STATES,
PLATFORMS,
VERSION,
)
_LOGGER = logging.getLogger(__name__)

# Schema for legacy configuration.yaml setup. These settings are imported
# into a config entry by async_setup() below; UI options take over afterwards.
CONFIG_SCHEMA = vol.Schema(
    {
        DOMAIN: vol.Schema(
            {
                vol.Optional(CONF_REPORT_PATH): cv.string,
                vol.Optional(CONF_IGNORED_FILES): cv.ensure_list,
                vol.Optional(CONF_IGNORED_ITEMS): cv.ensure_list,
                vol.Optional(CONF_HEADER, default=DEFAULT_HEADER): cv.string,
                vol.Optional(CONF_SERVICE_NAME): cv.string,
                vol.Optional(CONF_SERVICE_DATA): vol.Schema({}, extra=vol.ALLOW_EXTRA),
                vol.Optional(CONF_INCLUDED_FOLDERS): cv.ensure_list,
                vol.Optional(CONF_CHECK_LOVELACE, default=False): cv.boolean,
                vol.Optional(CONF_CHUNK_SIZE, default=3500): cv.positive_int,
                # only these three entity states can be excluded from tracking
                vol.Optional(CONF_IGNORED_STATES): [
                    "missing",
                    "unavailable",
                    "unknown",
                ],
                vol.Optional(CONF_COLUMNS_WIDTH): cv.ensure_list,
                vol.Optional(CONF_STARTUP_DELAY, default=0): cv.positive_int,
                vol.Optional(CONF_FRIENDLY_NAMES, default=False): cv.boolean,
            }
        )
    },
    extra=vol.ALLOW_EXTRA,
)
async def async_setup(hass: HomeAssistantType, config: dict):
    """Handle YAML-based setup by delegating to the config flow import step."""
    yaml_conf = config.get(DOMAIN)
    if yaml_conf is None:
        # Integration was set up via the UI config flow; nothing to import.
        return True
    hass.data.setdefault(DOMAIN_DATA, yaml_conf)
    # Hand the YAML settings over to async_step_import of the config flow.
    hass.async_create_task(
        hass.config_entries.flow.async_init(
            DOMAIN, context={"source": SOURCE_IMPORT}, data=hass.data[DOMAIN_DATA]
        )
    )
    # Initialization succeeded.
    return True
async def async_setup_entry(hass: HomeAssistantType, entry: ConfigEntry):
    """Set up this integration using UI"""
    _LOGGER.debug(entry.options)
    _LOGGER.debug("Home assistant path: %s", hass.config.path(""))
    coordinator = WatchmanCoordinator(hass, _LOGGER, name=entry.title)
    # seed coordinator state so last_update_success is defined before the
    # first real refresh
    coordinator.async_set_updated_data(None)
    if not coordinator.last_update_success:
        raise ConfigEntryNotReady
    hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator
    # also stored under a fixed key for lookup by event handlers and services
    hass.data[DOMAIN]["coordinator"] = coordinator
    hass.data[DOMAIN_DATA] = entry.options  # TODO: refactor
    await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
    # reload the entry whenever the user changes options in the UI
    entry.async_on_unload(entry.add_update_listener(update_listener))
    await add_services(hass)
    await add_event_handlers(hass)
    if hass.is_running:
        # integration reloaded or options changed via UI
        parse_config(hass, reason="changes in watchman configuration")
        await coordinator.async_config_entry_first_refresh()
    else:
        # first run, home assistant is loading
        # parse_config will be scheduled once HA is fully loaded
        _LOGGER.info("Watchman started [%s]", VERSION)
    # resources = hass.data["lovelace"]["resources"]
    # await resources.async_get_info()
    # for itm in resources.async_items():
    #     _LOGGER.debug(itm)
    return True
async def update_listener(hass: HomeAssistant, entry: ConfigEntry):
    """Reload integration when options changed"""
    # Reloading re-runs async_setup_entry with the updated options.
    entry_id = entry.entry_id
    await hass.config_entries.async_reload(entry_id)
async def async_unload_entry(
    hass: HomeAssistant, config_entry
):  # pylint: disable=unused-argument
    """Tear down event handlers, the report service, platforms and state."""
    # Cancel event-bus subscriptions registered by add_event_handlers().
    for cancel in hass.data[DOMAIN].get("cancel_handlers", []):
        if cancel:
            cancel()
    # Drop the report service if it is still registered.
    if hass.services.has_service(DOMAIN, "report"):
        hass.services.async_remove(DOMAIN, "report")
    unload_ok = await hass.config_entries.async_unload_platforms(
        config_entry, PLATFORMS
    )
    # Remove integration state regardless of the platform unload outcome.
    hass.data.pop(DOMAIN_DATA, None)
    hass.data.pop(DOMAIN, None)
    if unload_ok:
        _LOGGER.info("Watchman integration successfully unloaded.")
    else:
        _LOGGER.error("Having trouble unloading watchman integration")
    return unload_ok
async def add_services(hass: HomeAssistant):
    """Register the watchman.report service."""

    async def async_handle_report(call):
        """Handle the watchman.report service call."""
        config = hass.data.get(DOMAIN_DATA, {})
        path = get_report_path(hass, config.get(CONF_REPORT_PATH, None))
        send_notification = call.data.get(CONF_SEND_NOTIFICATION, False)
        create_file = call.data.get(CONF_CREATE_FILE, True)
        test_mode = call.data.get(CONF_TEST_MODE, False)
        # validate service params; async_notification(error=True) raises
        for param in call.data:
            if param not in CONF_ALLOWED_SERVICE_PARAMS:
                await async_notification(
                    hass,
                    "Watchman error",
                    f"Unknown service parameter: `{param}`.",
                    error=True,
                )
        if not (send_notification or create_file):
            # BUGFIX: message previously misspelled `send_nofification`
            message = (
                "Either `send_notification` or `create_file` should be set to `true` "
                "in service parameters."
            )
            await async_notification(hass, "Watchman error", message, error=True)
        if call.data.get(CONF_PARSE_CONFIG, False):
            parse_config(hass, reason="service call")
        if send_notification:
            chunk_size = call.data.get(CONF_CHUNK_SIZE, config.get(CONF_CHUNK_SIZE))
            service = call.data.get(CONF_SERVICE_NAME, None)
            service_data = call.data.get(CONF_SERVICE_DATA, None)
            if service_data and not service:
                await async_notification(
                    hass,
                    "Watchman error",
                    "Missing `service` parameter. The `data` parameter can only be used "
                    "in conjunction with `service` parameter.",
                    error=True,
                )
            if onboarding(hass, service, path):
                # very first report: show a one-time welcome instead
                await async_notification(
                    hass,
                    "🖖 Achievement unlocked: first report!",
                    f"Your first watchman report was stored in `{path}` \n\n "
                    "TIP: set `service` parameter in configuration.yaml file to "
                    "receive report via notification service of choice. \n\n "
                    "This is one-time message, it will not bother you in the future.",
                )
            else:
                await async_report_to_notification(
                    hass, service, service_data, chunk_size
                )
        if create_file:
            try:
                await async_report_to_file(hass, path, test_mode=test_mode)
            except OSError as exception:
                await async_notification(
                    hass,
                    "Watchman error",
                    f"Unable to write report: {exception}",
                    error=True,
                )

    hass.services.async_register(DOMAIN, "report", async_handle_report)
async def add_event_handlers(hass: HomeAssistant):
    """add event handlers"""

    async def async_schedule_refresh_states(hass, delay):
        """schedule refresh of the sensors state"""
        now = dt_util.utcnow()
        next_interval = now + timedelta(seconds=delay)
        async_track_point_in_utc_time(hass, async_delayed_refresh_states, next_interval)

    async def async_delayed_refresh_states(timedate):  # pylint: disable=unused-argument
        """refresh sensors state"""
        # parse_config should be invoked beforehand
        coordinator = hass.data[DOMAIN]["coordinator"]
        await coordinator.async_refresh()

    async def async_on_home_assistant_started(event):  # pylint: disable=unused-argument
        # parse configuration once HA is fully loaded, then refresh sensors
        # after the user-configured startup delay
        parse_config(hass, reason="HA restart")
        startup_delay = get_config(hass, CONF_STARTUP_DELAY, 0)
        await async_schedule_refresh_states(hass, startup_delay)

    async def async_on_configuration_changed(event):
        """re-parse config when a reload-style event is observed"""
        typ = event.event_type
        if typ == EVENT_CALL_SERVICE:
            # reload service calls of tracked domains trigger a re-parse
            domain = event.data.get("domain", None)
            service = event.data.get("service", None)
            if domain in TRACKED_EVENT_DOMAINS and service in [
                "reload_core_config",
                "reload",
            ]:
                parse_config(hass, reason="configuration changes")
                coordinator = hass.data[DOMAIN]["coordinator"]
                await coordinator.async_refresh()
        elif typ in [EVENT_AUTOMATION_RELOADED, EVENT_SCENE_RELOADED]:
            parse_config(hass, reason="configuration changes")
            coordinator = hass.data[DOMAIN]["coordinator"]
            await coordinator.async_refresh()

    async def async_on_service_changed(event):
        # refresh sensors when a monitored (missing) service appears/disappears
        service = f"{event.data['domain']}.{event.data['service']}"
        if service in hass.data[DOMAIN].get("service_list", []):
            _LOGGER.debug("Monitored service changed: %s", service)
            coordinator = hass.data[DOMAIN]["coordinator"]
            await coordinator.async_refresh()

    async def async_on_state_changed(event):
        """refresh monitored entities on state change"""

        def state_or_missing(state_id):
            """return missing state if entity not found"""
            return "missing" if not event.data[state_id] else event.data[state_id].state

        if event.data["entity_id"] in hass.data[DOMAIN].get("entity_list", []):
            ignored_states = get_config(hass, CONF_IGNORED_STATES, [])
            old_state = state_or_missing("old_state")
            new_state = state_or_missing("new_state")
            # refresh only when the entity moved into or out of a monitored state
            checked_states = set(MONITORED_STATES) - set(ignored_states)
            if new_state in checked_states or old_state in checked_states:
                _LOGGER.debug("Monitored entity changed: %s", event.data["entity_id"])
                coordinator = hass.data[DOMAIN]["coordinator"]
                await coordinator.async_refresh()

    # hass is not started yet, schedule config parsing once it loaded
    if not hass.is_running:
        hass.bus.async_listen_once(
            EVENT_HOMEASSISTANT_STARTED, async_on_home_assistant_started
        )
    # keep cancel callbacks so async_unload_entry can unsubscribe later
    hdlr = []
    hdlr.append(
        hass.bus.async_listen(EVENT_CALL_SERVICE, async_on_configuration_changed)
    )
    hdlr.append(
        hass.bus.async_listen(EVENT_AUTOMATION_RELOADED, async_on_configuration_changed)
    )
    hdlr.append(
        hass.bus.async_listen(EVENT_SCENE_RELOADED, async_on_configuration_changed)
    )
    hdlr.append(
        hass.bus.async_listen(EVENT_SERVICE_REGISTERED, async_on_service_changed)
    )
    hdlr.append(hass.bus.async_listen(EVENT_SERVICE_REMOVED, async_on_service_changed))
    hdlr.append(hass.bus.async_listen(EVENT_STATE_CHANGED, async_on_state_changed))
    hass.data[DOMAIN]["cancel_handlers"] = hdlr
def parse_config(hass: HomeAssistant, reason=None):
    """Parse home assistant configuration files and cache the results.

    Stores entity/service occurrence maps and parse statistics in
    hass.data[DOMAIN] for later use by the coordinator and sensors.
    """
    # Fail loudly if configuration data was never stored. A bare `assert`
    # would be stripped when python runs with -O, so raise explicitly.
    if not hass.data.get(DOMAIN_DATA):
        raise HomeAssistantError("Watchman configuration data is missing")
    start_time = time.time()
    included_folders = get_included_folders(hass)
    ignored_files = hass.data[DOMAIN_DATA].get(CONF_IGNORED_FILES, None)
    entity_list, service_list, files_parsed, files_ignored = parse(
        hass, included_folders, ignored_files, hass.config.config_dir
    )
    hass.data[DOMAIN]["entity_list"] = entity_list
    hass.data[DOMAIN]["service_list"] = service_list
    hass.data[DOMAIN]["files_parsed"] = files_parsed
    hass.data[DOMAIN]["files_ignored"] = files_ignored
    hass.data[DOMAIN]["parse_duration"] = time.time() - start_time
    _LOGGER.info(
        "%s files parsed and %s files ignored in %.2fs. due to %s",
        files_parsed,
        files_ignored,
        hass.data[DOMAIN]["parse_duration"],
        reason,
    )
def get_included_folders(hass):
    """Gather the list of folders to parse, as recursive glob patterns."""
    config_folders = [hass.config.config_dir]
    if DOMAIN_DATA in hass.data:
        # CONSISTENCY FIX: use the imported CONF_INCLUDED_FOLDERS constant
        # instead of a hard-coded "included_folders" string literal.
        config_folders = hass.data[DOMAIN_DATA].get(CONF_INCLUDED_FOLDERS)
        if not config_folders:
            # fall back to the HA configuration directory
            config_folders = [hass.config.config_dir]
    folders = [os.path.join(fld, "**/*.yaml") for fld in config_folders]
    if DOMAIN_DATA in hass.data and hass.data[DOMAIN_DATA].get(CONF_CHECK_LOVELACE):
        # dashboards (ex-Lovelace) configuration is stored in .storage
        folders.append(os.path.join(hass.config.config_dir, ".storage/**/lovelace*"))
    return folders
async def async_report_to_file(hass, path, test_mode):
    """Refresh the sensors, render the report and write it to `path`."""
    coordinator = hass.data[DOMAIN]["coordinator"]
    await coordinator.async_refresh()
    # chunk_size=0 disables chunking so the whole report lands in one file.
    chunks = report(hass, table_renderer, chunk_size=0, test_mode=test_mode)
    # OSError exception is handled in async_handle_report
    with open(path, "w", encoding="utf-8") as report_file:
        report_file.writelines(chunks)
async def async_report_to_notification(hass, service_str, service_data, chunk_size):
    """Send the watchman report via a notification service.

    Falls back to the service/data configured in integration options when
    no explicit service was supplied by the caller.
    """
    if not service_str:
        service_str = get_config(hass, CONF_SERVICE_NAME, None)
        service_data = get_config(hass, CONF_SERVICE_DATA2, None)
    if not service_str:
        await async_notification(
            hass,
            "Watchman Error",
            "You should specify `service` parameter (in integration options or as `service` "
            "parameter) in order to send report via notification",
        )
        return
    if not is_service(hass, service_str):
        await async_notification(
            hass,
            "Watchman Error",
            f"{service_str} is not a valid service for notification",
        )
        # BUGFIX: bail out here; previously execution fell through and the
        # invalid service was called anyway.
        return
    domain = service_str.split(".")[0]
    service = ".".join(service_str.split(".")[1:])
    data = {} if service_data is None else json.loads(service_data)
    coordinator = hass.data[DOMAIN]["coordinator"]
    await coordinator.async_refresh()
    report_chunks = report(hass, text_renderer, chunk_size)
    for chunk in report_chunks:
        data["message"] = chunk
        # blocking=True ensures execution order
        if not await hass.services.async_call(domain, service, data, blocking=True):
            _LOGGER.error(
                "Unable to call service %s.%s due to an error.", domain, service
            )
            break
async def async_notification(hass, title, message, error=False, n_id="watchman"):
    """Show a persistent notification; raise HomeAssistantError when error=True."""
    persistent_notification.async_create(
        hass, message, title=title, notification_id=n_id
    )
    if error:
        # strip markdown backticks for the exception text
        plain_message = message.replace("`", "")
        raise HomeAssistantError(plain_message)
def onboarding(hass, service, path):
    """Return True when the user runs a report for the very first time."""
    effective_service = service or get_config(hass, CONF_SERVICE_NAME, None)
    if effective_service:
        # a notification service is configured -> not a first-time run
        return False
    # first run iff no report file exists yet
    return not os.path.exists(path)

View File

@@ -0,0 +1,291 @@
"ConfigFlow definition for watchman"
from typing import Dict
import json
from json.decoder import JSONDecodeError
import logging
from homeassistant.config_entries import ConfigFlow, OptionsFlow, ConfigEntry
from homeassistant.core import callback
from homeassistant.helpers import config_validation as cv, selector
import voluptuous as vol
from .utils import is_service, get_columns_width, get_report_path
from .const import (
DOMAIN,
CONF_IGNORED_FILES,
CONF_HEADER,
CONF_REPORT_PATH,
CONF_IGNORED_ITEMS,
CONF_SERVICE_NAME,
CONF_SERVICE_DATA,
CONF_SERVICE_DATA2,
CONF_INCLUDED_FOLDERS,
CONF_CHECK_LOVELACE,
CONF_IGNORED_STATES,
CONF_CHUNK_SIZE,
CONF_COLUMNS_WIDTH,
CONF_STARTUP_DELAY,
CONF_FRIENDLY_NAMES,
)
# Default option values used when the integration is set up via the UI.
DEFAULT_DATA = {
    CONF_SERVICE_NAME: "",
    CONF_SERVICE_DATA2: "{}",
    CONF_INCLUDED_FOLDERS: ["/config"],
    CONF_HEADER: "-== Watchman Report ==-",
    CONF_REPORT_PATH: "",
    CONF_IGNORED_ITEMS: [],
    CONF_IGNORED_STATES: [],
    CONF_CHUNK_SIZE: 3500,
    CONF_IGNORED_FILES: [],
    CONF_CHECK_LOVELACE: False,
    CONF_COLUMNS_WIDTH: [30, 7, 60],
    CONF_STARTUP_DELAY: 0,
    CONF_FRIENDLY_NAMES: False,
}
# Validation schemas for the comma-separated list options of the form.
INCLUDED_FOLDERS_SCHEMA = vol.Schema(vol.All(cv.ensure_list, [cv.string]))
IGNORED_ITEMS_SCHEMA = vol.Schema(vol.All(cv.ensure_list, [cv.string]))
# only these three states may be ignored
IGNORED_STATES_SCHEMA = vol.Schema(["missing", "unavailable", "unknown"])
IGNORED_FILES_SCHEMA = vol.Schema(vol.All(cv.ensure_list, [cv.string]))
COLUMNS_WIDTH_SCHEMA = vol.Schema(vol.All(cv.ensure_list, [cv.positive_int]))
_LOGGER = logging.getLogger(__name__)
class ConfigFlowHandler(ConfigFlow, domain=DOMAIN):
    """Config flow for the watchman integration."""

    async def async_step_user(self, user_input=None):
        """Create the single allowed entry, pre-filled with default options."""
        if self._async_current_entries():
            return self.async_abort(reason="single_instance_allowed")
        return self.async_create_entry(title="Watchman", data={}, options=DEFAULT_DATA)

    async def async_step_import(self, import_data):
        """Import configuration.yaml settings as OptionsEntry"""
        if self._async_current_entries():
            return self.async_abort(reason="single_instance_allowed")
        # "data" is reserved by OptionsFlow, so the configuration.yaml key
        # "data" is stored under "service_data" instead.
        import_data[CONF_SERVICE_DATA2] = import_data.pop(CONF_SERVICE_DATA, {})
        _LOGGER.info(
            "watchman settings imported successfully and can be removed from "
            "configuration.yaml"
        )
        _LOGGER.debug("configuration.yaml settings successfully imported to UI options")
        return self.async_create_entry(
            title="configuration.yaml", data={}, options=import_data
        )

    @staticmethod
    @callback
    def async_get_options_flow(config_entry):
        """Get the options flow for this handler."""
        return OptionsFlowHandler(config_entry)
class OptionsFlowHandler(OptionsFlow):
    """Handles options flow for the component."""

    def __init__(self, config_entry: ConfigEntry) -> None:
        # entry whose options are being edited; used to look up saved values
        self.config_entry = config_entry

    def default(self, key, uinput=None):
        """provide default value for an OptionsFlow field"""
        if uinput and key in uinput:
            # supply last entered value to display an error during form validation
            result = uinput[key]
        else:
            # supply last saved value or default one
            result = self.config_entry.options.get(key, DEFAULT_DATA[key])
        if result == "":
            # some default values cannot be empty
            if DEFAULT_DATA[key]:
                result = DEFAULT_DATA[key]
            elif key == CONF_REPORT_PATH:
                # report path default is derived from the HA config dir
                result = get_report_path(self.hass, None)
        # lists are shown as comma-separated text, dicts as JSON text
        if isinstance(result, list):
            return ", ".join([str(i) for i in result])
        if isinstance(result, dict):
            return json.dumps(result)
        if isinstance(result, bool):
            return result
        return str(result)

    def to_list(self, user_input, key):
        """validate user input against list requirements"""
        errors: Dict[str, str] = {}
        if key not in user_input:
            return DEFAULT_DATA[key], errors
        val = user_input[key]
        # split the comma-separated string into a list, dropping blanks
        val = [x.strip() for x in val.split(",") if x.strip()]
        try:
            val = INCLUDED_FOLDERS_SCHEMA(val)
        except vol.Invalid:
            errors[key] = f"invalid_{key}"
        return val, errors

    async def _show_options_form(
        self, uinput=None, errors=None, placehoders=None
    ):  # pylint: disable=unused-argument
        # Render the options form; `uinput` carries the last entered values so
        # the form is re-populated when validation fails.
        return self.async_show_form(
            step_id="init",
            data_schema=vol.Schema(
                {
                    vol.Optional(
                        CONF_SERVICE_NAME,
                        description={
                            "suggested_value": self.default(CONF_SERVICE_NAME, uinput)
                        },
                    ): cv.string,
                    vol.Optional(
                        CONF_SERVICE_DATA2,
                        description={
                            "suggested_value": self.default(CONF_SERVICE_DATA2, uinput)
                        },
                    ): selector.TemplateSelector(),
                    vol.Optional(
                        CONF_INCLUDED_FOLDERS,
                        description={
                            "suggested_value": self.default(
                                CONF_INCLUDED_FOLDERS, uinput
                            )
                        },
                    ): selector.TextSelector(
                        selector.TextSelectorConfig(multiline=True)
                    ),
                    vol.Optional(
                        CONF_HEADER,
                        description={
                            "suggested_value": self.default(CONF_HEADER, uinput)
                        },
                    ): cv.string,
                    vol.Optional(
                        CONF_REPORT_PATH,
                        description={
                            "suggested_value": self.default(CONF_REPORT_PATH, uinput)
                        },
                    ): cv.string,
                    vol.Optional(
                        CONF_IGNORED_ITEMS,
                        description={
                            "suggested_value": self.default(CONF_IGNORED_ITEMS, uinput)
                        },
                    ): selector.TextSelector(
                        selector.TextSelectorConfig(multiline=True)
                    ),
                    vol.Optional(
                        CONF_IGNORED_STATES,
                        description={
                            "suggested_value": self.default(CONF_IGNORED_STATES, uinput)
                        },
                    ): selector.TextSelector(
                        selector.TextSelectorConfig(multiline=True)
                    ),
                    vol.Optional(
                        CONF_CHUNK_SIZE,
                        description={
                            "suggested_value": self.default(CONF_CHUNK_SIZE, uinput)
                        },
                    ): cv.positive_int,
                    vol.Optional(
                        CONF_IGNORED_FILES,
                        description={
                            "suggested_value": self.default(CONF_IGNORED_FILES, uinput)
                        },
                    ): selector.TextSelector(
                        selector.TextSelectorConfig(multiline=True)
                    ),
                    vol.Optional(
                        CONF_COLUMNS_WIDTH,
                        description={
                            "suggested_value": self.default(CONF_COLUMNS_WIDTH, uinput)
                        },
                    ): cv.string,
                    vol.Optional(
                        CONF_STARTUP_DELAY,
                        description={
                            "suggested_value": self.default(CONF_STARTUP_DELAY, uinput)
                        },
                    ): cv.positive_int,
                    vol.Optional(
                        CONF_FRIENDLY_NAMES,
                        description={
                            "suggested_value": self.default(CONF_FRIENDLY_NAMES, uinput)
                        },
                    ): cv.boolean,
                    vol.Optional(
                        CONF_CHECK_LOVELACE,
                        description={
                            "suggested_value": self.default(CONF_CHECK_LOVELACE, uinput)
                        },
                    ): cv.boolean,
                }
            ),
            errors=errors or {},
            description_placeholders=placehoders or {},
        )

    async def async_step_init(self, user_input=None):
        """Manage the options"""
        errors: Dict[str, str] = {}
        placehoders: Dict[str, str] = {}
        if user_input is not None:
            # validate and normalize each comma-separated list field
            user_input[CONF_INCLUDED_FOLDERS], err = self.to_list(
                user_input, CONF_INCLUDED_FOLDERS
            )
            errors |= err
            user_input[CONF_IGNORED_ITEMS], err = self.to_list(
                user_input, CONF_IGNORED_ITEMS
            )
            errors |= err
            ignored_states, err = self.to_list(user_input, CONF_IGNORED_STATES)
            errors |= err
            try:
                user_input[CONF_IGNORED_STATES] = IGNORED_STATES_SCHEMA(ignored_states)
            except vol.Invalid:
                errors[CONF_IGNORED_STATES] = "wrong_value_ignored_states"
            user_input[CONF_IGNORED_FILES], err = self.to_list(
                user_input, CONF_IGNORED_FILES
            )
            errors |= err
            if CONF_COLUMNS_WIDTH in user_input:
                # columns_width must be exactly three positive integers
                columns_width = user_input[CONF_COLUMNS_WIDTH]
                try:
                    columns_width = [
                        int(x) for x in columns_width.split(",") if x.strip()
                    ]
                    if len(columns_width) != 3:
                        raise ValueError()
                    columns_width = COLUMNS_WIDTH_SCHEMA(columns_width)
                    user_input[CONF_COLUMNS_WIDTH] = get_columns_width(columns_width)
                except (ValueError, vol.Invalid):
                    errors[CONF_COLUMNS_WIDTH] = "invalid_columns_width"
            if CONF_SERVICE_DATA2 in user_input:
                # service data must be a JSON object
                try:
                    result = json.loads(user_input[CONF_SERVICE_DATA2])
                    if not isinstance(result, dict):
                        errors[CONF_SERVICE_DATA2] = "malformed_json"
                except JSONDecodeError:
                    errors[CONF_SERVICE_DATA2] = "malformed_json"
            if CONF_SERVICE_NAME in user_input:
                if not is_service(self.hass, user_input[CONF_SERVICE_NAME]):
                    errors[CONF_SERVICE_NAME] = "unknown_service"
                    placehoders["service"] = user_input[CONF_SERVICE_NAME]
            if not errors:
                return self.async_create_entry(title="", data=user_input)
            else:
                # provide last entered values to display error
                return await self._show_options_form(user_input, errors, placehoders)
        # provide default values
        return await self._show_options_form()

View File

@@ -0,0 +1,72 @@
"definition of constants"
from homeassistant.const import Platform
DOMAIN = "watchman"
DOMAIN_DATA = "watchman_data"
VERSION = "0.6.1"
DEFAULT_REPORT_FILENAME = "watchman_report.txt"
DEFAULT_HEADER = "-== WATCHMAN REPORT ==- "
DEFAULT_CHUNK_SIZE = 3500
CONF_IGNORED_FILES = "ignored_files"
CONF_HEADER = "report_header"
CONF_REPORT_PATH = "report_path"
CONF_IGNORED_ITEMS = "ignored_items"
CONF_SERVICE_NAME = "service"
CONF_SERVICE_DATA = "data"
CONF_SERVICE_DATA2 = "service_data"
CONF_INCLUDED_FOLDERS = "included_folders"
CONF_CHECK_LOVELACE = "check_lovelace"
CONF_IGNORED_STATES = "ignored_states"
CONF_CHUNK_SIZE = "chunk_size"
CONF_CREATE_FILE = "create_file"
CONF_SEND_NOTIFICATION = "send_notification"
CONF_PARSE_CONFIG = "parse_config"
CONF_COLUMNS_WIDTH = "columns_width"
CONF_STARTUP_DELAY = "startup_delay"
CONF_FRIENDLY_NAMES = "friendly_names"
CONF_TEST_MODE = "test_mode"
# configuration parameters allowed in watchman.report service data
CONF_ALLOWED_SERVICE_PARAMS = [
CONF_SERVICE_NAME,
CONF_CHUNK_SIZE,
CONF_CREATE_FILE,
CONF_SEND_NOTIFICATION,
CONF_PARSE_CONFIG,
CONF_SERVICE_DATA,
CONF_TEST_MODE,
]
EVENT_AUTOMATION_RELOADED = "automation_reloaded"
EVENT_SCENE_RELOADED = "scene_reloaded"
SENSOR_LAST_UPDATE = "watchman_last_updated"
SENSOR_MISSING_ENTITIES = "watchman_missing_entities"
SENSOR_MISSING_SERVICES = "watchman_missing_services"
MONITORED_STATES = ["unavailable", "unknown", "missing"]
TRACKED_EVENT_DOMAINS = [
"homeassistant",
"input_boolean",
"input_button",
"input_select",
"input_number",
"input_datetime",
"person",
"input_text",
"script",
"timer",
"zone",
]
BUNDLED_IGNORED_ITEMS = [
"timer.cancelled",
"timer.finished",
"timer.started",
"timer.restarted",
"timer.paused",
]
# Platforms
PLATFORMS = [Platform.SENSOR]

View File

@@ -0,0 +1,70 @@
"""Data update coordinator for Watchman"""
import logging
import time
from homeassistant.util import dt as dt_util
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
from .const import DOMAIN
from .utils import check_entitites, check_services, get_entity_state, fill
_LOGGER = logging.getLogger(__name__)
class WatchmanCoordinator(DataUpdateCoordinator):
    """Coordinator that recomputes missing entities/services on demand."""

    def __init__(self, hass, logger, name):
        """Initialize watchman coordinator."""
        # No update_interval is passed: refreshes are triggered explicitly by
        # event handlers and service calls, not by polling.
        super().__init__(
            hass,
            _LOGGER,
            name=name,  # Name of the data. For logging purposes.
        )
        self.hass = hass
        self.data = {}

    async def _async_update_data(self) -> dict:
        """Recalculate missing entities/services and sensor attribute maps."""
        start_time = time.time()
        services_missing = check_services(self.hass)
        entities_missing = check_entitites(self.hass)
        self.hass.data[DOMAIN]["check_duration"] = time.time() - start_time
        self.hass.data[DOMAIN]["entities_missing"] = entities_missing
        self.hass.data[DOMAIN]["services_missing"] = services_missing
        # build entity attributes map for missing_entities sensor
        entity_attrs = []
        entity_list = self.hass.data[DOMAIN]["entity_list"]
        for entity in entities_missing:
            state, name = get_entity_state(self.hass, entity, friendly_names=True)
            entity_attrs.append(
                {
                    "id": entity,
                    "state": state,
                    "friendly_name": name or "",
                    "occurrences": fill(entity_list[entity], 0),
                }
            )
        # build service attributes map for missing_services sensor
        service_attrs = []
        service_list = self.hass.data[DOMAIN]["service_list"]
        for service in services_missing:
            service_attrs.append(
                {"id": service, "occurrences": fill(service_list[service], 0)}
            )
        self.data = {
            "entities_missing": len(entities_missing),
            "services_missing": len(services_missing),
            "last_update": dt_util.now(),
            "service_attrs": service_attrs,
            "entity_attrs": entity_attrs,
        }
        _LOGGER.debug("Watchman sensors updated")
        _LOGGER.debug("entities missing: %s", len(entities_missing))
        _LOGGER.debug("services missing: %s", len(services_missing))
        return self.data

View File

@@ -0,0 +1,36 @@
"""Represents Watchman service in the device registry of Home Assistant"""
from homeassistant.helpers.entity import DeviceInfo, EntityDescription
from homeassistant.helpers.device_registry import DeviceEntryType
from homeassistant.helpers.update_coordinator import (
CoordinatorEntity,
DataUpdateCoordinator,
)
from .const import DOMAIN, VERSION
class WatchmanEntity(CoordinatorEntity):
    """Base entity tying watchman sensors to the coordinator and device."""

    def __init__(
        self,
        coordinator: DataUpdateCoordinator,
        entity_description: EntityDescription,
    ) -> None:
        """Initialize Watchman entity."""
        super().__init__(coordinator)
        self.entity_description = entity_description
        # Unique per sensor: <config entry id>_<sensor key>.
        entry_id = coordinator.config_entry.entry_id
        self._attr_unique_id = f"{entry_id}_{entity_description.key}"
        # All watchman sensors share a single service-type device entry.
        self._attr_device_info = DeviceInfo(
            identifiers={(DOMAIN, "watchman_unique_id")},
            manufacturer="dummylabs",
            model="Watchman",
            name="Watchman",
            sw_version=VERSION,
            entry_type=DeviceEntryType.SERVICE,
            configuration_url="https://github.com/dummylabs/thewatchman",
        )
        self._attr_extra_state_attributes = {}

View File

@@ -0,0 +1,15 @@
{
"domain": "watchman",
"name": "Watchman",
"documentation": "https://github.com/dummylabs/thewatchman",
"issue_tracker": "https://github.com/dummylabs/thewatchman/issues",
"iot_class": "local_push",
"version": "0.5.1",
"requirements": [
"prettytable==3.0.0"
],
"codeowners": [
"@dummylabs"
],
"config_flow": true
}

View File

@@ -0,0 +1,161 @@
"""Watchman sensors definition"""
import logging
from homeassistant.components.sensor import (
SensorDeviceClass,
SensorEntity,
SensorEntityDescription,
SensorStateClass,
)
from homeassistant.core import callback
from .entity import WatchmanEntity
from .const import (
DOMAIN,
SENSOR_LAST_UPDATE,
SENSOR_MISSING_ENTITIES,
SENSOR_MISSING_SERVICES,
)
_LOGGER = logging.getLogger(__name__)
async def async_setup_entry(hass, entry, async_add_devices):
    """Set up the three watchman sensors for a config entry."""
    coordinator = hass.data[DOMAIN][entry.entry_id]

    def describe(key, **kwargs):
        """Build a SensorEntityDescription whose name equals its key."""
        return SensorEntityDescription(key=key, name=key, **kwargs)

    sensors = [
        LastUpdateSensor(
            coordinator=coordinator,
            entity_description=describe(
                SENSOR_LAST_UPDATE, device_class=SensorDeviceClass.TIMESTAMP
            ),
        ),
        MissingEntitiesSensor(
            coordinator=coordinator,
            entity_description=describe(
                SENSOR_MISSING_ENTITIES, state_class=SensorStateClass.MEASUREMENT
            ),
        ),
        MissingServicesSensor(
            coordinator=coordinator,
            entity_description=describe(
                SENSOR_MISSING_SERVICES, state_class=SensorStateClass.MEASUREMENT
            ),
        ),
    ]
    async_add_devices(sensors)
class LastUpdateSensor(WatchmanEntity, SensorEntity):
    """Timestamp sensor exposing the time of the last watchman update."""

    _attr_should_poll = False
    _attr_icon = "mdi:shield-half-full"

    @property
    def should_poll(self) -> bool:
        """No polling needed."""
        return False

    @property
    def native_value(self):
        """Return the last update timestamp from coordinator data."""
        data = self.coordinator.data
        return data["last_update"] if data else self._attr_native_value

    @callback
    def _handle_coordinator_update(self) -> None:
        """Handle updated data from the coordinator."""
        data = self.coordinator.data
        if data:
            self._attr_native_value = data["last_update"]
            self.async_write_ha_state()
        super()._handle_coordinator_update()
class MissingEntitiesSensor(WatchmanEntity, SensorEntity):
    """Number of missing entities from the watchman report."""

    _attr_should_poll = False
    _attr_icon = "mdi:shield-half-full"
    _attr_native_unit_of_measurement = "items"

    @property
    def should_poll(self) -> bool:
        """No polling needed."""
        return False

    @property
    def native_value(self):
        """Return the count of missing entities."""
        data = self.coordinator.data
        return data["entities_missing"] if data else self._attr_native_value

    @property
    def extra_state_attributes(self):
        """Expose per-entity details as a state attribute."""
        data = self.coordinator.data
        return {"entities": data["entity_attrs"]} if data else {}

    @callback
    def _handle_coordinator_update(self) -> None:
        """Handle updated data from the coordinator."""
        data = self.coordinator.data
        if data:
            self._attr_native_value = data["entities_missing"]
            self._attr_extra_state_attributes = {"entities": data["entity_attrs"]}
            self.async_write_ha_state()
        super()._handle_coordinator_update()
class MissingServicesSensor(WatchmanEntity, SensorEntity):
    """Number of missing services from watchman report"""

    _attr_should_poll = False
    _attr_icon = "mdi:shield-half-full"
    _attr_native_unit_of_measurement = "items"

    @property
    def should_poll(self) -> bool:
        """No polling needed."""
        return False

    @property
    def native_value(self):
        """Return the number of missing services."""
        if self.coordinator.data:
            return self.coordinator.data["services_missing"]
        return self._attr_native_value

    @property
    def extra_state_attributes(self):
        """Return the state attributes."""
        if self.coordinator.data:
            # BUGFIX: key was "entities" here but "services" in
            # _handle_coordinator_update; use "services" consistently.
            return {"services": self.coordinator.data["service_attrs"]}
        return {}

    @callback
    def _handle_coordinator_update(self) -> None:
        """Handle updated data from the coordinator."""
        if self.coordinator.data:
            self._attr_native_value = self.coordinator.data["services_missing"]
            self._attr_extra_state_attributes = {
                "services": self.coordinator.data["service_attrs"]
            }
            self.async_write_ha_state()
        super()._handle_coordinator_update()

View File

@@ -0,0 +1,50 @@
report:
description: Run watchman report
fields:
create_file:
description: Whether report file should be created (optional, true by default)
example: true
name: Create file report
default: true
required: false
selector:
boolean:
send_notification:
description: Whether report should be sent via notification service (optional, false by default)
example: true
name: Send notification
default: false
required: false
selector:
boolean:
service:
description: Notification service to send report via (optional). Overrides "service" setting from watchman configuration
example: "notify.telegram"
name: Notification service
required: false
selector:
text:
data:
description: Additional data in form of key:value pairs for notification service (optional)
example: "parse_mode: html"
name: Notification service data parameters
parse_config:
description: Parse configuration files before report is created. Usually this is done by watchman automatically, so this flag is not required. (optional, false by default)
example: true
name: Parse configuration
default: false
required: false
selector:
boolean:
chunk_size:
description: Maximum message size in bytes. If report size exceeds chunk_size, the report will be sent in several subsequent notifications. (optional, default is 3500 or whatever specified in integration settings)
example: 3500
name: Chunk size
default: 3500
required: false
selector:
number:
min: 0
max: 100000
mode: box

View File

@@ -0,0 +1,45 @@
{
"config": {
"abort": {
"single_instance_allowed": "Only one instance of watchman is allowed"
},
"step": {}
},
"options": {
"error": {
"invalid_included_folders": "included_folders should be a comma separated list of configuration folders",
"invalid_columns_width": "columns_width should be a list of 3 positive integers",
"wrong_value_ignored_states": "Accepted values are: 'unavailable', 'missing' and 'unknown'",
"malformed_json": "service data should be a valid json dictionary",
"unknown_service": "unknown service: `{service}`"
},
"step": {
"init": {
"title": "Watchman settings",
"data": {
"service": "Notification service (e.g. notify.telegram)",
"service_data": "Notification service data",
"included_folders": "Included folders",
"report_header": "Custom header for the report",
"report_path": "Report location e.g. /config/report.txt",
"ignored_items": "Ignored entities and services",
"ignored_states": "Ignored entity states",
"chunk_size": "Message chunk size in bytes (used with notification service)",
"ignored_files": "Ignored files (comma-separated)",
"check_lovelace": "Parse dashboards UI (ex-Lovelace) configuration",
"columns_width": "List of report columns width, e.g. 30, 7, 60",
"startup_delay": "Startup delay for watchman sensors initialization",
"friendly_names": "Add friendly names to the report"
},
"data_description": {
"service_data": "JSON object with notification service data, see documentation for details",
"included_folders": "Comma-separated list of folders where watchman should look for config files",
"ignored_items": "Comma-separated list of entities and services excluded from tracking",
"ignored_states": "Comma-separated list of the states excluded from tracking",
"ignored_files": "Comma-separated list of config files excluded from tracking"
},
"description": "[Help on settings](https://github.com/dummylabs/thewatchman#configuration)"
}
}
}
}

View File

@@ -0,0 +1,385 @@
"""Miscellaneous support functions for watchman"""
import glob
import re
import fnmatch
import time
import logging
from datetime import datetime
from textwrap import wrap
import os
from typing import Any
import pytz
from prettytable import PrettyTable
from homeassistant.exceptions import HomeAssistantError
from homeassistant.core import HomeAssistant
from .const import (
DOMAIN,
DOMAIN_DATA,
DEFAULT_HEADER,
DEFAULT_CHUNK_SIZE,
CONF_HEADER,
CONF_IGNORED_ITEMS,
CONF_IGNORED_STATES,
CONF_CHUNK_SIZE,
CONF_COLUMNS_WIDTH,
CONF_FRIENDLY_NAMES,
BUNDLED_IGNORED_ITEMS,
DEFAULT_REPORT_FILENAME,
)
_LOGGER = logging.getLogger(__name__)
async def read_file(hass: "HomeAssistant", path: str) -> Any:
    """Return the full text content of *path*.

    The path is resolved against the Home Assistant configuration folder and
    the blocking read is delegated to an executor thread so the event loop
    is never blocked by file I/O.
    """

    def _blocking_read():
        with open(hass.config.path(path), "r", encoding="utf-8") as stream:
            return stream.read()

    return await hass.async_add_executor_job(_blocking_read)
async def write_file(hass: "HomeAssistant", path: str, content: Any) -> None:
    """Write *content* to *path* (resolved against the HA config folder).

    The blocking write runs in an executor thread to keep the event loop
    responsive.
    """

    def _blocking_write():
        with open(hass.config.path(path), "w", encoding="utf-8") as stream:
            stream.write(content)

    await hass.async_add_executor_job(_blocking_write)
def get_config(hass: "HomeAssistant", key, default):
    """Look up a watchman configuration value.

    Returns *default* when the integration data is not loaded yet or the
    key is absent.
    """
    try:
        options = hass.data[DOMAIN_DATA]
    except KeyError:
        # integration not initialized yet
        return default
    return options.get(key, default)
def get_report_path(hass, path):
    """Validate the report location and return it.

    When *path* is empty, the report goes to the default file inside the
    HA configuration directory. Raises HomeAssistantError when the target
    folder does not exist.
    """
    report_path = path or hass.config.path(DEFAULT_REPORT_FILENAME)
    folder = os.path.dirname(report_path)
    if not os.path.exists(folder):
        raise HomeAssistantError(f"Incorrect report_path: {report_path}.")
    return report_path
def get_columns_width(user_width):
    """Return sanitized report column widths: three ints, each at least 7.

    Falls back to the defaults when the user-supplied value is missing or
    malformed (wrong length, non-numeric entries).
    """
    default_width = [30, 7, 60]
    if not user_width:
        return default_width
    try:
        # clamp every column to the minimum readable width
        return [max(7, user_width[i]) for i in range(3)]
    except (TypeError, IndexError):
        _LOGGER.error(
            "Invalid configuration for table column widths, default values used %s",
            default_width,
        )
        return default_width
def table_renderer(hass, entry_type):
    """Render one section of the report ("service_list" or "entity_list")
    as an ASCII table."""
    table = PrettyTable()
    widths = get_columns_width(get_config(hass, CONF_COLUMNS_WIDTH, None))
    if entry_type == "service_list":
        missing = hass.data[DOMAIN]["services_missing"]
        service_list = hass.data[DOMAIN]["service_list"]
        table.field_names = ["Service ID", "State", "Location"]
        for service in missing:
            table.add_row(
                [
                    fill(service, widths[0]),
                    fill("missing", widths[1]),
                    fill(service_list[service], widths[2]),
                ]
            )
        table.align = "l"
        return table.get_string()
    if entry_type == "entity_list":
        missing = hass.data[DOMAIN]["entities_missing"]
        entity_list = hass.data[DOMAIN]["entity_list"]
        friendly_names = get_config(hass, CONF_FRIENDLY_NAMES, False)
        table.field_names = ["Entity ID", "State", "Location"]
        for entity in missing:
            state, name = get_entity_state(hass, entity, friendly_names)
            table.add_row(
                [
                    fill(entity, widths[0], name),
                    fill(state, widths[1]),
                    fill(entity_list[entity], widths[2]),
                ]
            )
        table.align = "l"
        return table.get_string()
    return f"Table render error: unknown entry type: {entry_type}"
def text_renderer(hass, entry_type):
    """Render one section of the report ("service_list" or "entity_list")
    as a plain text list."""
    if entry_type == "service_list":
        missing = hass.data[DOMAIN]["services_missing"]
        service_list = hass.data[DOMAIN]["service_list"]
        lines = [
            f"{service} in {fill(service_list[service], 0)}\n" for service in missing
        ]
        return "".join(lines)
    if entry_type == "entity_list":
        missing = hass.data[DOMAIN]["entities_missing"]
        entity_list = hass.data[DOMAIN]["entity_list"]
        friendly_names = get_config(hass, CONF_FRIENDLY_NAMES, False)
        lines = []
        for entity in missing:
            state, name = get_entity_state(hass, entity, friendly_names)
            label = f"{entity} ('{name}')" if name else entity
            lines.append(f"{label} [{state}] in: {fill(entity_list[entity], 0)}\n")
        return "".join(lines)
    return f"Text render error: unknown entry type: {entry_type}"
def get_next_file(folder_list, ignored_files):
    """Yield ``(filename, ignored)`` pairs for every file matched by the
    glob patterns in *folder_list*.

    ``ignored`` is True when the file matches one of the fnmatch-style
    patterns in *ignored_files* and should be skipped by the parser.
    Previously the second element was the raw truthiness hack
    ``pattern_string and re.match(...)`` (yielding "", None or a Match
    object); it is now a clean bool, which callers already treat it as.
    """
    ignored_files_re = None
    if ignored_files:
        # combine all ignore patterns into one alternation regex; avoids
        # compiling an empty (match-everything) pattern when there are none
        ignored_files_re = re.compile(
            "|".join(f"({fnmatch.translate(f)})" for f in ignored_files)
        )
    for folder in folder_list:
        for filename in glob.iglob(folder, recursive=True):
            ignored = bool(ignored_files_re and ignored_files_re.match(filename))
            yield (filename, ignored)
def add_entry(_list, entry, yaml_file, lineno):
    """Record an occurrence of *entry* (entity or service id) found in
    *yaml_file* at line *lineno*.

    *_list* maps ``entry -> {yaml_file: [line numbers]}``.

    Bug fix: the original only appended when *yaml_file* was already known
    for the entry and only created the mapping when the entry itself was
    new -- an occurrence of a known entry in a *new* file was silently
    dropped. setdefault covers all three cases.
    """
    _LOGGER.debug("Added %s to the list", entry)
    _list.setdefault(entry, {}).setdefault(yaml_file, []).append(lineno)
def is_service(hass, entry):
    """Return True when *entry* (e.g. 'notify.telegram') is a registered
    service; everything after the first dot is the service name."""
    domain, _, service = entry.partition(".")
    return hass.services.has_service(domain, service)
def get_entity_state(hass, entry, friendly_names=False):
    """Return ``(state, friendly_name)`` for entity *entry*.

    State is "missing" when the entity does not exist; "unavailable" is
    shortened to "unavail" to keep report columns narrow. The friendly
    name is only returned when *friendly_names* is True and the entity
    defines one.
    """
    entity = hass.states.get(entry)
    if not entity:
        return "missing", None
    name = None
    if friendly_names and entity.attributes.get("friendly_name", None):
        name = entity.name
    # fix for #75: some integrations return non-string states, hence str()
    state = str(entity.state).replace("unavailable", "unavail")
    return state, name
def check_services(hass):
    """Return services referenced in config files that are not registered.

    Result maps ``service_id -> occurrences``. Empty when the user ignores
    the "missing" state. Raises HomeAssistantError when parse() has not
    produced a service list yet.
    """
    if "missing" in get_config(hass, CONF_IGNORED_STATES, []):
        # user opted out of "missing" tracking -> nothing to report
        return {}
    if DOMAIN not in hass.data or "service_list" not in hass.data[DOMAIN]:
        raise HomeAssistantError("Service list not found")
    _LOGGER.debug("::check_services")
    services_missing = {}
    for entry, occurences in hass.data[DOMAIN]["service_list"].items():
        if is_service(hass, entry):
            continue
        services_missing[entry] = occurences
        _LOGGER.debug("service %s added to missing list", entry)
    return services_missing
def check_entitites(hass):
    """Return entities referenced in config files that are missing or in a
    monitored bad state ("missing", "unknown", "unavail").

    Result maps ``entity_id -> occurrences``. Raises HomeAssistantError
    when parse() has not produced an entity list yet.

    Note: the function name keeps its historical spelling because external
    callers depend on it.
    """
    # states the user excluded from tracking; "unavailable" is normalized
    # to the short form produced by get_entity_state()
    ignored_states = [
        "unavail" if s == "unavailable" else s
        for s in get_config(hass, CONF_IGNORED_STATES, [])
    ]
    if DOMAIN not in hass.data or "entity_list" not in hass.data[DOMAIN]:
        _LOGGER.error("Entity list not found")
        # raise HomeAssistantError (not a bare Exception) so failures are
        # handled consistently with check_services(); HomeAssistantError
        # subclasses Exception, so existing broad handlers still work
        raise HomeAssistantError("Entity list not found")
    entity_list = hass.data[DOMAIN]["entity_list"]
    entities_missing = {}
    _LOGGER.debug("::check_entities")
    for entry, occurences in entity_list.items():
        if is_service(hass, entry):  # services are handled by check_services()
            _LOGGER.debug("entry %s is service, skipping", entry)
            continue
        state, _ = get_entity_state(hass, entry)
        if state in ignored_states:
            _LOGGER.debug("entry %s ignored due to ignored_states", entry)
            continue
        if state in ["missing", "unknown", "unavail"]:
            entities_missing[entry] = occurences
            _LOGGER.debug("entry %s added to missing list", entry)
    return entities_missing
def parse(hass, folders, ignored_files, root=None):
    """Scan configuration files for entity and service references.

    Args:
        hass: HomeAssistant instance (provides the ignored-items option).
        folders: glob patterns of files/folders to scan.
        ignored_files: fnmatch patterns of files to skip.
        root: base folder used to shorten file paths in the results.

    Returns:
        ``(entity_list, service_list, files_parsed, files_ignored)`` where
        the first two map an id to ``{file: [line numbers]}``.
    """
    files_parsed = 0
    # group 1 captures an optional "key:" prefix (used to skip "service:"
    # lines), group 2 the entity id from one of the known domains
    entity_pattern = re.compile(
        r"(?:(?<=\s)|(?<=^)|(?<=\")|(?<=\'))([A-Za-z_0-9]*\s*:)?(?:\s*)?(?:states.)?"
        r"((air_quality|alarm_control_panel|alert|automation|binary_sensor|button|calendar|camera|"
        r"climate|counter|device_tracker|fan|group|humidifier|input_boolean|input_datetime|"
        r"input_number|input_select|light|lock|media_player|number|person|plant|proximity|remote|"
        r"scene|script|select|sensor|sun|switch|timer|vacuum|weather|zone)\.[A-Za-z_*0-9]+)"
    )
    service_pattern = re.compile(r"service:\s*([A-Za-z_0-9]*\.[A-Za-z_0-9]+)")
    comment_pattern = re.compile(r"#.*")
    entity_list = {}
    service_list = {}
    effectively_ignored = []
    _LOGGER.debug("::parse started")
    for yaml_file, ignored in get_next_file(folders, ignored_files):
        short_path = os.path.relpath(yaml_file, root)
        if ignored:
            effectively_ignored.append(short_path)
            _LOGGER.debug("%s ignored", yaml_file)
            continue
        try:
            # "with" guarantees the handle is closed even when a decoding
            # error aborts the loop (the original leaked the file object)
            with open(yaml_file, encoding="utf-8") as config_file:
                for i, line in enumerate(config_file):
                    line = re.sub(comment_pattern, "", line)
                    for match in re.finditer(entity_pattern, line):
                        typ, val = match.group(1), match.group(2)
                        # skip service definitions, wildcards and yaml refs
                        if (
                            typ != "service:"
                            and "*" not in val
                            and not val.endswith(".yaml")
                        ):
                            add_entry(entity_list, val, short_path, i + 1)
                    for match in re.finditer(service_pattern, line):
                        add_entry(service_list, match.group(1), short_path, i + 1)
            files_parsed += 1
            _LOGGER.debug("%s parsed", yaml_file)
        except OSError as exception:
            _LOGGER.error("Unable to parse %s: %s", yaml_file, exception)
        except UnicodeDecodeError as exception:
            _LOGGER.error(
                "Unable to parse %s: %s. Use UTF-8 encoding to avoid this error",
                yaml_file,
                exception,
            )
    # remove user-ignored and bundled-ignored items from the results
    ignored_items = set(get_config(hass, CONF_IGNORED_ITEMS, [])) | set(
        BUNDLED_IGNORED_ITEMS
    )
    excluded_entities = []
    excluded_services = []
    for pattern in ignored_items:
        if pattern:
            excluded_entities.extend(fnmatch.filter(entity_list, pattern))
            excluded_services.extend(fnmatch.filter(service_list, pattern))
    entity_list = {k: v for k, v in entity_list.items() if k not in excluded_entities}
    service_list = {k: v for k, v in service_list.items() if k not in excluded_services}
    _LOGGER.debug("Parsed files: %s", files_parsed)
    _LOGGER.debug("Ignored files: %s", effectively_ignored)
    _LOGGER.debug("Found entities: %s", len(entity_list))
    _LOGGER.debug("Found services: %s", len(service_list))
    return (entity_list, service_list, files_parsed, len(effectively_ignored))
def fill(data, width, extra=None):
    """Format one report cell.

    Dict input ({file: [lines]}) is rendered as "file:l1,l2". Otherwise
    *data* is stringified, with optional *extra* appended in parentheses.
    When *width* is positive the text is wrapped and each wrapped line is
    left-justified to *width*; width 0 disables wrapping.
    """
    if data and isinstance(data, dict):
        location, line_numbers = next(iter(data.items()))
        text = f"{location}:{','.join(str(num) for num in line_numbers)}"
    elif extra:
        text = f"{data} ('{extra}')"
    else:
        text = str(data)
    if width <= 0:
        return text
    return "\n".join(chunk.ljust(width) for chunk in wrap(text, width))
def report(hass, render, chunk_size, test_mode=False):
    """Generate the watchman report and split it into notification chunks.

    Args:
        hass: HomeAssistant instance holding parsed watchman data.
        render: section renderer, table_renderer or text_renderer.
        chunk_size: max chunk length in characters; None uses the
            configured value, 0 disables splitting.
        test_mode: substitute fixed timestamps/durations so the output is
            reproducible in tests.

    Returns:
        List of report chunks (strings).

    Raises:
        HomeAssistantError: when watchman data is not available yet.
    """
    if DOMAIN not in hass.data:
        raise HomeAssistantError("No data for report, refresh required.")
    start_time = time.time()
    header = get_config(hass, CONF_HEADER, DEFAULT_HEADER)
    domain_data = hass.data[DOMAIN]
    services_missing = domain_data["services_missing"]
    service_list = domain_data["service_list"]
    entities_missing = domain_data["entities_missing"]
    entity_list = domain_data["entity_list"]
    files_parsed = domain_data["files_parsed"]
    files_ignored = domain_data["files_ignored"]
    if chunk_size is None:
        chunk_size = get_config(hass, CONF_CHUNK_SIZE, DEFAULT_CHUNK_SIZE)
    rep = f"{header} \n"
    if services_missing:
        rep += f"\n-== Missing {len(services_missing)} service(s) from "
        rep += f"{len(service_list)} found in your config:\n"
        rep += render(hass, "service_list")
        rep += "\n"
    elif len(service_list) > 0:
        rep += f"\n-== Congratulations, all {len(service_list)} services from "
        rep += "your config are available!\n"
    else:
        rep += "\n-== No services found in configuration files!\n"
    if entities_missing:
        rep += f"\n-== Missing {len(entities_missing)} entity(ies) from "
        rep += f"{len(entity_list)} found in your config:\n"
        rep += render(hass, "entity_list")
        rep += "\n"
    elif len(entity_list) > 0:
        rep += f"\n-== Congratulations, all {len(entity_list)} entities from "
        rep += "your config are available!\n"
    else:
        rep += "\n-== No entities found in configuration files!\n"
    if test_mode:
        # fixed values keep the report byte-stable for tests
        report_datetime = "01 Jan 1970 00:00:00"
        parse_duration = 0.01
        check_duration = 0.105
        render_duration = 0.0003
    else:
        # timezone lookup only when actually used (was computed in test
        # mode too, where it could raise on an invalid time_zone)
        timezone = pytz.timezone(hass.config.time_zone)
        report_datetime = datetime.now(timezone).strftime("%d %b %Y %H:%M:%S")
        parse_duration = domain_data["parse_duration"]
        check_duration = domain_data["check_duration"]
        render_duration = time.time() - start_time
    rep += f"\n-== Report created on {report_datetime}\n"
    rep += (
        f"-== Parsed {files_parsed} files in {parse_duration:.2f}s., "
        f"ignored {files_ignored} files \n"
    )
    rep += f"-== Generated in: {render_duration:.2f}s. Validated in: {check_duration:.2f}s."
    # split into chunks for notification services with message size limits
    report_chunks = []
    chunk = ""
    for line in rep.splitlines():
        chunk += f"{line}\n"
        if chunk_size > 0 and len(chunk) > chunk_size:
            report_chunks.append(chunk)
            chunk = ""
    if chunk:
        report_chunks.append(chunk)
    return report_chunks