Home Assistant Git Exporter
This commit is contained in:
330
config/custom_components/blitzortung/__init__.py
Normal file
330
config/custom_components/blitzortung/__init__.py
Normal file
@@ -0,0 +1,330 @@
|
||||
"""The blitzortung integration."""
|
||||
import asyncio
|
||||
import json
|
||||
import logging
|
||||
import math
|
||||
import time
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.sensor import SensorDeviceClass
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_LATITUDE, CONF_LONGITUDE, CONF_NAME, UnitOfLength
|
||||
from homeassistant.core import callback, HomeAssistant
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_connect
|
||||
from homeassistant.helpers.event import async_track_time_interval
|
||||
|
||||
from homeassistant.util.unit_system import IMPERIAL_SYSTEM
|
||||
from homeassistant.util.unit_conversion import DistanceConverter
|
||||
from . import const
|
||||
from .const import (
|
||||
CONF_IDLE_RESET_TIMEOUT,
|
||||
CONF_MAX_TRACKED_LIGHTNINGS,
|
||||
CONF_RADIUS,
|
||||
CONF_TIME_WINDOW,
|
||||
DEFAULT_IDLE_RESET_TIMEOUT,
|
||||
DEFAULT_MAX_TRACKED_LIGHTNINGS,
|
||||
DEFAULT_RADIUS,
|
||||
DEFAULT_TIME_WINDOW,
|
||||
DEFAULT_UPDATE_INTERVAL,
|
||||
DOMAIN,
|
||||
PLATFORMS,
|
||||
)
|
||||
from .geohash_utils import geohash_overlap
|
||||
from .mqtt import MQTT, MQTT_CONNECTED, MQTT_DISCONNECTED
|
||||
from .version import __version__
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
CONFIG_SCHEMA = vol.Schema(
|
||||
{DOMAIN: vol.Schema({vol.Optional(const.SERVER_STATS, default=False): bool})},
|
||||
extra=vol.ALLOW_EXTRA,
|
||||
)
|
||||
|
||||
|
||||
async def async_setup(hass: HomeAssistant, config: dict):
    """Initialize basic config of blitzortung component.

    Stores the YAML-provided domain configuration (if any) under
    ``hass.data[DOMAIN]["config"]`` so config entries can read it later.
    """
    domain_data = hass.data.setdefault(DOMAIN, {})
    domain_data["config"] = config.get(DOMAIN) or {}
    return True
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry):
    """Set up blitzortung from a config entry.

    Reads the tracking options (falling back to defaults / the HA home
    location), creates the MQTT coordinator, forwards the entry to the
    sensor and geo_location platforms and registers an options listener.
    """
    hass.data.setdefault(DOMAIN, {})
    config = hass.data[DOMAIN].get("config") or {}

    # Fall back to the Home Assistant home location when options are unset.
    latitude = config_entry.options.get(CONF_LATITUDE, hass.config.latitude)
    longitude = config_entry.options.get(CONF_LONGITUDE, hass.config.longitude)
    radius = config_entry.options.get(CONF_RADIUS, DEFAULT_RADIUS)
    max_tracked_lightnings = config_entry.options.get(
        CONF_MAX_TRACKED_LIGHTNINGS, DEFAULT_MAX_TRACKED_LIGHTNINGS
    )
    # The option is stored in minutes; the coordinator expects seconds.
    time_window_seconds = (
        config_entry.options.get(CONF_TIME_WINDOW, DEFAULT_TIME_WINDOW) * 60
    )
    if max_tracked_lightnings >= 500:
        # Fix: the original message was missing a space between the two
        # concatenated fragments ("...may lead tobigger memory usage...").
        _LOGGER.warning(
            "Large number of tracked lightnings: %s, it may lead to "
            "bigger memory usage / unstable frontend",
            max_tracked_lightnings,
        )

    if hass.config.units == IMPERIAL_SYSTEM:
        # On imperial installs the radius option is entered in miles;
        # convert to km, which is what the coordinator works with.
        radius_mi = radius
        radius = DistanceConverter.convert(
            radius, UnitOfLength.MILES, UnitOfLength.KILOMETERS
        )
        _LOGGER.info("imperial system, %s mi -> %s km", radius_mi, radius)

    coordinator = BlitzortungCoordinator(
        hass,
        latitude,
        longitude,
        radius,
        max_tracked_lightnings,
        time_window_seconds,
        DEFAULT_UPDATE_INTERVAL,
        server_stats=config.get(const.SERVER_STATS),
    )

    hass.data[DOMAIN][config_entry.entry_id] = coordinator

    async def start_platforms():
        # Forward the entry to each platform, then start receiving data.
        await asyncio.gather(
            *(
                hass.config_entries.async_forward_entry_setup(config_entry, component)
                for component in PLATFORMS
            )
        )
        await coordinator.connect()

    hass.async_create_task(start_platforms())

    if not config_entry.update_listeners:
        config_entry.add_update_listener(async_update_options)

    return True
|
||||
|
||||
|
||||
async def async_update_options(hass, config_entry):
    """Handle an options update by reloading the config entry."""
    _LOGGER.info("async_update_options")
    await hass.config_entries.async_reload(config_entry.entry_id)
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, config_entry: ConfigEntry):
    """Unload a config entry."""
    # Stop the MQTT client first so no more messages reach the platforms.
    coordinator = hass.data[DOMAIN].pop(config_entry.entry_id)
    await coordinator.disconnect()
    _LOGGER.info("disconnected")

    # cleanup platforms
    results = await asyncio.gather(
        *(
            hass.config_entries.async_forward_entry_unload(config_entry, platform)
            for platform in PLATFORMS
        )
    )
    return all(results)
|
||||
|
||||
|
||||
async def async_migrate_entry(hass, entry):
    """Migrate an old config entry to the current version.

    The version checks deliberately cascade (plain ``if``, not ``elif``)
    so a v1 entry is upgraded step by step all the way to v4.
    """
    _LOGGER.debug("Migrating Blitzortung entry from Version %s", entry.version)
    if entry.version == 1:
        # v1 kept the location in entry.data; move it into options.
        latitude = entry.data[CONF_LATITUDE]
        longitude = entry.data[CONF_LONGITUDE]
        radius = entry.data[CONF_RADIUS]
        name = entry.data[CONF_NAME]

        entry.unique_id = f"{latitude}-{longitude}-{name}-lightning"
        entry.data = {CONF_NAME: name}
        entry.options = {
            CONF_LATITUDE: latitude,
            CONF_LONGITUDE: longitude,
            CONF_RADIUS: radius,
        }
        entry.version = 2
    if entry.version == 2:
        # v2 -> v3: introduce the idle-reset timeout option.
        entry.options = dict(entry.options)
        entry.options[CONF_IDLE_RESET_TIMEOUT] = DEFAULT_IDLE_RESET_TIMEOUT
        entry.version = 3
    if entry.version == 3:
        # v3 -> v4: the idle-reset timeout was replaced by the time window.
        entry.options = dict(entry.options)
        entry.options[CONF_TIME_WINDOW] = entry.options.pop(
            CONF_IDLE_RESET_TIMEOUT, DEFAULT_TIME_WINDOW
        )
        entry.version = 4

    return True
|
||||
|
||||
|
||||
class BlitzortungCoordinator:
    """Receives lightning strikes from the Blitzortung MQTT proxy.

    Subscribes to the geohash-scoped topics covering the configured circle,
    filters incoming strikes by distance and fans them out to registered
    sensors and callbacks. A periodic tick drives time-based state updates.
    """

    def __init__(
        self,
        hass,
        latitude,
        longitude,
        radius,  # unit: km
        max_tracked_lightnings,
        time_window_seconds,
        update_interval,
        server_stats=False,
    ):
        """Initialize."""
        self.hass = hass
        self.latitude = latitude
        self.longitude = longitude
        self.radius = radius
        self.max_tracked_lightnings = max_tracked_lightnings
        self.time_window_seconds = time_window_seconds
        # Fix: previously the update_interval argument was accepted but never
        # stored, and connect() hard-coded const.DEFAULT_UPDATE_INTERVAL.
        self.update_interval = update_interval
        self.server_stats = server_stats
        self.last_time = 0
        self.sensors = []
        self.callbacks = []
        self.lightning_callbacks = []
        self.on_tick_callbacks = []
        # Geohash prefixes whose cells overlap the configured circle.
        self.geohash_overlap = geohash_overlap(
            self.latitude, self.longitude, self.radius
        )
        self._disconnect_callbacks = []
        self.unloading = False

        _LOGGER.info(
            "lat: %s, lon: %s, radius: %skm, geohashes: %s",
            self.latitude,
            self.longitude,
            self.radius,
            self.geohash_overlap,
        )

        self.mqtt_client = MQTT(
            hass,
            "blitzortung.ha.sed.pl",
            1883,
        )

        # Refresh sensor state whenever the MQTT connection goes up or down.
        self._disconnect_callbacks.append(
            async_dispatcher_connect(
                self.hass, MQTT_CONNECTED, self._on_connection_change
            )
        )
        self._disconnect_callbacks.append(
            async_dispatcher_connect(
                self.hass, MQTT_DISCONNECTED, self._on_connection_change
            )
        )

    @callback
    def _on_connection_change(self, *args, **kwargs):
        """Push the new connection state to all registered sensors."""
        if self.unloading:
            return
        for sensor in self.sensors:
            sensor.async_write_ha_state()

    def compute_polar_coords(self, lightning):
        """Annotate a strike dict with distance (km) and azimuth (degrees).

        Uses an equirectangular approximation (longitude scaled by
        cos(latitude), Earth radius 6371 km) relative to the configured
        center point.
        """
        dy = (lightning["lat"] - self.latitude) * math.pi / 180
        dx = (
            (lightning["lon"] - self.longitude)
            * math.pi
            / 180
            * math.cos(self.latitude * math.pi / 180)
        )
        distance = round(math.sqrt(dx * dx + dy * dy) * 6371, 1)
        azimuth = round(math.atan2(dx, dy) * 180 / math.pi) % 360

        lightning[SensorDeviceClass.DISTANCE] = distance
        lightning[const.ATTR_LIGHTNING_AZIMUTH] = azimuth

    async def connect(self):
        """Connect to the MQTT proxy and subscribe to all relevant topics."""
        await self.mqtt_client.async_connect()
        _LOGGER.info("Connected to Blitzortung proxy mqtt server")
        for geohash_code in self.geohash_overlap:
            geohash_part = "/".join(geohash_code)
            await self.mqtt_client.async_subscribe(
                "blitzortung/1.1/{}/#".format(geohash_part), self.on_mqtt_message, qos=0
            )
        if self.server_stats:
            # Optional broker statistics ($SYS topics).
            await self.mqtt_client.async_subscribe(
                "$SYS/broker/#", self.on_mqtt_message, qos=0
            )
        await self.mqtt_client.async_subscribe(
            "component/hello", self.on_hello_message, qos=0
        )

        # Fix: honor the configured update interval instead of the constant.
        self._disconnect_callbacks.append(
            async_track_time_interval(self.hass, self._tick, self.update_interval)
        )

    async def disconnect(self):
        """Disconnect from MQTT and cancel all dispatcher/timer callbacks."""
        self.unloading = True
        await self.mqtt_client.async_disconnect()
        for cb in self._disconnect_callbacks:
            cb()

    def on_hello_message(self, message, *args):
        """Handle the proxy's hello message; notify when a newer version exists."""

        def parse_version(version_str):
            # "1.2.3" -> (1, 2, 3) so versions compare numerically.
            return tuple(map(int, version_str.split(".")))

        data = json.loads(message.payload)
        latest_version_str = data.get("latest_version")
        if latest_version_str:
            default_message = (
                f"New version {latest_version_str} is available. "
                f"[Check it out](https://github.com/mrk-its/homeassistant-blitzortung)"
            )
            latest_version_message = data.get("latest_version_message", default_message)
            latest_version_title = data.get("latest_version_title", "Blitzortung")
            latest_version = parse_version(latest_version_str)
            current_version = parse_version(__version__)
            if latest_version > current_version:
                _LOGGER.info("new version is available: %s", latest_version_str)
                self.hass.components.persistent_notification.async_create(
                    title=latest_version_title,
                    message=latest_version_message,
                    notification_id="blitzortung_new_version_available",
                )

    async def on_mqtt_message(self, message, *args):
        """Dispatch raw MQTT messages; fan out in-radius lightning strikes."""
        for callback in self.callbacks:
            callback(message)
        if message.topic.startswith("blitzortung/1.1"):
            lightning = json.loads(message.payload)
            self.compute_polar_coords(lightning)
            if lightning[SensorDeviceClass.DISTANCE] < self.radius:
                _LOGGER.debug("lightning data: %s", lightning)
                self.last_time = time.time()
                for callback in self.lightning_callbacks:
                    await callback(lightning)
                for sensor in self.sensors:
                    sensor.update_lightning(lightning)

    def register_sensor(self, sensor):
        """Register a sensor entity; it also gets per-tick updates."""
        self.sensors.append(sensor)
        self.register_on_tick(sensor.tick)

    def register_message_receiver(self, message_cb):
        """Register a callback invoked for every raw MQTT message."""
        self.callbacks.append(message_cb)

    def register_lightning_receiver(self, lightning_cb):
        """Register an async callback invoked for every in-radius strike."""
        self.lightning_callbacks.append(lightning_cb)

    def register_on_tick(self, on_tick_cb):
        """Register a callback invoked on every update-interval tick."""
        self.on_tick_callbacks.append(on_tick_cb)

    @property
    def is_inactive(self):
        """True when no strike was seen within the configured time window."""
        return bool(
            self.time_window_seconds
            and (time.time() - self.last_time) >= self.time_window_seconds
        )

    @property
    def is_connected(self):
        """True while the MQTT client reports an active connection."""
        return self.mqtt_client.connected

    async def _tick(self, *args):
        """Run all registered tick callbacks (called on the update interval)."""
        for cb in self.on_tick_callbacks:
            cb()
|
||||
91
config/custom_components/blitzortung/config_flow.py
Normal file
91
config/custom_components/blitzortung/config_flow.py
Normal file
@@ -0,0 +1,91 @@
|
||||
"""Config flow for blitzortung integration."""
|
||||
import voluptuous as vol
|
||||
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
from homeassistant import config_entries
|
||||
from homeassistant.const import CONF_LATITUDE, CONF_LONGITUDE, CONF_NAME
|
||||
|
||||
from .const import (
|
||||
CONF_MAX_TRACKED_LIGHTNINGS,
|
||||
CONF_RADIUS,
|
||||
CONF_TIME_WINDOW,
|
||||
DEFAULT_MAX_TRACKED_LIGHTNINGS,
|
||||
DEFAULT_RADIUS,
|
||||
DEFAULT_TIME_WINDOW,
|
||||
DOMAIN,
|
||||
)
|
||||
|
||||
DEFAULT_CONF_NAME = "Blitzortung"
|
||||
|
||||
|
||||
class DomainConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
    """Handle a config flow for blitzortung."""

    VERSION = 4
    CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_PUSH

    async def async_step_user(self, user_input=None):
        """Handle the initial step."""
        if user_input is None:
            # First visit: show the form asking for the integration name.
            schema = vol.Schema(
                {vol.Required(CONF_NAME, default=DEFAULT_CONF_NAME): str}
            )
            return self.async_show_form(step_id="user", data_schema=schema)

        # The name doubles as the unique id, so each name is configured once.
        await self.async_set_unique_id(user_input[CONF_NAME])
        self._abort_if_unique_id_configured()
        return self.async_create_entry(title=user_input[CONF_NAME], data=user_input)

    @staticmethod
    def async_get_options_flow(config_entry):
        """Return the options flow handler for this entry."""
        return OptionsFlowHandler(config_entry)
|
||||
|
||||
|
||||
class OptionsFlowHandler(config_entries.OptionsFlow):
    """Handle blitzortung options (location, radius, tracking limits)."""

    def __init__(self, config_entry: config_entries.ConfigEntry):
        """Initialize options flow."""
        self.config_entry = config_entry

    async def async_step_init(self, user_input=None):
        """Show the options form and store the submitted values."""
        if user_input is not None:
            return self.async_create_entry(title="", data=user_input)

        options = self.config_entry.options
        schema = vol.Schema(
            {
                vol.Required(
                    CONF_LATITUDE,
                    default=options.get(CONF_LATITUDE, self.hass.config.latitude),
                ): cv.latitude,
                vol.Required(
                    CONF_LONGITUDE,
                    default=options.get(CONF_LONGITUDE, self.hass.config.longitude),
                ): cv.longitude,
                vol.Required(
                    CONF_RADIUS,
                    default=options.get(CONF_RADIUS, DEFAULT_RADIUS),
                ): int,
                vol.Optional(
                    CONF_TIME_WINDOW,
                    default=options.get(CONF_TIME_WINDOW, DEFAULT_TIME_WINDOW),
                ): int,
                vol.Optional(
                    CONF_MAX_TRACKED_LIGHTNINGS,
                    default=options.get(
                        CONF_MAX_TRACKED_LIGHTNINGS, DEFAULT_MAX_TRACKED_LIGHTNINGS
                    ),
                ): int,
            }
        )
        return self.async_show_form(step_id="init", data_schema=schema)
|
||||
33
config/custom_components/blitzortung/const.py
Normal file
33
config/custom_components/blitzortung/const.py
Normal file
@@ -0,0 +1,33 @@
|
||||
import datetime

# Integration software version.
SW_VERSION = "1.3.1"

# Entity platforms provided by this integration.
PLATFORMS = ["sensor", "geo_location"]

DOMAIN = "blitzortung"
DATA_UNSUBSCRIBE = "unsubscribe"

# Attribute keys attached to lightning events.
ATTR_LIGHTNING_AZIMUTH = "azimuth"
ATTR_LIGHTNING_COUNTER = "counter"

# YAML config key enabling MQTT broker ($SYS) statistics.
SERVER_STATS = "server_stats"

BASE_URL_TEMPLATE = (
    "http://data{data_host_nr}.blitzortung.org/Data/Protected/last_strikes.php"
)

# Config-entry option keys.
CONF_RADIUS = "radius"
CONF_IDLE_RESET_TIMEOUT = "idle_reset_timeout"
CONF_TIME_WINDOW = "time_window"
CONF_MAX_TRACKED_LIGHTNINGS = "max_tracked_lightnings"

# Option defaults (radius in km; the time window is in minutes and is
# converted to seconds during entry setup).
DEFAULT_IDLE_RESET_TIMEOUT = 120
DEFAULT_RADIUS = 100
DEFAULT_MAX_TRACKED_LIGHTNINGS = 100
DEFAULT_TIME_WINDOW = 120
DEFAULT_UPDATE_INTERVAL = datetime.timedelta(seconds=60)

# Additional attribute keys / metadata.
ATTR_LAT = "lat"
ATTR_LON = "lon"
ATTRIBUTION = "Data provided by blitzortung.org"
ATTR_EXTERNAL_ID = "external_id"
ATTR_PUBLICATION_DATE = "publication_date"
|
||||
217
config/custom_components/blitzortung/geo_location.py
Normal file
217
config/custom_components/blitzortung/geo_location.py
Normal file
@@ -0,0 +1,217 @@
|
||||
"""Support for Blitzortung geo location events."""
|
||||
import bisect
|
||||
import logging
|
||||
import time
|
||||
import uuid
|
||||
|
||||
from homeassistant.components.geo_location import GeolocationEvent
|
||||
from homeassistant.const import (
|
||||
ATTR_ATTRIBUTION,
|
||||
UnitOfLength
|
||||
)
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.helpers.dispatcher import (
|
||||
async_dispatcher_connect,
|
||||
async_dispatcher_send,
|
||||
)
|
||||
from homeassistant.util.dt import utc_from_timestamp
|
||||
from homeassistant.util.unit_system import IMPERIAL_SYSTEM
|
||||
|
||||
from .const import ATTR_EXTERNAL_ID, ATTR_PUBLICATION_DATE, ATTRIBUTION, DOMAIN
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
DEFAULT_EVENT_NAME_TEMPLATE = "Lightning Strike"
|
||||
DEFAULT_ICON = "mdi:flash"
|
||||
|
||||
SIGNAL_DELETE_ENTITY = "blitzortung_delete_entity_{0}"
|
||||
|
||||
|
||||
async def async_setup_entry(hass, config_entry, async_add_entities):
    """Wire lightning events from the coordinator into geo_location entities."""
    coordinator = hass.data[DOMAIN][config_entry.entry_id]
    if not coordinator.max_tracked_lightnings:
        # Tracking disabled: no event entities to manage.
        return

    manager = BlitzortungEventManager(
        hass,
        async_add_entities,
        coordinator.max_tracked_lightnings,
        coordinator.time_window_seconds,
    )

    coordinator.register_lightning_receiver(manager.lightning_cb)
    coordinator.register_on_tick(manager.tick)
|
||||
|
||||
|
||||
class Strikes(list):
    """A list of strike events kept ordered by publication date.

    Holds at most ``capacity`` items; inserting past capacity (or calling
    :meth:`cleanup`) evicts the oldest items and returns them to the caller.
    """

    def __init__(self, capacity):
        self._keys = []
        self._key_fn = lambda strike: strike._publication_date
        self._max_key = 0
        self._capacity = capacity
        super().__init__()

    def insort(self, item):
        """Insert item in timestamp order; return any evicted oldest items."""
        key = self._key_fn(item)
        if key > self._max_key:
            # Common fast path: the newest strike so far goes at the end.
            self._max_key = key
            self._keys.append(key)
            self.append(item)
        else:
            pos = bisect.bisect_right(self._keys, key)
            self._keys.insert(pos, key)
            self.insert(pos, item)
        overflow = len(self) - self._capacity
        if overflow > 0:
            del self._keys[:overflow]
            evicted = self[:overflow]
            self[:overflow] = []
            return evicted
        return ()

    def cleanup(self, k):
        """Drop and return every item whose key is <= k."""
        if not self._keys or self._keys[0] > k:
            return ()

        cut = bisect.bisect_right(self._keys, k)
        if not cut:
            return ()

        del self._keys[:cut]
        evicted = self[:cut]
        self[:cut] = []
        return evicted
|
||||
|
||||
|
||||
class BlitzortungEventManager:
    """Define a class to handle Blitzortung events."""

    def __init__(
        self, hass, async_add_entities, max_tracked_lightnings, window_seconds,
    ):
        """Initialize."""
        self._async_add_entities = async_add_entities
        self._hass = hass
        self._strikes = Strikes(max_tracked_lightnings)
        self._window_seconds = window_seconds
        # Present distances in the unit system configured in Home Assistant.
        self._unit = (
            UnitOfLength.MILES
            if hass.config.units == IMPERIAL_SYSTEM
            else UnitOfLength.KILOMETERS
        )

    async def lightning_cb(self, lightning):
        """Create a geo_location entity for a newly received strike."""
        _LOGGER.debug("geo_location lightning: %s", lightning)
        event = BlitzortungEvent(
            lightning["distance"],
            lightning["lat"],
            lightning["lon"],
            self._unit,
            lightning["time"],
            lightning["status"],
            lightning["region"],
        )
        evicted = self._strikes.insort(event)
        self._async_add_entities([event])
        if evicted:
            self._remove_events(evicted)
        _LOGGER.debug("tracked lightnings: %s", len(self._strikes))

    @callback
    def _remove_events(self, events):
        """Remove old geo location events."""
        _LOGGER.debug("Going to remove %s", events)
        for event in events:
            async_dispatcher_send(
                self._hass, SIGNAL_DELETE_ENTITY.format(event._strike_id)
            )

    def tick(self):
        """Periodic housekeeping: drop strikes older than the time window."""
        expired = self._strikes.cleanup(time.time() - self._window_seconds)
        if expired:
            self._remove_events(expired)
|
||||
|
||||
|
||||
class BlitzortungEvent(GeolocationEvent):
    """Define a lightning strike event."""

    def __init__(self, distance, latitude, longitude, unit, time, status, region):
        """Initialize entity with data provided."""
        self._distance = distance
        self._latitude = latitude
        self._longitude = longitude
        self._time = time
        self._status = status
        self._region = region
        # Divided by 1e9 — the strike timestamp presumably arrives in
        # nanoseconds; stored as seconds for utc_from_timestamp.
        self._publication_date = time / 1e9
        self._remove_signal_delete = None
        # uuid4().hex equals str(uuid4()).replace("-", "").
        self._strike_id = uuid.uuid4().hex
        self._unit_of_measurement = unit
        self.entity_id = f"geo_location.lightning_strike_{self._strike_id}"

    @property
    def extra_state_attributes(self):
        """Return the device state attributes."""
        return {
            ATTR_EXTERNAL_ID: self._strike_id,
            ATTR_ATTRIBUTION: ATTRIBUTION,
            ATTR_PUBLICATION_DATE: utc_from_timestamp(self._publication_date),
        }

    @property
    def distance(self):
        """Return distance value of this external event."""
        return self._distance

    @property
    def icon(self):
        """Return the icon to use in the front-end."""
        return DEFAULT_ICON

    @property
    def latitude(self):
        """Return latitude value of this external event."""
        return self._latitude

    @property
    def longitude(self):
        """Return longitude value of this external event."""
        return self._longitude

    @property
    def name(self):
        """Return the name of the event."""
        return DEFAULT_EVENT_NAME_TEMPLATE.format(self._publication_date)

    @property
    def source(self) -> str:
        """Return source value of this external event."""
        return DOMAIN

    @property
    def should_poll(self):
        """Disable polling."""
        return False

    @property
    def unit_of_measurement(self):
        """Return the unit of measurement."""
        return self._unit_of_measurement

    @callback
    def _delete_callback(self):
        """Remove this entity."""
        self._remove_signal_delete()
        self.hass.async_create_task(self.async_remove())

    async def async_added_to_hass(self):
        """Call when entity is added to hass."""
        self._remove_signal_delete = async_dispatcher_connect(
            self.hass,
            SIGNAL_DELETE_ENTITY.format(self._strike_id),
            self._delete_callback,
        )
|
||||
466
config/custom_components/blitzortung/geohash.py
Normal file
466
config/custom_components/blitzortung/geohash.py
Normal file
@@ -0,0 +1,466 @@
|
||||
# coding: UTF-8
|
||||
# flake8: noqa
|
||||
"""
|
||||
Copyright (C) 2009 Hiroaki Kawai <kawai@iij.ad.jp>
|
||||
"""
|
||||
try:
    import _geohash  # optional C accelerator; fall back to pure Python
except ImportError:
    _geohash = None

__version__ = "0.8.5"
__all__ = ['encode', 'decode', 'decode_exactly', 'bbox', 'neighbors', 'expand']

# Base-32 alphabet used by geohash (omits a, i, l, o) and its reverse lookup.
_base32 = '0123456789bcdefghjkmnpqrstuvwxyz'
_base32_map = {ch: idx for idx, ch in enumerate(_base32)}

import sys

# Python 2 compatibility: a zero of the wide-integer type.
LONG_ZERO = 0
if sys.version_info[0] < 3:
    LONG_ZERO = long(0)  # noqa: F821
|
||||
|
||||
def _float_hex_to_int(f):
|
||||
if f<-1.0 or f>=1.0:
|
||||
return None
|
||||
|
||||
if f==0.0:
|
||||
return 1,1
|
||||
|
||||
h = f.hex()
|
||||
x = h.find("0x1.")
|
||||
assert(x>=0)
|
||||
p = h.find("p")
|
||||
assert(p>0)
|
||||
|
||||
half_len = len(h[x+4:p])*4-int(h[p+1:])
|
||||
if x==0:
|
||||
r = (1<<half_len) + ((1<<(len(h[x+4:p])*4)) + int(h[x+4:p],16))
|
||||
else:
|
||||
r = (1<<half_len) - ((1<<(len(h[x+4:p])*4)) + int(h[x+4:p],16))
|
||||
|
||||
return r, half_len+1
|
||||
|
||||
def _int_to_float_hex(i, l):
|
||||
if l==0:
|
||||
return -1.0
|
||||
|
||||
half = 1<<(l-1)
|
||||
s = int((l+3)/4)
|
||||
if i >= half:
|
||||
i = i-half
|
||||
return float.fromhex(("0x0.%0"+str(s)+"xp1") % (i<<(s*4-l),))
|
||||
else:
|
||||
i = half-i
|
||||
return float.fromhex(("-0x0.%0"+str(s)+"xp1") % (i<<(s*4-l),))
|
||||
|
||||
def _encode_i2c(lat, lon, lat_length, lon_length):
    """Turn integer fixed-point lat/lon into a geohash string.

    Consumes 5 bits per character from the low end, alternating 3 bits from
    one axis and 2 from the other, then reverses the accumulated characters.
    """
    precision = (lat_length + lon_length) // 5
    if lat_length < lon_length:
        a, b = lon, lat
    else:
        a, b = lat, lon

    boost = (0, 1, 4, 5, 16, 17, 20, 21)
    chars = []
    for _ in range(precision):
        chars.append(_base32[(boost[a & 7] + (boost[b & 3] << 1)) & 0x1F])
        a, b = b >> 2, a >> 3

    return ''.join(reversed(chars))
|
||||
|
||||
def encode(latitude, longitude, precision=12):
    """Encode a latitude/longitude pair into a geohash of the given length."""
    if latitude >= 90.0 or latitude < -90.0:
        raise Exception("invalid latitude.")
    # Normalize longitude into [-180, 180).
    while longitude < -180.0:
        longitude += 360.0
    while longitude >= 180.0:
        longitude -= 360.0

    if _geohash:
        # Delegate to the C extension; pad or trim to the requested length.
        basecode = _geohash.encode(latitude, longitude)
        if len(basecode) > precision:
            return basecode[0:precision]
        return basecode + '0' * (precision - len(basecode))

    xprecision = precision + 1
    lat_length = lon_length = int(xprecision * 5 / 2)
    if xprecision % 2 == 1:
        lon_length += 1

    if hasattr(float, "fromhex"):
        # Exact fixed-point conversion via the float hex representation.
        a = _float_hex_to_int(latitude / 90.0)
        o = _float_hex_to_int(longitude / 180.0)
        if a[1] > lat_length:
            ai = a[0] >> (a[1] - lat_length)
        else:
            ai = a[0] << (lat_length - a[1])

        if o[1] > lon_length:
            oi = o[0] >> (o[1] - lon_length)
        else:
            oi = o[0] << (lon_length - o[1])

        return _encode_i2c(ai, oi, lat_length, lon_length)[:precision]

    # Fallback without float.fromhex: plain fixed-point arithmetic.
    lat = latitude / 180.0
    lon = longitude / 360.0

    if lat > 0:
        lat = int((1 << lat_length) * lat) + (1 << (lat_length - 1))
    else:
        lat = (1 << lat_length - 1) - int((1 << lat_length) * (-lat))

    if lon > 0:
        lon = int((1 << lon_length) * lon) + (1 << (lon_length - 1))
    else:
        lon = (1 << lon_length - 1) - int((1 << lon_length) * (-lon))

    return _encode_i2c(lat, lon, lat_length, lon_length)[:precision]
|
||||
|
||||
def _decode_c2i(hashcode):
    """Decode a geohash string into integers (lat, lon, lat_bits, lon_bits)."""
    lon = lat = 0
    bit_length = lat_length = lon_length = 0
    for ch in hashcode:
        t = _base32_map[ch]
        if bit_length % 2 == 0:
            # Even character: longitude takes 3 of the 5 bits, latitude 2.
            lon = (lon << 3) + (((t >> 2) & 4) + ((t >> 1) & 2) + (t & 1))
            lat = (lat << 2) + (((t >> 2) & 2) + ((t >> 1) & 1))
            lon_length += 3
            lat_length += 2
        else:
            # Odd character: latitude takes 3 of the 5 bits, longitude 2.
            lon = (lon << 2) + (((t >> 2) & 2) + ((t >> 1) & 1))
            lat = (lat << 3) + (((t >> 2) & 4) + ((t >> 1) & 2) + (t & 1))
            lon_length += 2
            lat_length += 3
        bit_length += 5

    return (lat, lon, lat_length, lon_length)
|
||||
|
||||
def decode(hashcode, delta=False):
    '''
    decode a hashcode and get center coordinate, and distance between center and outer border
    '''
    if _geohash:
        (lat, lon, lat_bits, lon_bits) = _geohash.decode(hashcode)
        latitude_delta = 90.0 / (1 << lat_bits)
        longitude_delta = 180.0 / (1 << lon_bits)
        latitude = lat + latitude_delta
        longitude = lon + longitude_delta
        if delta:
            return latitude, longitude, latitude_delta, longitude_delta
        return latitude, longitude

    (lat, lon, lat_length, lon_length) = _decode_c2i(hashcode)

    if hasattr(float, "fromhex"):
        latitude_delta = 90.0 / (1 << lat_length)
        longitude_delta = 180.0 / (1 << lon_length)
        latitude = _int_to_float_hex(lat, lat_length) * 90.0 + latitude_delta
        longitude = _int_to_float_hex(lon, lon_length) * 180.0 + longitude_delta
        if delta:
            return latitude, longitude, latitude_delta, longitude_delta
        return latitude, longitude

    # Fallback without float.fromhex: refine by one extra bit, then scale.
    lat = (lat << 1) + 1
    lon = (lon << 1) + 1
    lat_length += 1
    lon_length += 1

    latitude = 180.0 * (lat - (1 << (lat_length - 1))) / (1 << lat_length)
    longitude = 360.0 * (lon - (1 << (lon_length - 1))) / (1 << lon_length)
    if delta:
        latitude_delta = 180.0 / (1 << lat_length)
        longitude_delta = 360.0 / (1 << lon_length)
        return latitude, longitude, latitude_delta, longitude_delta

    return latitude, longitude
|
||||
|
||||
def decode_exactly(hashcode):
    """Decode a hashcode returning center plus per-axis deltas."""
    return decode(hashcode, True)
|
||||
|
||||
## hashcode operations below
|
||||
|
||||
def bbox(hashcode):
    '''
    decode a hashcode and get north, south, east and west border.
    '''
    if _geohash:
        (lat, lon, lat_bits, lon_bits) = _geohash.decode(hashcode)
        return {
            's': lat,
            'w': lon,
            'n': lat + 180.0 / (1 << lat_bits),
            'e': lon + 360.0 / (1 << lon_bits),
        }

    (lat, lon, lat_length, lon_length) = _decode_c2i(hashcode)
    if hasattr(float, "fromhex"):
        latitude_delta = 180.0 / (1 << lat_length)
        longitude_delta = 360.0 / (1 << lon_length)
        latitude = _int_to_float_hex(lat, lat_length) * 90.0
        longitude = _int_to_float_hex(lon, lon_length) * 180.0
        return {
            "s": latitude,
            "w": longitude,
            "n": latitude + latitude_delta,
            "e": longitude + longitude_delta,
        }

    ret = {}
    if lat_length:
        ret['n'] = 180.0 * (lat + 1 - (1 << (lat_length - 1))) / (1 << lat_length)
        ret['s'] = 180.0 * (lat - (1 << (lat_length - 1))) / (1 << lat_length)
    else:  # can't calculate the half with bit shifts (negative shift)
        ret['n'] = 90.0
        ret['s'] = -90.0

    if lon_length:
        ret['e'] = 360.0 * (lon + 1 - (1 << (lon_length - 1))) / (1 << lon_length)
        ret['w'] = 360.0 * (lon - (1 << (lon_length - 1))) / (1 << lon_length)
    else:  # can't calculate the half with bit shifts (negative shift)
        ret['e'] = 180.0
        ret['w'] = -180.0

    return ret
|
||||
|
||||
def neighbors(hashcode):
    """Return the geohashes adjacent to hashcode (up to 8 of them)."""
    if _geohash and len(hashcode) < 25:
        return _geohash.neighbors(hashcode)

    (lat, lon, lat_length, lon_length) = _decode_c2i(hashcode)
    ret = []

    # Same row: west and east neighbours (longitude wraps inside encode).
    for adj_lon in (lon - 1, lon + 1):
        code = _encode_i2c(lat, adj_lon, lat_length, lon_length)
        if code:
            ret.append(code)

    # Row above, unless we are already at the north edge.
    if not (lat + 1) >> lat_length:
        for adj_lon in (lon - 1, lon, lon + 1):
            ret.append(_encode_i2c(lat + 1, adj_lon, lat_length, lon_length))

    # Row below, unless we are already at the south edge.
    if lat - 1 >= 0:
        for adj_lon in (lon - 1, lon, lon + 1):
            ret.append(_encode_i2c(lat - 1, adj_lon, lat_length, lon_length))

    return ret
|
||||
|
||||
def expand(hashcode):
    """Return hashcode together with all of its neighbors."""
    codes = neighbors(hashcode)
    codes.append(hashcode)
    return codes
|
||||
|
||||
def _uint64_interleave(lat32, lon32):
|
||||
intr = 0
|
||||
boost = (0,1,4,5,16,17,20,21,64,65,68,69,80,81,84,85)
|
||||
for i in range(8):
|
||||
intr = (intr<<8) + (boost[(lon32>>(28-i*4))%16]<<1) + boost[(lat32>>(28-i*4))%16]
|
||||
|
||||
return intr
|
||||
|
||||
def _uint64_deinterleave(ui64):
|
||||
lat = lon = 0
|
||||
boost = ((0,0),(0,1),(1,0),(1,1),(0,2),(0,3),(1,2),(1,3),
|
||||
(2,0),(2,1),(3,0),(3,1),(2,2),(2,3),(3,2),(3,3))
|
||||
for i in range(16):
|
||||
p = boost[(ui64>>(60-i*4))%16]
|
||||
lon = (lon<<2) + p[0]
|
||||
lat = (lat<<2) + p[1]
|
||||
|
||||
return (lat, lon)
|
||||
|
||||
def encode_uint64(latitude, longitude):
    """Encode a coordinate into a 64-bit interleaved integer geohash."""
    if latitude >= 90.0 or latitude < -90.0:
        raise ValueError("Latitude must be in the range of (-90.0, 90.0)")
    # Normalize longitude into [-180, 180).
    while longitude < -180.0:
        longitude += 360.0
    while longitude >= 180.0:
        longitude -= 360.0

    if _geohash:
        # The C extension returns the 128-bit code in intunit-sized pieces.
        ui128 = _geohash.encode_int(latitude, longitude)
        if _geohash.intunit == 64:
            return ui128[0]
        elif _geohash.intunit == 32:
            return (ui128[0] << 32) + ui128[1]
        elif _geohash.intunit == 16:
            return (ui128[0] << 48) + (ui128[1] << 32) + (ui128[2] << 16) + ui128[3]

    lat = int(((latitude + 90.0) / 180.0) * (1 << 32))
    lon = int(((longitude + 180.0) / 360.0) * (1 << 32))
    return _uint64_interleave(lat, lon)
|
||||
|
||||
def decode_uint64(ui64):
    """Decode a 64-bit interleaved geohash integer back to ``(lat, lon)``
    degrees, preferring the C extension when it is available."""
    if _geohash:
        # NOTE(review): `% 0xFFFFFFFFFFFFFFFF` looks like it was meant to be a
        # 64-bit mask (`& 0xFFFFFFFFFFFFFFFF`); modulo 2**64-1 maps the
        # all-ones input to 0 — confirm against the C extension's contract.
        latlon = _geohash.decode_int(ui64 % 0xFFFFFFFFFFFFFFFF, LONG_ZERO)
        if latlon:
            return latlon

    # Pure-python fallback: deinterleave, then scale 32-bit fixed-point
    # coordinates back to degrees.
    lat,lon = _uint64_deinterleave(ui64)
    return (180.0*lat/(1<<32) - 90.0, 360.0*lon/(1<<32) - 180.0)
|
||||
|
||||
def expand_uint64(ui64, precision=50):
    """Expand the geohash cell *ui64* (clamped to *precision* bits) to its
    3x3 neighborhood, returned as a sorted list of merged ``(start, end)``
    uint64 ranges.

    ``None`` as a start/end means the range is unbounded on that side.  An
    empty list means the neighborhood spans the whole value space.
    """
    # Clamp the code to the requested precision by masking off low bits.
    ui64 = ui64 & (0xFFFFFFFFFFFFFFFF << (64-precision))
    lat,lon = _uint64_deinterleave(ui64)
    # Cell size along each axis in deinterleaved 32-bit coordinates; for odd
    # precision the longitude cell is half the latitude cell.
    lat_grid = 1<<(32-int(precision/2))
    lon_grid = lat_grid>>(precision%2)

    if precision<=2: # expansion covers the whole range
        return []

    # Assemble the 3x3 neighborhood as ranges of interleaved values.  Which
    # neighbor cells fuse into one contiguous range depends on the parity of
    # the precision and on which quadrant of its parent cell (lat bit,
    # lon bit) the center occupies; boundary neighbors that would fall off
    # the coordinate space are skipped.
    ranges = []
    if lat & lat_grid:
        if lon & lon_grid:
            ui64 = _uint64_interleave(lat-lat_grid, lon-lon_grid)
            ranges.append((ui64, ui64 + (1<<(64-precision+2))))
            if precision%2==0:
                # lat,lon = (1, 1) and even precision
                ui64 = _uint64_interleave(lat-lat_grid, lon+lon_grid)
                ranges.append((ui64, ui64 + (1<<(64-precision+1))))

                if lat + lat_grid < 0xFFFFFFFF:
                    ui64 = _uint64_interleave(lat+lat_grid, lon-lon_grid)
                    ranges.append((ui64, ui64 + (1<<(64-precision))))
                    ui64 = _uint64_interleave(lat+lat_grid, lon)
                    ranges.append((ui64, ui64 + (1<<(64-precision))))
                    ui64 = _uint64_interleave(lat+lat_grid, lon+lon_grid)
                    ranges.append((ui64, ui64 + (1<<(64-precision))))
            else:
                # lat,lon = (1, 1) and odd precision
                if lat + lat_grid < 0xFFFFFFFF:
                    ui64 = _uint64_interleave(lat+lat_grid, lon-lon_grid)
                    ranges.append((ui64, ui64 + (1<<(64-precision+1))))

                ui64 = _uint64_interleave(lat+lat_grid, lon+lon_grid)
                ranges.append((ui64, ui64 + (1<<(64-precision))))

                ui64 = _uint64_interleave(lat, lon+lon_grid)
                ranges.append((ui64, ui64 + (1<<(64-precision))))
                ui64 = _uint64_interleave(lat-lat_grid, lon+lon_grid)
                ranges.append((ui64, ui64 + (1<<(64-precision))))
        else:
            ui64 = _uint64_interleave(lat-lat_grid, lon)
            ranges.append((ui64, ui64 + (1<<(64-precision+2))))
            if precision%2==0:
                # lat,lon = (1, 0) and even precision
                ui64 = _uint64_interleave(lat-lat_grid, lon-lon_grid)
                ranges.append((ui64, ui64 + (1<<(64-precision+1))))

                if lat + lat_grid < 0xFFFFFFFF:
                    ui64 = _uint64_interleave(lat+lat_grid, lon-lon_grid)
                    ranges.append((ui64, ui64 + (1<<(64-precision))))
                    ui64 = _uint64_interleave(lat+lat_grid, lon)
                    ranges.append((ui64, ui64 + (1<<(64-precision))))
                    ui64 = _uint64_interleave(lat+lat_grid, lon+lon_grid)
                    ranges.append((ui64, ui64 + (1<<(64-precision))))
            else:
                # lat,lon = (1, 0) and odd precision
                if lat + lat_grid < 0xFFFFFFFF:
                    ui64 = _uint64_interleave(lat+lat_grid, lon)
                    ranges.append((ui64, ui64 + (1<<(64-precision+1))))

                ui64 = _uint64_interleave(lat+lat_grid, lon-lon_grid)
                ranges.append((ui64, ui64 + (1<<(64-precision))))
                ui64 = _uint64_interleave(lat, lon-lon_grid)
                ranges.append((ui64, ui64 + (1<<(64-precision))))
                ui64 = _uint64_interleave(lat-lat_grid, lon-lon_grid)
                ranges.append((ui64, ui64 + (1<<(64-precision))))
    else:
        if lon & lon_grid:
            ui64 = _uint64_interleave(lat, lon-lon_grid)
            ranges.append((ui64, ui64 + (1<<(64-precision+2))))
            if precision%2==0:
                # lat,lon = (0, 1) and even precision
                ui64 = _uint64_interleave(lat, lon+lon_grid)
                ranges.append((ui64, ui64 + (1<<(64-precision+1))))

                if lat > 0:
                    ui64 = _uint64_interleave(lat-lat_grid, lon-lon_grid)
                    ranges.append((ui64, ui64 + (1<<(64-precision))))
                    ui64 = _uint64_interleave(lat-lat_grid, lon)
                    ranges.append((ui64, ui64 + (1<<(64-precision))))
                    ui64 = _uint64_interleave(lat-lat_grid, lon+lon_grid)
                    ranges.append((ui64, ui64 + (1<<(64-precision))))
            else:
                # lat,lon = (0, 1) and odd precision
                if lat > 0:
                    ui64 = _uint64_interleave(lat-lat_grid, lon-lon_grid)
                    ranges.append((ui64, ui64 + (1<<(64-precision+1))))

                ui64 = _uint64_interleave(lat-lat_grid, lon+lon_grid)
                ranges.append((ui64, ui64 + (1<<(64-precision))))
                ui64 = _uint64_interleave(lat, lon+lon_grid)
                ranges.append((ui64, ui64 + (1<<(64-precision))))
                ui64 = _uint64_interleave(lat+lat_grid, lon+lon_grid)
                ranges.append((ui64, ui64 + (1<<(64-precision))))
        else:
            ui64 = _uint64_interleave(lat, lon)
            ranges.append((ui64, ui64 + (1<<(64-precision+2))))
            if precision%2==0:
                # lat,lon = (0, 0) and even precision
                ui64 = _uint64_interleave(lat, lon-lon_grid)
                ranges.append((ui64, ui64 + (1<<(64-precision+1))))

                if lat > 0:
                    ui64 = _uint64_interleave(lat-lat_grid, lon-lon_grid)
                    ranges.append((ui64, ui64 + (1<<(64-precision))))
                    ui64 = _uint64_interleave(lat-lat_grid, lon)
                    ranges.append((ui64, ui64 + (1<<(64-precision))))
                    ui64 = _uint64_interleave(lat-lat_grid, lon+lon_grid)
                    ranges.append((ui64, ui64 + (1<<(64-precision))))
            else:
                # lat,lon = (0, 0) and odd precision
                if lat > 0:
                    ui64 = _uint64_interleave(lat-lat_grid, lon)
                    ranges.append((ui64, ui64 + (1<<(64-precision+1))))

                ui64 = _uint64_interleave(lat-lat_grid, lon-lon_grid)
                ranges.append((ui64, ui64 + (1<<(64-precision))))
                ui64 = _uint64_interleave(lat, lon-lon_grid)
                ranges.append((ui64, ui64 + (1<<(64-precision))))
                ui64 = _uint64_interleave(lat+lat_grid, lon-lon_grid)
                ranges.append((ui64, ui64 + (1<<(64-precision))))

    ranges.sort()

    # merge the conditions: adjacent ranges whose end meets the next start
    # collapse into one.
    shrink = []
    prev = None
    for i in ranges:
        if prev:
            if prev[1] != i[0]:
                shrink.append(prev)
                prev = i
            else:
                prev = (prev[0], i[1])
        else:
            prev = i

    shrink.append(prev)

    # Replace bounds that touch the edges of the value space with None so
    # callers can drop those comparisons entirely.
    ranges = []
    for i in shrink:
        a,b=i
        if a == 0:
            a = None # we can remove the condition because it is the lowest value
        if b == 0x10000000000000000:
            b = None # we can remove the condition because it is the highest value

        ranges.append((a,b))

    return ranges
|
||||
58
config/custom_components/blitzortung/geohash_utils.py
Normal file
58
config/custom_components/blitzortung/geohash_utils.py
Normal file
@@ -0,0 +1,58 @@
|
||||
import math
|
||||
from collections import namedtuple
|
||||
|
||||
from . import geohash
|
||||
|
||||
Box = namedtuple("Box", ["s", "w", "n", "e"])
|
||||
|
||||
|
||||
def geohash_bbox(gh):
    """Return the bounding box of geohash *gh* as a ``Box(s, w, n, e)``."""
    raw = geohash.bbox(gh)
    return Box(raw["s"], raw["w"], raw["n"], raw["e"])
|
||||
|
||||
|
||||
def bbox(lat, lon, radius):
    """Approximate bounding ``Box`` around (*lat*, *lon*) for *radius* km.

    Uses a spherical approximation: 40000 km for the full meridian
    circumference, with the longitude span widened by 1/cos(lat).
    """
    delta_lat = radius * 360 / 40000
    delta_lon = delta_lat / math.cos(math.radians(lat))
    return Box(lat - delta_lat, lon - delta_lon, lat + delta_lat, lon + delta_lon)
|
||||
|
||||
|
||||
def overlap(a1, a2, b1, b2):
    """Return True when the intervals (a1, a2) and (b1, b2) intersect."""
    return b1 < a2 and a1 < b2
|
||||
|
||||
|
||||
def box_overlap(box1: Box, box2: Box):
    """Return True when two Boxes intersect in both latitude and longitude."""
    lat_hit = overlap(box1.s, box1.n, box2.s, box2.n)
    lon_hit = overlap(box1.w, box1.e, box2.w, box2.e)
    return lat_hit and lon_hit
|
||||
|
||||
|
||||
def compute_geohash_tiles(lat, lon, radius, precision):
    """Return the set of geohashes at *precision* covering the circle of
    *radius* km around (*lat*, *lon*), approximated by its bounding box."""
    bounds = bbox(lat, lon, radius)
    center = geohash.encode(lat, lon, precision)

    # Flood-fill outwards from the center tile, keeping every neighbor that
    # still intersects the bounding box.
    checked = {center}
    pending = {center}

    while pending:
        tile = pending.pop()
        for adjacent in geohash.neighbors(tile):
            if adjacent in checked:
                continue
            if box_overlap(geohash_bbox(adjacent), bounds):
                pending.add(adjacent)
                checked.add(adjacent)
    return checked
|
||||
|
||||
|
||||
def geohash_overlap(lat, lon, radius, max_tiles=9):
    """Return the finest-precision geohash covering of the radius around
    (*lat*, *lon*) that uses at most *max_tiles* tiles.

    Precisions 1..12 are tried in increasing order; the last covering that
    fits within the limit is returned.  An empty list is returned when even
    precision 1 produces more than *max_tiles* tiles.
    """
    result = []
    for precision in range(1, 13):
        tiles = compute_geohash_tiles(lat, lon, radius, precision)
        # Bug fix: the limit was hard-coded to 9, silently ignoring the
        # max_tiles parameter.  (A no-op `precision += 1` inside the loop
        # was also removed; `for` rebinds the variable each iteration.)
        if len(tiles) <= max_tiles:
            result = tiles
        else:
            break
    return result
|
||||
17
config/custom_components/blitzortung/manifest.json
Normal file
17
config/custom_components/blitzortung/manifest.json
Normal file
@@ -0,0 +1,17 @@
|
||||
{
|
||||
"domain": "blitzortung",
|
||||
"name": "Blitzortung",
|
||||
"after_dependencies": [],
|
||||
"codeowners": [
|
||||
"@mrk-its"
|
||||
],
|
||||
"config_flow": true,
|
||||
"dependencies": [
|
||||
"persistent_notification"
|
||||
],
|
||||
"documentation": "https://github.com/mrk-its/homeassistant-blitzortung",
|
||||
"iot_class": "cloud_push",
|
||||
"issue_tracker": "https://github.com/mrk-its/homeassistant-blitzortung/issues",
|
||||
"requirements": ["paho-mqtt>=1.5.0"],
|
||||
"version": "1.0.1"
|
||||
}
|
||||
305
config/custom_components/blitzortung/mqtt.py
Normal file
305
config/custom_components/blitzortung/mqtt.py
Normal file
@@ -0,0 +1,305 @@
|
||||
"""Support for MQTT message handling."""
|
||||
import asyncio
|
||||
import datetime as dt
|
||||
import logging
|
||||
from itertools import groupby
|
||||
from operator import attrgetter
|
||||
from typing import Callable, List, Optional, Union
|
||||
|
||||
import attr
|
||||
|
||||
from homeassistant.core import callback, HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.dispatcher import dispatcher_send
|
||||
from homeassistant.util import dt as dt_util
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
DEFAULT_PORT = 1883
|
||||
DEFAULT_KEEPALIVE = 60
|
||||
PROTOCOL_311 = "3.1.1"
|
||||
DEFAULT_PROTOCOL = PROTOCOL_311
|
||||
MQTT_CONNECTED = "blitzortung_mqtt_connected"
|
||||
MQTT_DISCONNECTED = "blitzortung_mqtt_disconnected"
|
||||
|
||||
|
||||
MAX_RECONNECT_WAIT = 300 # seconds
|
||||
|
||||
|
||||
def _raise_on_error(result_code: int) -> None:
    """Raise error if error result."""
    # pylint: disable=import-outside-toplevel
    import paho.mqtt.client as mqtt

    # Zero is paho's success code; anything else is translated to a
    # human-readable HomeAssistantError.
    if result_code == 0:
        return
    raise HomeAssistantError(
        f"Error talking to MQTT: {mqtt.error_string(result_code)}"
    )
|
||||
|
||||
|
||||
def _match_topic(subscription: str, topic: str) -> bool:
    """Test if topic matches subscription."""
    # pylint: disable=import-outside-toplevel
    from paho.mqtt.matcher import MQTTMatcher

    matcher = MQTTMatcher()
    matcher[subscription] = True
    # iter_match yields nothing when the topic does not match the filter.
    try:
        next(matcher.iter_match(topic))
    except StopIteration:
        return False
    return True
|
||||
|
||||
|
||||
PublishPayloadType = Union[str, bytes, int, float, None]
|
||||
|
||||
|
||||
@attr.s(slots=True, frozen=True)
class Message:
    """MQTT Message."""

    # Topic the message was received on.
    topic = attr.ib(type=str)
    # Decoded payload (or raw bytes when the subscription has no encoding).
    payload = attr.ib(type=PublishPayloadType)
    qos = attr.ib(type=int)
    retain = attr.ib(type=bool)
    # The subscription filter that matched this message, if any.
    subscribed_topic = attr.ib(type=str, default=None)
    # UTC receive time, stamped by the client when dispatching.
    timestamp = attr.ib(type=dt.datetime, default=None)
||||
|
||||
|
||||
MessageCallbackType = Callable[[Message], None]
|
||||
|
||||
|
||||
@attr.s(slots=True, frozen=True)
class Subscription:
    """Class to hold data about an active subscription."""

    # Topic filter (may contain MQTT wildcards).
    topic = attr.ib(type=str)
    # Coroutine-producing callback invoked with a Message for each match.
    callback = attr.ib(type=MessageCallbackType)
    qos = attr.ib(type=int, default=0)
    # Payload text encoding; None delivers the raw bytes.
    encoding = attr.ib(type=str, default="utf-8")
||||
|
||||
|
||||
SubscribePayloadType = Union[str, bytes] # Only bytes if encoding is None
|
||||
|
||||
|
||||
class MQTT:
    """Home Assistant MQTT client.

    Thin async wrapper around a paho-mqtt Client: blocking paho calls are
    pushed to the executor and serialized by ``_paho_lock``; paho callbacks
    are handed back to the Home Assistant event loop.
    """

    def __init__(
        self,
        hass: HomeAssistant,
        host,
        port=DEFAULT_PORT,
        keepalive=DEFAULT_KEEPALIVE,
    ) -> None:
        """Initialize Home Assistant MQTT client."""
        # We don't import on the top because some integrations
        # should be able to optionally rely on MQTT.
        import paho.mqtt.client as mqtt  # pylint: disable=import-outside-toplevel

        self.hass = hass
        self.host = host
        self.port = port
        self.keepalive = keepalive
        # Every registered Subscription; re-applied on each (re)connect.
        self.subscriptions: List[Subscription] = []
        self.connected = False
        self._mqttc: mqtt.Client = None
        # Serializes blocking paho calls (publish/subscribe/unsubscribe).
        self._paho_lock = asyncio.Lock()

        self.init_client()

    def init_client(self):
        """Initialize paho client."""
        # We don't import on the top because some integrations
        # should be able to optionally rely on MQTT.
        import paho.mqtt.client as mqtt  # pylint: disable=import-outside-toplevel

        proto = mqtt.MQTTv311
        self._mqttc = mqtt.Client(protocol=proto)

        self._mqttc.on_connect = self._mqtt_on_connect
        self._mqttc.on_disconnect = self._mqtt_on_disconnect
        self._mqttc.on_message = self._mqtt_on_message

    async def async_publish(
        self, topic: str, payload: PublishPayloadType, qos: int, retain: bool
    ) -> None:
        """Publish a MQTT message."""
        async with self._paho_lock:
            _LOGGER.debug("Transmitting message on %s: %s", topic, payload)
            # paho's publish() can block, so run it in the executor.
            await self.hass.async_add_executor_job(
                self._mqttc.publish, topic, payload, qos, retain
            )

    async def async_connect(self) -> str:
        """Connect to the host. Does not process messages yet."""
        # pylint: disable=import-outside-toplevel
        import paho.mqtt.client as mqtt

        result: int = None
        try:
            result = await self.hass.async_add_executor_job(
                self._mqttc.connect, self.host, self.port, self.keepalive,
            )
        except OSError as err:
            _LOGGER.error("Failed to connect to MQTT server due to exception: %s", err)

        if result is not None and result != 0:
            _LOGGER.error(
                "Failed to connect to MQTT server: %s", mqtt.error_string(result)
            )

        # Start paho's network loop thread; loop_start is called even after a
        # failed connect so paho can keep retrying in the background.
        self._mqttc.loop_start()

    async def async_disconnect(self):
        """Stop the MQTT client."""

        def stop():
            """Stop the MQTT client."""
            self._mqttc.disconnect()
            self._mqttc.loop_stop()

        await self.hass.async_add_executor_job(stop)

    async def async_subscribe(
        self, topic: str, msg_callback, qos: int, encoding: Optional[str] = None,
    ) -> Callable[[], None]:
        """Set up a subscription to a topic with the provided qos.

        This method is a coroutine.

        Returns a callback that removes the subscription again.
        """
        if not isinstance(topic, str):
            raise HomeAssistantError("Topic needs to be a string!")

        subscription = Subscription(topic, msg_callback, qos, encoding)
        self.subscriptions.append(subscription)

        # Only subscribe if currently connected.
        # Otherwise _mqtt_on_connect re-subscribes everything on connect.
        if self.connected:
            await self._async_perform_subscription(topic, qos)

        @callback
        def async_remove() -> None:
            """Remove subscription."""
            if subscription not in self.subscriptions:
                raise HomeAssistantError("Can't remove subscription twice")
            self.subscriptions.remove(subscription)

            if any(other.topic == topic for other in self.subscriptions):
                # Other subscriptions on topic remaining - don't unsubscribe.
                return

            # Only unsubscribe if currently connected.
            if self.connected:
                self.hass.async_create_task(self._async_unsubscribe(topic))

        return async_remove

    async def _async_unsubscribe(self, topic: str) -> None:
        """Unsubscribe from a topic.

        This method is a coroutine.
        """
        _LOGGER.debug("Unsubscribing from %s", topic)
        async with self._paho_lock:
            result: int = None
            result, _ = await self.hass.async_add_executor_job(
                self._mqttc.unsubscribe, topic
            )
            _raise_on_error(result)

    async def _async_perform_subscription(self, topic: str, qos: int) -> None:
        """Perform a paho-mqtt subscription."""
        _LOGGER.debug("Subscribing to %s", topic)

        async with self._paho_lock:
            result: int = None
            result, _ = await self.hass.async_add_executor_job(
                self._mqttc.subscribe, topic, qos
            )
            _raise_on_error(result)

    def _mqtt_on_connect(self, _mqttc, _userdata, _flags, result_code: int) -> None:
        """On connect callback.

        Resubscribe to all topics we were subscribed to and publish birth
        message.
        """
        # pylint: disable=import-outside-toplevel
        import paho.mqtt.client as mqtt

        if result_code != mqtt.CONNACK_ACCEPTED:
            _LOGGER.error(
                "Unable to connect to the MQTT broker: %s",
                mqtt.connack_string(result_code),
            )
            return

        self.connected = True
        dispatcher_send(self.hass, MQTT_CONNECTED)
        _LOGGER.info(
            "Connected to MQTT server %s:%s (%s)", self.host, self.port, result_code,
        )

        # Group subscriptions to only re-subscribe once for each topic.
        keyfunc = attrgetter("topic")
        for topic, subs in groupby(sorted(self.subscriptions, key=keyfunc), keyfunc):
            # Re-subscribe with the highest requested qos
            max_qos = max(subscription.qos for subscription in subs)
            self.hass.add_job(self._async_perform_subscription, topic, max_qos)

    def _mqtt_on_message(self, _mqttc, _userdata, msg) -> None:
        """Message received callback."""
        # Invoked by paho outside the event loop; add_job hands the message
        # over to Home Assistant's loop for processing.
        self.hass.add_job(self._mqtt_handle_message, msg)

    @callback
    def _mqtt_handle_message(self, msg) -> None:
        """Dispatch a received message to all matching subscriptions."""
        _LOGGER.debug(
            "Received message on %s%s: %s",
            msg.topic,
            " (retained)" if msg.retain else "",
            msg.payload,
        )
        timestamp = dt_util.utcnow()

        for subscription in self.subscriptions:
            if not _match_topic(subscription.topic, msg.topic):
                continue

            payload: SubscribePayloadType = msg.payload
            if subscription.encoding is not None:
                try:
                    payload = msg.payload.decode(subscription.encoding)
                except (AttributeError, UnicodeDecodeError):
                    # Undecodable payloads are logged and skipped for this
                    # subscription rather than aborting dispatch.
                    _LOGGER.warning(
                        "Can't decode payload %s on %s with encoding %s (for %s)",
                        msg.payload,
                        msg.topic,
                        subscription.encoding,
                        subscription.callback,
                    )
                    continue

            self.hass.async_create_task(
                subscription.callback(
                    Message(
                        msg.topic,
                        payload,
                        msg.qos,
                        msg.retain,
                        subscription.topic,
                        timestamp,
                    )
                )
            )

    def _mqtt_on_disconnect(self, _mqttc, _userdata, result_code: int) -> None:
        """Disconnected callback."""
        self.connected = False
        dispatcher_send(self.hass, MQTT_DISCONNECTED)
        _LOGGER.info(
            "Disconnected from MQTT server %s:%s (%s)",
            self.host,
            self.port,
            result_code,
        )
|
||||
225
config/custom_components/blitzortung/sensor.py
Normal file
225
config/custom_components/blitzortung/sensor.py
Normal file
@@ -0,0 +1,225 @@
|
||||
import logging
|
||||
|
||||
from homeassistant.const import ATTR_ATTRIBUTION, CONF_NAME, DEGREE, UnitOfLength
|
||||
from homeassistant.components.sensor import SensorDeviceClass, SensorEntity, SensorStateClass
|
||||
from homeassistant.helpers.device_registry import DeviceEntryType
|
||||
|
||||
from .const import (
|
||||
ATTR_LAT,
|
||||
ATTR_LIGHTNING_AZIMUTH,
|
||||
ATTR_LIGHTNING_COUNTER,
|
||||
ATTR_LON,
|
||||
ATTRIBUTION,
|
||||
DOMAIN,
|
||||
SERVER_STATS,
|
||||
SW_VERSION,
|
||||
)
|
||||
|
||||
ATTR_ICON = "icon"
|
||||
ATTR_LABEL = "label"
|
||||
ATTR_UNIT = "unit"
|
||||
ATTR_LIGHTNING_PROPERTY = "lightning_prop"
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
async def async_setup_entry(hass, config_entry, async_add_entities):
    """Set up Blitzortung sensors for a config entry."""
    integration_name = config_entry.data[CONF_NAME]

    coordinator = hass.data[DOMAIN][config_entry.entry_id]

    unique_prefix = config_entry.unique_id

    # One sensor each for strike distance, azimuth and counter.
    sensors = [
        klass(coordinator, integration_name, unique_prefix)
        for klass in (DistanceSensor, AzimuthSensor, CounterSensor)
    ]

    async_add_entities(sensors, False)

    config = hass.data[DOMAIN].get("config") or {}
    if config.get(SERVER_STATS):
        # Lazily create one diagnostic sensor per interesting
        # $SYS/broker/ statistics topic as messages arrive.
        server_stat_sensors = {}

        def on_message(message):
            # Ignore everything that is not a broker statistics topic.
            if not message.topic.startswith("$SYS/broker/"):
                return
            topic = message.topic.replace("$SYS/broker/", "")
            # For load stats keep only the 1-minute averages.
            if topic.startswith("load") and not topic.endswith("/1min"):
                return
            # For client stats keep only the connected-clients counter.
            if topic.startswith("clients") and topic != "clients/connected":
                return
            sensor = server_stat_sensors.get(topic)
            if not sensor:
                sensor = ServerStatSensor(
                    topic, coordinator, integration_name, unique_prefix
                )
                server_stat_sensors[topic] = sensor
                async_add_entities([sensor], False)
            sensor.on_message(topic, message)

        coordinator.register_message_receiver(on_message)
|
||||
|
||||
|
||||
class BlitzortungSensor(SensorEntity):
    """Define a Blitzortung sensor.

    Base class: subclasses set ``kind`` and override the ``update_lightning``
    / ``on_message`` / ``tick`` hooks called by the coordinator.
    """

    def __init__(self, coordinator, integration_name, unique_prefix):
        """Initialize."""
        self.coordinator = coordinator
        self._integration_name = integration_name
        self.entity_id = f"sensor.{integration_name}-{self.name}"
        self._unique_id = f"{unique_prefix}-{self.kind}"
        self._device_class = None
        self._attrs = {ATTR_ATTRIBUTION: ATTRIBUTION}

    # Updates are pushed by the coordinator, not polled.
    should_poll = False
    icon = "mdi:flash"
    device_class = None

    @property
    def available(self):
        # The sensor is only meaningful while the MQTT feed is connected.
        return self.coordinator.is_connected

    @property
    def label(self):
        return self.kind.capitalize()

    @property
    def name(self):
        """Return the name."""
        return f"Lightning {self.label}"

    @property
    def extra_state_attributes(self):
        """Return the state attributes."""
        return self._attrs

    @property
    def unique_id(self):
        """Return a unique_id for this entity."""
        return self._unique_id

    async def async_added_to_hass(self):
        """Connect to dispatcher listening for entity data notifications."""
        # self.async_on_remove(self.coordinator.async_add_listener(self._update_sensor))
        self.coordinator.register_sensor(self)

    async def async_update(self):
        await self.coordinator.async_request_refresh()

    @property
    def device_info(self):
        """Describe the (virtual) lightning-detector device this entity
        belongs to."""
        return {
            "name": f"{self._integration_name} Lightning Detector",
            "identifiers": {(DOMAIN, self._integration_name)},
            "model": "Lightning Detector",
            "sw_version": SW_VERSION,
            "entry_type": DeviceEntryType.SERVICE,
        }

    def update_lightning(self, lightning):
        # Hook: called by the coordinator for each lightning strike event.
        pass

    def on_message(self, message):
        # Hook: called by the coordinator for raw MQTT messages.
        pass

    def tick(self):
        # Hook: called periodically by the coordinator.
        pass
|
||||
|
||||
|
||||
class LightningSensor(BlitzortungSensor):
    """Base class for sensors that fall back to an initial value when the
    coordinator reports no recent lightning activity."""

    INITIAL_STATE = None

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self._attr_native_value = self.INITIAL_STATE

    def tick(self):
        """Reset to INITIAL_STATE once the coordinator becomes inactive."""
        needs_reset = (
            self._attr_native_value != self.INITIAL_STATE
            and self.coordinator.is_inactive
        )
        if needs_reset:
            self._attr_native_value = self.INITIAL_STATE
            self.async_write_ha_state()
|
||||
|
||||
|
||||
class DistanceSensor(LightningSensor):
    """Distance to the most recent lightning strike, in kilometers."""

    kind = SensorDeviceClass.DISTANCE
    device_class = SensorDeviceClass.DISTANCE
    state_class = SensorStateClass.MEASUREMENT
    _attr_native_unit_of_measurement = UnitOfLength.KILOMETERS

    def update_lightning(self, lightning):
        """Record the distance and coordinates of the latest strike."""
        self._attr_native_value = lightning["distance"]
        self._attrs[ATTR_LAT] = lightning[ATTR_LAT]
        self._attrs[ATTR_LON] = lightning[ATTR_LON]
        self.async_write_ha_state()
|
||||
|
||||
|
||||
class AzimuthSensor(LightningSensor):
    """Bearing (in degrees) of the most recent lightning strike."""

    kind = ATTR_LIGHTNING_AZIMUTH
    _attr_native_unit_of_measurement = DEGREE

    def update_lightning(self, lightning):
        """Record the azimuth and coordinates of the latest strike."""
        self._attr_native_value = lightning["azimuth"]
        self._attrs[ATTR_LAT] = lightning[ATTR_LAT]
        self._attrs[ATTR_LON] = lightning[ATTR_LON]
        self.async_write_ha_state()
|
||||
|
||||
|
||||
class CounterSensor(LightningSensor):
    """Running count of received lightning strikes."""

    kind = ATTR_LIGHTNING_COUNTER
    _attr_native_unit_of_measurement = "↯"
    # Counter starts (and resets) at zero rather than None.
    INITIAL_STATE = 0

    def update_lightning(self, lightning):
        """Increment the counter for each lightning event."""
        self._attr_native_value += 1
        self.async_write_ha_state()
|
||||
|
||||
|
||||
class ServerStatSensor(BlitzortungSensor):
    """Diagnostic sensor for a single ``$SYS/broker/`` statistics topic."""

    def __init__(self, topic, coordinator, integration_name, unique_prefix):
        self._topic = topic

        topic_parts = topic.replace("$SYS/broker/", "").split("/")
        self.kind = "_".join(topic_parts)
        # Choose a payload parser based on the topic family.
        if self.kind.startswith("load"):
            self.data_type = float
        elif self.kind in ("uptime", "version"):
            self.data_type = str
        else:
            self.data_type = int

        # The connected-clients sensor gets a fixed kind (and thereby a
        # stable unique_id) of "server_stats".
        if self.kind == "clients_connected":
            self.kind = "server_stats"

        self._name = " ".join(part.capitalize() for part in topic_parts)

        super().__init__(coordinator, integration_name, unique_prefix)

    @property
    def unit_of_measurement(self):
        # NOTE(review): "." and " " look like placeholder units used to force
        # a numeric presentation — confirm whether real units were intended.
        if self.data_type in (int, float):
            return "." if self.kind == "server_stats" else " "
        else:
            return None

    @classmethod
    def for_topic(cls, topic, coordinator, integration_name, unique_prefix):
        """Alternate constructor for factory-style callers."""
        return cls(topic, coordinator, integration_name, unique_prefix)

    def on_message(self, topic, message):
        """Parse and store the payload when a message for our topic arrives."""
        if topic == self._topic:
            payload = message.payload.decode("utf-8")
            try:
                self._attr_native_value = self.data_type(payload)
            except ValueError:
                # Fall back to the raw string when the payload doesn't parse.
                self._attr_native_value = str(payload)
            if self.hass:
                self.async_write_ha_state()

    @property
    def label(self):
        return self._name

    @property
    def name(self):
        return self._name
|
||||
29
config/custom_components/blitzortung/strings.json
Normal file
29
config/custom_components/blitzortung/strings.json
Normal file
@@ -0,0 +1,29 @@
|
||||
{
|
||||
"config": {
|
||||
"step": {
|
||||
"user": {
|
||||
"description": "Set up Blitzortung lightning detection",
|
||||
"data": {
|
||||
"name": "Name of the integration instance"
|
||||
}
|
||||
}
|
||||
},
|
||||
"error": {},
|
||||
"abort": {}
|
||||
},
|
||||
"options": {
|
||||
"step": {
|
||||
"init": {
|
||||
"title": "Blitzortung Options",
|
||||
"description": "Set up Blitzortung lightning detection options",
|
||||
"data": {
|
||||
"radius": "Lightning detection radius (km / mi)",
|
||||
"time_window": "Time window (minutes, 0 - disabled)",
|
||||
"max_tracked_lightnings": "Max number of tracked lightnings",
|
||||
"latitude": "Latitude",
|
||||
"longitude": "Longitude"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
29
config/custom_components/blitzortung/translations/en.json
Normal file
29
config/custom_components/blitzortung/translations/en.json
Normal file
@@ -0,0 +1,29 @@
|
||||
{
|
||||
"config": {
|
||||
"step": {
|
||||
"user": {
|
||||
"description": "Set up Blitzortung lightning detection",
|
||||
"data": {
|
||||
"name": "Name of the integration instance"
|
||||
}
|
||||
}
|
||||
},
|
||||
"error": {},
|
||||
"abort": {}
|
||||
},
|
||||
"options": {
|
||||
"step": {
|
||||
"init": {
|
||||
"title": "Blitzortung Options",
|
||||
"description": "Set up Blitzortung lightning detection options",
|
||||
"data": {
|
||||
"radius": "Lightning detection radius (km / mi)",
|
||||
"time_window": "Time window (minutes, 0 - disabled)",
|
||||
"max_tracked_lightnings": "Max number of tracked lightnings",
|
||||
"latitude": "Latitude",
|
||||
"longitude": "Longitude"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
29
config/custom_components/blitzortung/translations/fi.json
Normal file
29
config/custom_components/blitzortung/translations/fi.json
Normal file
@@ -0,0 +1,29 @@
|
||||
{
|
||||
"config": {
|
||||
"step": {
|
||||
"user": {
|
||||
"description": "Määritä Blitzortung ukkostutka",
|
||||
"data": {
|
||||
"name": "Integraation instanssin nimi"
|
||||
}
|
||||
}
|
||||
},
|
||||
"error": {},
|
||||
"abort": {}
|
||||
},
|
||||
"options": {
|
||||
"step": {
|
||||
"init": {
|
||||
"title": "Blitzortung Asetukset",
|
||||
"description": "Määritä Blitzortung ukkostutkan asetukset",
|
||||
"data": {
|
||||
"radius": "Salamoiden seuranta-alue (km / mi)",
|
||||
"time_window": "Aikaikkuna (minuuttia, 0 - ei käytössä)",
|
||||
"max_tracked_lightnings": "Seurattavien salamoiden enimmäismäärä",
|
||||
"latitude": "Leveysaste",
|
||||
"longitude": "Pituusaste"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
29
config/custom_components/blitzortung/translations/fr.json
Normal file
29
config/custom_components/blitzortung/translations/fr.json
Normal file
@@ -0,0 +1,29 @@
|
||||
{
|
||||
"config": {
|
||||
"step": {
|
||||
"user": {
|
||||
"description": "Configurer Blitzortung détection de fourdre",
|
||||
"data": {
|
||||
"name": "Nom de l'instance de cette intégration"
|
||||
}
|
||||
}
|
||||
},
|
||||
"error": {},
|
||||
"abort": {}
|
||||
},
|
||||
"options": {
|
||||
"step": {
|
||||
"init": {
|
||||
"title": "Options Blitzortung ",
|
||||
"description": "Configurer les options de Blitzortung détection de foudre",
|
||||
"data": {
|
||||
"radius": "Rayon de detection de la foudre (km / mi)",
|
||||
"time_window": "Fenêtre de temps (minutes, 0 - désactivé)",
|
||||
"max_tracked_lightnings": "Nombre maximum d'éclairs suivis",
|
||||
"latitude": "Latitude",
|
||||
"longitude": "Longitude"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
29
config/custom_components/blitzortung/translations/hr.json
Normal file
29
config/custom_components/blitzortung/translations/hr.json
Normal file
@@ -0,0 +1,29 @@
|
||||
{
|
||||
"config": {
|
||||
"step": {
|
||||
"user": {
|
||||
"description": "Postavke Blitzortung senzora groma",
|
||||
"data": {
|
||||
"name": "Naziv instance integracije"
|
||||
}
|
||||
}
|
||||
},
|
||||
"error": {},
|
||||
"abort": {}
|
||||
},
|
||||
"options": {
|
||||
"step": {
|
||||
"init": {
|
||||
"title": "Blitzortung opcije",
|
||||
"description": "Podesite Blitzortung opcije otkrivanja groma",
|
||||
"data": {
|
||||
"radius": "Radijus prepoznavanja groma (km / mi)",
|
||||
"time_window": "Vremenski period (minuta, 0 - onemogućeno)",
|
||||
"max_tracked_lightnings": "Maksimalan broj praćenih gromova",
|
||||
"latitude": "Zemljopisna širina",
|
||||
"longitude": "Zemljopisna dužina"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
29
config/custom_components/blitzortung/translations/nb.json
Normal file
29
config/custom_components/blitzortung/translations/nb.json
Normal file
@@ -0,0 +1,29 @@
|
||||
{
|
||||
"config": {
|
||||
"step": {
|
||||
"user": {
|
||||
"description": "Sett opp lynregistrering av Blitzortung",
|
||||
"data": {
|
||||
"name": "Navnet på integrasjonsforekomsten"
|
||||
}
|
||||
}
|
||||
},
|
||||
"error": {},
|
||||
"abort": {}
|
||||
},
|
||||
"options": {
|
||||
"step": {
|
||||
"init": {
|
||||
"title": "Blitzortung-alternativer",
|
||||
"description": "Sett opp Blitzortung lyndeteksjonsalternativer",
|
||||
"data": {
|
||||
"radius": "Lyndeteksjonsradius (km / mi)",
|
||||
"time_window": "Tidsvindu (minutter, 0 - deaktivert)",
|
||||
"max_tracked_lightnings": "Maks antall sporede lyn",
|
||||
"latitude": "Breddegrad",
|
||||
"longitude": "Lengdegrad"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
29
config/custom_components/blitzortung/translations/nl.json
Normal file
29
config/custom_components/blitzortung/translations/nl.json
Normal file
@@ -0,0 +1,29 @@
|
||||
{
|
||||
"config": {
|
||||
"step": {
|
||||
"user": {
|
||||
"description": "Blitzortung bliksemdetectie instellen",
|
||||
"data": {
|
||||
"name": "Naam van de integratie-instantie"
|
||||
}
|
||||
}
|
||||
},
|
||||
"error": {},
|
||||
"abort": {}
|
||||
},
|
||||
"options": {
|
||||
"step": {
|
||||
"init": {
|
||||
"title": "Blitzortung Opties",
|
||||
"description": "Blitzortung bliksemdetectie opties instellen",
|
||||
"data": {
|
||||
"radius": "Bliksemdetectie-radius (km / mi)",
|
||||
"time_window": "Tijdvenster (minuten, 0 - uitgeschakeld)",
|
||||
"max_tracked_lightnings": "Max aantal gevolgde bliksems",
|
||||
"latitude": "Breedtegraad",
|
||||
"longitude": "Lengtegraad"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
29
config/custom_components/blitzortung/translations/pl.json
Normal file
29
config/custom_components/blitzortung/translations/pl.json
Normal file
@@ -0,0 +1,29 @@
|
||||
{
|
||||
"config": {
|
||||
"step": {
|
||||
"user": {
|
||||
"description": "Konfiguracja wykrywania błyskawic Blitzortung",
|
||||
"data": {
|
||||
"name": "Nazwa instancji integracji"
|
||||
}
|
||||
}
|
||||
},
|
||||
"error": {},
|
||||
"abort": {}
|
||||
},
|
||||
"options": {
|
||||
"step": {
|
||||
"init": {
|
||||
"title": "Opcje Blitzortung",
|
||||
"description": "Konfiguracja wykrywania błyskawic Blitzortung",
|
||||
"data": {
|
||||
"radius": "Promień wykrywania błyskawic (km / mi)",
|
||||
"time_window": "Okno czasowe (minuty, 0 - wyłączony)",
|
||||
"max_tracked_lightnings": "Maksymalna ilość śledzonych błyskawic",
|
||||
"latitude": "Szerokość geograficzna",
|
||||
"longitude": "Długość geograficzna"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
29
config/custom_components/blitzortung/translations/sl.json
Normal file
29
config/custom_components/blitzortung/translations/sl.json
Normal file
@@ -0,0 +1,29 @@
|
||||
{
|
||||
"config": {
|
||||
"step": {
|
||||
"user": {
|
||||
"description": "Postavke zaznavanja strel Blitzortung",
|
||||
"data": {
|
||||
"name": "Ime integracije"
|
||||
}
|
||||
}
|
||||
},
|
||||
"error": {},
|
||||
"abort": {}
|
||||
},
|
||||
"options": {
|
||||
"step": {
|
||||
"init": {
|
||||
"title": "Možnosti Blitzortung",
|
||||
"description": "Nastavite možnosti zaznavanja strel Blitzortung",
|
||||
"data": {
|
||||
"radius": "Doseg zaznavanja strel (km / mi)",
|
||||
"time_window": "Časovni okvir (v minutah, 0 - disabled)",
|
||||
"max_tracked_lightnings": "Maksimalno število zaznanih strel",
|
||||
"latitude": "Zemljepisna širina središča zaznavanja",
|
||||
"longitude": "Zemljepisna dolžina središča zaznavanja"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
1
config/custom_components/blitzortung/version.py
Normal file
1
config/custom_components/blitzortung/version.py
Normal file
@@ -0,0 +1 @@
|
||||
__version__ = "1.1.0"
|
||||
Reference in New Issue
Block a user