Home Assistant Git Exporter

This commit is contained in:
root
2024-08-09 06:45:02 +02:00
parent 60abdd866c
commit 80fc630f5e
624 changed files with 27739 additions and 4497 deletions

View File

@@ -22,6 +22,10 @@ HA Add-ons by alexbelgium:
maintainer: sguernion
slug: 3bff5a27
source: https://github.com/rfxcom2mqtt/hassio-addons
'Home Assistant Add-on: Zigbee2MQTT':
maintainer: Koen Kanters <koenkanters94@gmail.com>
slug: 45df7312
source: https://github.com/zigbee2mqtt/hassio-zigbee2mqtt
Home Assistant Community Add-ons:
maintainer: Franck Nijhof <frenck@addons.community>
slug: a0d7b954
@@ -34,6 +38,10 @@ JDeath Addons:
maintainer: jdeath
slug: 2effc9b9
source: https://github.com/jdeath/homeassistant-addons
Music Assistant:
maintainer: Music Assistant <marcelveldt@users.noreply.github.com>
slug: d5369777
source: https://github.com/music-assistant/home-assistant-addon
NSPanel Manager:
maintainer: NSPanel Manager Team <info@nspanelmanager.com>
slug: a5d2b728

View File

@@ -1 +1 @@
2024.5.5
2024.8.0

222
config/.shopping_list.json Normal file
View File

@@ -0,0 +1,222 @@
[
{
"name": " 2 flutes",
"id": "767de33c845f4167acc9f1dd523ee3ac",
"complete": true
},
{
"name": "Torsades panzani",
"id": "d3e86608a82349d297d7cb5058c500e7",
"complete": true
},
{
"name": "Essui tout",
"id": "0d317248afe54b6d8c4572775360a3b4",
"complete": true
},
{
"name": "2 Choco",
"id": "0057ba61739a45fa91d31eb84ef7056e",
"complete": true
},
{
"name": "Dates",
"id": "5b702fc3a47244c7bfce29e27eb6b19f",
"complete": false
},
{
"name": "Bananes",
"id": "6aa090cb21b141f8a2b37386e5896711",
"complete": false
},
{
"name": "Abricot",
"id": "f7c78e2d17f446e0a7c862e70328254e",
"complete": true
},
{
"name": "Beurre",
"id": "8cb3d58cd54f460c99a1ed2b265a3784",
"complete": true
},
{
"name": "creme dessert",
"id": "c56d75c68e48429593ee586c98c1823a",
"complete": true
},
{
"name": "Petit pois",
"id": "63e69a4131424922adf82dcde7199d01",
"complete": true
},
{
"name": "Jus de fruit",
"id": "12f80d83e3ee401ea708b75a662b7088",
"complete": true
},
{
"name": "allumettes",
"id": "de19c1fac9ce47b3a256fad7038f56e2",
"complete": true
},
{
"name": "cafe",
"id": "75e54ab01fd7493eaeeace72262db613",
"complete": true
},
{
"name": "sucre",
"id": "0dbdec86fa474f9e8b6e61917a3b811c",
"complete": true
},
{
"name": "lait",
"id": "ca9ce2289dca47a382c084f4a4a27637",
"complete": true
},
{
"name": "Creme vanilles",
"id": "14062f6ec2744523a03151b6d1ce3ff8",
"complete": true
},
{
"name": "Peches",
"id": "21d7b3d751aa4f6f9a00d9fe2bfb08b1",
"complete": true
},
{
"name": "Lave vaisselle",
"id": "c26afbde9f984049adb00c4b85ebf9ae",
"complete": true
},
{
"name": "Sirop citron",
"id": "a3399494241f4f6696a3ec8469f9c179",
"complete": true
},
{
"name": "Choco",
"id": "a74201251281498384db6944ff27ce59",
"complete": true
},
{
"name": "Lait",
"id": "78dfd45120b04d798ac63020635b096c",
"complete": true
},
{
"name": "Bleu",
"id": "6124a137218348009e641c02c7d36d9d",
"complete": true
},
{
"name": "Blanc",
"id": "2e603522f7dd4d50a31af1acd5986c89",
"complete": true
},
{
"name": "Vinaigre blanc",
"id": "a4f1a2acde9e4cf29804036b353d8a45",
"complete": true
},
{
"name": "Grain jaune mouche",
"id": "164ff5e89df14b13bc1e9fc4eccbf859",
"complete": true
},
{
"name": "eponge",
"id": "7bdaf185b84d408ca1bc510e3f184c8b",
"complete": true
},
{
"name": "Fromage blanc",
"id": "7eec2511dacc4cd2bc41057ccc46b70c",
"complete": true
},
{
"name": "fromage bleu",
"id": "f1cbdd4436d648e7b7634a7cf5c2796b",
"complete": true
},
{
"name": "Sauce salade",
"id": "9ed3ece90d494813acf6326c8c05a150",
"complete": true
},
{
"name": "Carrote",
"id": "f0f98a87cddc4dea88d9057ad6f59e59",
"complete": true
},
{
"name": "oignon jaune",
"id": "85d93f5718e64a9ebc68ddb485eeff70",
"complete": true
},
{
"name": "Carotte surgelé ",
"id": "f8fd9329ee304364a427aa5d05c5ba44",
"complete": true
},
{
"name": "Bieres",
"id": "1f02899f840e4c179c7e3cee11ca182c",
"complete": true
},
{
"name": "Yaourt nature",
"id": "8e967d12cebd474bb642f65bc6eafa8a",
"complete": true
},
{
"name": "Iles flottantes",
"id": "6b3cff6926c14915b9bb1496c2d49a3e",
"complete": true
},
{
"name": "Aliment poule",
"id": "941e3e0206e04e34924ec1a0bde9a46b",
"complete": true
},
{
"name": "Cassis",
"id": "71f61483307245e9a05c4085daeff9ab",
"complete": true
},
{
"name": "Riz",
"id": "a1640819666a404db09009d32494d5e3",
"complete": true
},
{
"name": "pernod",
"id": "01d581130273469f90a36cb18d5f23a1",
"complete": true
},
{
"name": "Citron",
"id": "49cff244225c4390896454e3b8e96f77",
"complete": true
},
{
"name": "Gateaux",
"id": "6c61d1dd626543e99cf1c4b069ca76a1",
"complete": true
},
{
"name": "Riz sachet",
"id": "6c3a91f180f8404bb6e88b942170d33e",
"complete": true
},
{
"name": "Cafe",
"id": "c86f4ee4837044a298b0299240bde9bf",
"complete": true
},
{
"name": "Tomates grappes",
"id": "5f4420e6430a456db1180ba262a26bd8",
"complete": false
}
]

View File

@@ -0,0 +1,67 @@
db_url: mysql://homeassistant:homeassistant@core-mariadb/homeassistant?charset=utf8mb4
purge_keep_days: 30
auto_purge: true
auto_repack: true
commit_interval: 5
include:
domains:
- light
- switch
- cover
#entity_globs:
# - sensor*
entities:
- sensor.tac2100_compteur_puissance_active
- sensor.tac2100_compteur_courant
- sensor.ecowitt_tempin
- sensor.select_sql_query
- sensor.ecu_current_power
- sensor.tac2100_solar_puissance_active
- sensor.disjoncteur_domo_z_power
- sensor.tac2100_compteur_energie_active_totale
- sensor.ecu_today_energy
- sensor.ecowitt_temp
- sensor.ecowitt_dewpoint
- sensor.ecowitt_humidity
- sensor.ecowitt_humidityin
- sensor.ecowitt_baromabs
- sensor.ecowitt_dailyrain
- sensor.ecowitt_eventrain
- sensor.ecowitt_hourlyrain
- sensor.ecowitt_monthlyrain
- sensor.ecowitt_rainrate
- sensor.ecowitt_solarradiation
- sensor.ecowitt_totalrain
- sensor.ecowitt_windgust
- sensor.ecowitt_baromrel
- sensor.ecowitt_uv
- sensor.presence_cuisine_motion_state
- sensor.ecowitt_winddir
- sensor.ecowitt_maxdailygust
- sensor.ecowitt_windspeed
- sensor.ecowitt_frostpoint
- sensor.ecowitt_feelslike
- sensor.qualite_air_co2
- sensor.geiger_wemos_geiger
- sensor.froling_s3_tdeg_fumee
- sensor.froling_s3_tdeg_board
- sensor.froling_s3_tdeg_depart_chauffage
- sensor.froling_s3_tdeg_chaudiere
- sensor.esp8266_tampon_temp_4d756f_temp_retour_chauff
- sensor.froling_s3_tampon_haut
- sensor.froling_s3_tampon_bas
- sensor.esp8266_tampon_temp_4d756f_tampon_milieu
- sensor.river2_battery_level
- sensor.tac2100_compteur_tension
- sensor.energy_pj1203_solar_power_b
- sensor.energy_pj1203_solar_power_a
- sensor.energy_pj1203_solar_current_b
- sensor.energy_pj1203_solar_current_a
- sensor.energy_pj1203_solar_energy_flow_a
- sensor.energy_pj1203_solar_energy_flow_b
- sensor.energy_pj1203_solar_energy_produced_a
- sensor.energy_pj1203_solar_energy_produced_b
- sensor.ecu_current_power
- sensor.dell_5520_battery_dell5520
- sensor.blitzortung_lightning_counter
- sensor.compteur_eclair_mensuel

View File

@@ -1,42 +0,0 @@
db_url: mysql://homeassistant:homeassistant@core-mariadb/homeassistant?charset=utf8mb4
purge_keep_days: 30
auto_purge: true
include:
domains:
- climate
- binary_sensor
- input_boolean
- input_datetime
- input_number
- input_select
- sensor
- switch
- person
- device_tracker
- light
exclude:
domains:
- camera
- zone
- automation
- sun
- weather
- cover
- group
- script
- pool_pump
entity_globs:
- sensor.clock*
- sensor.date*
- sensor.glances*
- sensor.load_*m
- sensor.time*
- sensor.uptime*
- device_tracker.nmap_tracker*
entities:
- camera.front_door
- sensor.memory_free
- sensor.memory_use
- sensor.memory_use_percent
- sensor.processor_use
- weather.openweathermap

View File

@@ -1,10 +1,10 @@
- platform: systemmonitor
resources:
- type: processor_use
# - type: processor_temperature
- type: memory_free
- type: disk_use_percent
- type: disk_use
- type: disk_free
- type: load_5m
#- platform: systemmonitor
#resources:
# - type: processor_use
# - type: processor_temperature
# - type: memory_free
# - type: disk_use_percent
# - type: disk_use
# - type: disk_free
# - type: load_5m

View File

@@ -1,17 +1,17 @@
# http://10.0.0.2:8123/hacs/repository/643579135
algorithm:
initial_temp: 1000
min_temp: 0.1
cooling_factor: 0.95
max_iteration_number: 1000
devices:
- name: "prise_ecran"
entity_id: "switch.prise_ecran"
power_max: 400
check_usable_template: "{{%if states('sensor.energy_pj1203_energy_flow_b') == 'producing' %}}"
duration_min: 6
duration_stop_min: 3
action_mode: "service_call"
service_activation: switch/turn_on"
deactivation_service: "switch/turn_off"
# algorithm:
# initial_temp: 1000
# min_temp: 0.1
# cooling_factor: 0.95
# max_iteration_number: 1000
# devices:
# - name: "prise_ecran"
# entity_id: "switch.prise_ecran"
# power_max: 400
# check_usable_template: "{{%if states('sensor.energy_pj1203_energy_flow_b') == 'producing' %}}"
# duration_min: 6
# duration_stop_min: 3
# action_mode: "service_call"
# service_activation: switch/turn_on"
# deactivation_service: "switch/turn_off"

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,88 @@
blueprint:
name: On-Off schedule with state persistence
description: '# On-Off schedule with state persistence
A simple on-off schedule, with the addition of state persistence across disruptive
events, making sure the target device is always in the expected state.
📕 Full documentation regarding this blueprint is available [here](https://epmatt.github.io/awesome-ha-blueprints/docs/blueprints/automation/on_off_schedule_state_persistence).
🚀 This blueprint is part of the **[Awesome HA Blueprints](https://epmatt.github.io/awesome-ha-blueprints)
project**.
Version 2021.10.26
'
source_url: https://github.com/EPMatt/awesome-ha-blueprints/blob/main/blueprints/automation/on_off_schedule_state_persistence/on_off_schedule_state_persistence.yaml
domain: automation
input:
automation_target:
name: (Required) Automation target
description: The target which the automation will turn on and off based on the
provided schedule.
selector:
target: {}
on_time:
name: (Required) On Time
description: Time when the target should be placed in the on state.
selector:
time: {}
off_time:
name: (Required) Off Time
description: Time when the target should be placed in the off state.
selector:
time: {}
custom_trigger_event:
name: (Optional) Custom Trigger Event
description: A custom event which can trigger the state check (eg. a powercut
event reported by external integrations).
default: ''
selector:
text: {}
trigger_at_homeassistant_startup:
name: (Optional) Trigger at Home Assistant startup
description: Trigger the target state check and enforcement at Home Assistant
startup.
default: false
selector:
boolean: {}
variables:
off_time: !input 'off_time'
on_time: !input 'on_time'
trigger_at_homeassistant_startup: !input 'trigger_at_homeassistant_startup'
time_fmt: '%H:%M:%S'
first_event: '{{ on_time if strptime(on_time,time_fmt).time() < strptime(off_time,time_fmt).time()
else off_time }}'
second_event: '{{ on_time if strptime(on_time,time_fmt).time() >= strptime(off_time,time_fmt).time()
else off_time }}'
mode: single
max_exceeded: silent
trigger:
- platform: time
at:
- !input 'on_time'
- !input 'off_time'
- platform: homeassistant
event: start
- platform: event
event_type: !input 'custom_trigger_event'
condition:
- condition: template
value_template: '{{ trigger.platform!="homeassistant" or trigger_at_homeassistant_startup
}}'
action:
- choose:
- conditions:
- condition: template
value_template: '{{ now().time() >= strptime(first_event,time_fmt).time() and
now().time() < strptime(second_event,time_fmt).time() }}'
sequence:
- service: homeassistant.{{ "turn_on" if first_event == on_time else "turn_off"}}
target: !input 'automation_target'
default:
- service: homeassistant.{{ "turn_on" if second_event == on_time else "turn_off"}}
target: !input 'automation_target'

View File

@@ -0,0 +1,334 @@
blueprint:
name: Dim lights based on sun elevation
description: Adjust brightness of lights based on the current sun elevation. If
force debug is enabled, you need to execute this automation manually or let Home
Assitant restart before the change take effect.
source_url: https://github.com/EvTheFuture/homeassistant-blueprints/blob/master/blueprints/dim_lights_based_on_sun_elevation.yaml
domain: automation
input:
target_lights:
name: Lights
description: The lights to control the brightness of
selector:
target:
entity:
domain: light
max_brightness:
name: Maximum brightness percent
description: Brightness to set as the maximum brightness
default: 100
selector:
number:
min: 2.0
max: 100.0
unit_of_measurement: '%'
mode: slider
step: 1.0
min_brightness:
name: Minimum brightnes percent
description: Brightness to set as the minimum brightness
default: 1
selector:
number:
min: 1.0
max: 99.0
unit_of_measurement: '%'
mode: slider
step: 1.0
reverse:
name: Reverse brightness
description: If checked, light will start dim when sun starts to set (start
elevation value) and will be at full brightness when the elevation has reached
the end elevation value.
default: false
selector:
boolean: {}
allowance:
name: Change Allowance
description: How much can the brightnes be changed without this automation stop
updating the brightness. If set to 0% this automation will stop update the
brightness if the brightness has been changed at all since the last triggering
of this automation. If set to 100% this automation will keep on and update
the brightness even if you have manually changed the brightness to any other
value since the last trigger.
default: 0
selector:
number:
min: 0.0
max: 100.0
unit_of_measurement: '%'
mode: slider
step: 1.0
turn_on:
name: Turn on lights automatically
description: Turn on lights when sun is setting.
default: false
selector:
boolean: {}
start_elevation_setting:
name: Elevation of the sun to start dim the light when the sun is setting
default: 0
selector:
number:
min: -60.0
max: 60.0
unit_of_measurement: °
mode: slider
step: 0.5
end_elevation_setting:
name: Elevation of the sun when the light shall be fully dimmed when the sun
is setting
default: -30
selector:
number:
min: -60.0
max: 60.0
unit_of_measurement: °
mode: slider
step: 0.5
turn_off:
name: Turn off lights automatically
description: Turn off lights when sun has risen.
default: false
selector:
boolean: {}
start_elevation_rising:
name: Elevation of the sun to start brighten the light when the sun is rising
default: -8
selector:
number:
min: -60.0
max: 60.0
unit_of_measurement: °
mode: slider
step: 0.5
end_elevation_rising:
name: Elevation of the sun when the light shall have max brightness when the
sun is rising
default: 6
selector:
number:
min: -60.0
max: 60.0
unit_of_measurement: °
mode: slider
step: 0.5
transition_time:
name: Transition time in seconds between brightness values
default: 0
selector:
number:
min: 0.0
max: 5.0
unit_of_measurement: s
mode: slider
step: 0.25
debugging:
name: Debug logging
description: 'WARNING: Don''t enable this unless you have activated ''logger''
in your configuration.yaml file. Turn on debugging of this automation. In
order for this to take effect you need to manually trigger (EXECUTE) this
automation or let Home Assistant restart before debug will be turned on/off.'
default: false
selector:
boolean: {}
variables:
allowance_input: !input 'allowance'
allowance_value: '{{ allowance_input|float * 2.54 }}'
debugging: !input 'debugging'
target_lights: !input 'target_lights'
entity_list: "{%- if target_lights.entity_id is string -%}\n {{ [target_lights.entity_id]\
\ }}\n{%- else -%}\n {{ target_lights.entity_id }}\n{%- endif -%}"
transition_time: !input 'transition_time'
turn_on: !input 'turn_on'
turn_off: !input 'turn_off'
reverse: !input 'reverse'
start_setting: !input 'start_elevation_setting'
start_rising: !input 'start_elevation_rising'
end_setting: !input 'end_elevation_setting'
end_rising: !input 'end_elevation_rising'
max_brightness_input: !input 'max_brightness'
max_brightness: '{{ max_brightness_input|float }}'
min_brightness_input: !input 'min_brightness'
min_brightness: '{{ min_brightness_input|float }}'
trigger_is_event: '{{ trigger is defined and trigger.platform == ''event'' }}'
skip_event: '{{ trigger_is_event and trigger.event.data.service_data|length > 1
}}'
affected_entities: "{%- if skip_event -%}\n {{ [] }}\n{%- elif trigger is not defined\
\ or trigger.platform != 'event' or trigger.event.data.service_data is not defined\
\ or trigger.event.data.service_data.entity_id is not defined -%}\n {{ entity_list\
\ }}\n{%- else -%}\n {%- if trigger.event.data.service_data.entity_id is string\
\ -%}\n {%- set eids = [trigger.event.data.service_data.entity_id] -%}\n {%-\
\ else -%}\n {%- set eids = trigger.event.data.service_data.entity_id -%}\n\
\ {%- endif -%}\n {%- set data = namespace(e=[]) -%}\n {%- for e in eids -%}\n\
\ {%- if e in entity_list -%}\n {%- set data.e = data.e + [e] -%}\n \
\ {%- endif -%}\n {% endfor %}\n {{ data.e }}\n{%- endif -%}"
current_states: "{%- set data = namespace(e=[]) -%} {%- for e in entity_list -%}\n\
\ {%- set a = {'entity_id': e, 'state': states(e), 'brightness': state_attr(e,\
\ 'brightness')} -%}\n {%- set data.e = data.e + [a] -%}\n{%- endfor -%} {{ data.e\
\ }}"
error_msg: "{%- if start_setting|float <= end_setting|float -%}\n {{ 'Start elevation\
\ must be greater than end evevation when the sun is setting' }}\n{%- elif start_rising|float\
\ >= end_rising|float -%}\n {{ 'End elevation must be greater than start evevation\
\ when the sun is rising' }}\n{%- elif entity_list|length == 0 -%}\n {{ 'No valid\
\ entites specified or found' }}\n{%- endif -%}"
has_last: "{% if trigger is defined and trigger.platform == 'state' and trigger.from_state.entity_id\
\ == 'sun.sun' -%}\n {{ True }}\n{% else %}\n {{ False }}\n{% endif %}"
rising: '{{ state_attr(''sun.sun'', ''rising'') }}'
last_rising: '{% if has_last %}{{ trigger.from_state.attributes.rising }}{% else
%}{{ rising }}{% endif %}'
elevation: '{{ state_attr(''sun.sun'', ''elevation'') }}'
last_elevation: '{% if has_last %}{{ trigger.from_state.attributes.elevation }}{%
else %}{{ elevation }}{% endif %}'
force_turn_on: '{{ turn_on and not rising and last_elevation != "" and last_elevation
>= end_setting|float and elevation <= start_setting|float }}'
force_turn_off: '{{ turn_off and rising and last_elevation != "" and last_elevation
<= end_rising|float and elevation >= end_rising|float }}'
max_elevation: '{% if rising %}{{end_rising|float}}{% else %}{{start_setting|float}}{%
endif %}'
min_elevation: '{% if rising %}{{start_rising|float}}{% else %}{{end_setting|float}}{%
endif %}'
last_max_elevation: '{% if last_rising %}{{end_rising|float}}{% else %}{{start_setting|float}}{%
endif %}'
last_min_elevation: '{% if last_rising %}{{start_rising|float}}{% else %}{{end_setting|float}}{%
endif %}'
elevation_range: '{{ max_elevation - min_elevation }}'
last_elevation_range: '{{ last_max_elevation - last_min_elevation }}'
brightness_range: '{{ max_brightness - min_brightness }}'
delta_to_min: '{{ elevation - min_elevation }}'
last_delta_to_min: '{{ last_elevation|float - last_min_elevation }}'
full_percent_raw: '{% if delta_to_min / elevation_range < 0 %}0{% elif delta_to_min
/ elevation_range > 1 %}1{% else %}{{delta_to_min / elevation_range}}{% endif
%}'
full_percent: '{% if reverse %}{{1 - full_percent_raw}}{% else %}{{full_percent_raw}}{%
endif %}'
last_full_percent_raw: '{% if last_delta_to_min / elevation_range < 0 %}0{% elif
last_delta_to_min / elevation_range > 1 %}1{% else %}{{last_delta_to_min / elevation_range}}{%
endif %}'
last_full_percent: '{% if reverse %}{{1 - last_full_percent_raw}}{% else %}{{last_full_percent_raw}}{%
endif %}'
brightness_pct: '{{ full_percent * brightness_range + min_brightness }}'
last_brightness_pct: '{{ last_full_percent * brightness_range + min_brightness }}'
brightness: '{{ (brightness_pct * 2.54)|int }}'
last_brightness: '{{ (last_brightness_pct * 2.54)|int }}'
turn_on_entities: "{%- if force_turn_on -%}\n {%- set data = namespace(entities=[])\
\ -%}\n {%- for e in entity_list -%}\n {%- if not state_attr(e, 'supported_features')|bitwise_and(1)\
\ -%}\n {%- set data.entities = data.entities + [e] -%}\n {%- endif -%}\n\
\ {%- endfor -%}\n {{ data.entities }}\n{%- else -%}\n {{ [] }}\n{%- endif\
\ -%}"
dim_entities: "{%- set data = namespace(entities=[]) -%} {%- for e in entity_list\
\ -%}\n {%- set current_brightness = state_attr(e, 'brightness') -%}\n {%- set\
\ is_on = states(e) == 'on' -%}\n {%- set last_changed = (now() - states[e].last_changed)\
\ -%}\n {%- set can_dim = state_attr(e, 'supported_features')|bitwise_and(1)|bitwise_or(not\
\ is_on) -%}\n {#\n Set brightness and turn on if\n * Trigger is an event\
\ to turn on entity and it is currently off\n OR\n * dimming is supported\
\ by the entity AND light shall be turned on because the sun is setting (force_turn_on)\n\
\ OR\n * dimming is supported by the entity AND light is ON AND the current\
\ brightness differ from the new brightness\n AND\n * current brightness\
\ is equal to last set brightness (has not been changed by the user within the\
\ allowance)\n #}\n {%- if e in affected_entities -%}\n {%- if trigger_is_event\
\ and (not is_on or (is_on and last_changed.seconds < 2)) -%}\n {%- set data.entities\
\ = data.entities + [e] -%}\n {%- elif can_dim and force_turn_on -%}\n \
\ {%- set data.entities = data.entities + [e] -%}\n {%- elif can_dim and is_on\
\ and current_brightness != brightness and (current_brightness - last_brightness)|abs\
\ <= allowance_value -%}\n {%- set data.entities = data.entities + [e] -%}\n\
\ {%- endif -%}\n {%- endif -%}\n{%- endfor -%} {{ data.entities }}"
turn_off_entities: "{%- if force_turn_off -%}\n {{ entity_list }}\n{%- else -%}\n\
\ {{ [] }}\n{%- endif -%}"
trigger:
- platform: state
entity_id: sun.sun
attribute: elevation
- platform: event
event_type: call_service
event_data:
domain: light
service: turn_on
- platform: homeassistant
event: start
mode: queued
action:
- choose:
- conditions:
- condition: template
value_template: '{{ debugging and trigger is not defined }}'
sequence:
- service: logger.set_level
data:
homeassistant.components.blueprint.dim_lights_based_on_sun_elevation: DEBUG
- conditions:
- condition: template
value_template: '{{ debugging and trigger.platform == ''homeassistant'' and
trigger.event == ''start'' }}'
sequence:
- service: logger.set_level
data:
homeassistant.components.blueprint.dim_lights_based_on_sun_elevation: DEBUG
default:
- choose:
- conditions:
- condition: template
value_template: '{{ error_msg|length }}'
sequence:
- service: system_log.write
data:
level: error
logger: homeassistant.components.blueprint.dim_lights_based_on_sun_elevation
message: '{{ error_msg }}'
default:
- choose:
- conditions:
- condition: template
value_template: '{{ debugging }}'
sequence:
- service: system_log.write
data:
level: debug
logger: homeassistant.components.blueprint.dim_lights_based_on_sun_elevation
message: " DEBUG:\n skip_event: {{ skip_event }}\n allowance_value: {{\
\ allowance_value }}\n affected_entities: {{ affected_entities }}\n\n\
\ elevation: {{ elevation }} ({% if rising %}{{ start_rising ~ ', '\
\ ~ end_rising }}{% else %}{{ start_setting ~ ', ' ~ end_setting }}{%\
\ endif %})\n {% if last_elevation != \"\" -%}last elevation: {{ last_elevation\
\ }}\n{% endif %} new brightness: {{ brightness }}\n {% if last_elevation\
\ != \"\" -%}last brightness: {{ last_brightness }}\n{% endif %} \n\
\ current_states: {{ current_states }}\n \n force_turn_on: {{ force_turn_on\
\ }}\n force_turn_off: {{ force_turn_off }}\n \n entities: {{ entity_list\
\ }}\n \n turn_on_entities: {{ turn_on_entities }}\n \n dim_entities:\
\ {{ dim_entities }}\n \n turn_off_entities: {{ turn_off_entities }}\n\
\ \n {% if trigger is defined %}Triggered by: {{ trigger.platform }}\n\
{% endif %} {% if trigger is defined and trigger.platform == 'state'\
\ and trigger.from_state.entity_id == 'sun.sun' -%} from: (elevation:\
\ {{ trigger.from_state.attributes.elevation }}, azimuth: {{ trigger.from_state.attributes.azimuth\
\ }})\n to: (elevation: {{ trigger.to_state.attributes.elevation }},\
\ azimuth: {{ trigger.to_state.attributes.azimuth }})\n {% endif %}\
\ {% if trigger is defined and trigger.platform == 'event' -%} entity_id:\
\ {{ trigger.event.data.service_data.entity_id }}\n service_data_length:\
\ {{ trigger.event.data.service_data|length }}\n complete event data:\
\ {{ trigger.event.data }}\n {% endif %} "
default: []
- choose:
- conditions:
- condition: template
value_template: '{{ not skip_event and turn_off_entities|length > 0 }}'
sequence:
- service: light.turn_off
data:
entity_id: '{{ turn_off_entities }}'
- conditions:
- condition: template
value_template: '{{ not skip_event and turn_on_entities|length > 0 }}'
sequence:
- service: light.turn_on
data:
entity_id: '{{ turn_on_entities }}'
- conditions:
- condition: template
value_template: '{{ not skip_event and dim_entities|length > 0 }}'
sequence:
- service: light.turn_on
data:
entity_id: '{{ dim_entities }}'
brightness: '{{ brightness }}'
transition: '{{ transition_time }}'
default: []

View File

@@ -0,0 +1,51 @@
blueprint:
name: Cover - Immediate conditions
description: 'Version: 1.0.1'
domain: automation
input:
entities_condition:
name: Immediate conditions
description: Select all entities that match your immediate conditions
selector:
entity:
multiple: true
timer:
name: Timer
description: Timer used for remaining suspension time
selector:
entity:
domain: timer
multiple: false
position:
name: Desired roller shutter position
selector:
entity:
domain: input_number
multiple: false
automation:
name: Roller shutter positioning
description: Automation containing roller shutter positioning rules
selector:
entity:
domain: automation
multiple: false
source_url: https://github.com/FabienYt/home-assistant/blob/main/blueprints/automation/FabienYt/cover_immediate.yaml
mode: restart
max_exceeded: silent
trigger:
- platform: state
entity_id: !input entities_condition
action:
- service: input_number.set_value
target:
entity_id: !input position
data:
value: -1
- service: timer.cancel
target:
entity_id: !input timer
- service: automation.trigger
data:
skip_condition: true
target:
entity_id: !input automation

View File

@@ -0,0 +1,137 @@
blueprint:
name: HASPone activates a selected page after a specified period of inactivity
description: '
## Blueprint Version: `1.05.00`
# Description
Activates a selected page after a specified period of inactivity.
## HASPone Page and Button Reference
The images below show each available HASPone page along with the layout of available
button objects.
<details>
| Page 0 | Pages 1-3 | Pages 4-5 |
|--------|-----------|-----------|
| ![Page 0](https://raw.githubusercontent.com/HASwitchPlate/HASPone/main/images/NextionUI_p0_Init_Screen.png)
| ![Pages 1-3](https://raw.githubusercontent.com/HASwitchPlate/HASPone/main/images/NextionUI_p1-p3_4buttons.png)
| ![Pages 4-5](https://raw.githubusercontent.com/HASwitchPlate/HASPone/main/images/NextionUI_p4-p5_3sliders.png)
|
| Page 6 | Page 7 | Page 8 |
|--------|--------|--------|
| ![Page 6](https://raw.githubusercontent.com/HASwitchPlate/HASPone/main/images/NextionUI_p6_8buttons.png)
| ![Page 7](https://raw.githubusercontent.com/HASwitchPlate/HASPone/main/images/NextionUI_p7_12buttons.png)
| ![Page 8](https://raw.githubusercontent.com/HASwitchPlate/HASPone/main/images/NextionUI_p8_5buttons+1slider.png)
|
| Page 9 | Page 10 | Page 11 |
|--------|---------|---------|
| ![Page 9](https://raw.githubusercontent.com/HASwitchPlate/HASPone/main/images/NextionUI_p9_9buttons.png)
| ![Page 10](https://raw.githubusercontent.com/HASwitchPlate/HASPone/main/images/NextionUI_p10_5buttons.png)
| ![Page 11](https://raw.githubusercontent.com/HASwitchPlate/HASPone/main/images/NextionUI_p11_1button+1slider.png)
</details>
'
domain: automation
input:
haspdevice:
name: HASPone Device
description: Select the HASPone device
selector:
device:
integration: mqtt
manufacturer: HASwitchPlate
model: HASPone v1.0.0
multiple: false
targetpage:
name: Page to activate
description: Select a destination page for this button to activate.
default: 1
selector:
number:
min: 1.0
max: 11.0
mode: slider
unit_of_measurement: page
step: 1.0
idletime:
name: Idle Time
description: Idle time in seconds
default: 30
selector:
number:
min: 5.0
max: 900.0
step: 5.0
mode: slider
unit_of_measurement: seconds
source_url: https://github.com/HASwitchPlate/HASPone/blob/main/Home_Assistant/blueprints/hasp_Activate_Page_on_Idle.yaml
mode: restart
max_exceeded: silent
variables:
haspdevice: !input haspdevice
haspname: "{%- for entity in device_entities(haspdevice) -%}\n {%- if entity|regex_search(\"^sensor\\..+_sensor(?:_\\d+|)$\")
-%}\n {{- entity|regex_replace(find=\"^sensor\\.\", replace=\"\", ignorecase=true)|regex_replace(find=\"_sensor(?:_\\d+|)$\",
replace=\"\", ignorecase=true) -}}\n {%- endif -%}\n{%- endfor -%}"
targetpage: !input targetpage
idletime: !input idletime
pagecommandtopic: '{{ "hasp/" ~ haspname ~ "/command/page" }}'
activepage: "{%- set activepage = namespace() -%} {%- for entity in device_entities(haspdevice)
-%}\n {%- if entity|regex_search(\"^number\\..*_active_page(?:_\\d+|)$\") -%}\n
\ {%- set activepage.entity=entity -%}\n {%- endif -%}\n{%- endfor -%} {{ states(activepage.entity)
| int(default=-1) }}"
trigger_variables:
haspdevice: !input haspdevice
haspname: "{%- for entity in device_entities(haspdevice) -%}\n {%- if entity|regex_search(\"^sensor\\..+_sensor(?:_\\d+|)$\")
-%}\n {{- entity|regex_replace(find=\"^sensor\\.\", replace=\"\", ignorecase=true)|regex_replace(find=\"_sensor(?:_\\d+|)$\",
replace=\"\", ignorecase=true) -}}\n {%- endif -%}\n{%- endfor -%}"
haspsensor: "{%- for entity in device_entities(haspdevice) -%}\n {%- if entity|regex_search(\"^sensor\\..+_sensor(?:_\\d+|)$\")
-%}\n {{ entity }}\n {%- endif -%}\n{%- endfor -%}"
jsontopic: '{{ "hasp/" ~ haspname ~ "/state/json" }}'
targetpage: !input targetpage
pagejsonpayload: '{"event":"page","value":{{targetpage}}}'
trigger:
- platform: mqtt
topic: '{{jsontopic}}'
condition:
- condition: template
value_template: '{{ is_state(haspsensor, ''ON'') }}'
- condition: template
value_template: "{{-\n (trigger.payload_json.event is defined)\nand\n (trigger.payload_json.event
== 'page')\nand\n (trigger.payload_json.value is defined)\nand\n (trigger.payload_json.value
!= targetpage)\n-}}"
action:
- delay:
seconds: '{{idletime|int}}'
- condition: template
value_template: "{%- set currentpage = namespace() -%} {%- for entity in device_entities(haspdevice)
-%}\n {%- if entity|regex_search(\"^number\\..*_active_page(?:_\\d+|)$\") -%}\n
\ {%- set currentpage.entity=entity -%}\n {%- endif -%}\n{%- endfor -%} {%-
if states(currentpage.entity) == targetpage -%}\n {{false}}\n{%- else -%}\n {{true}}\n{%-
endif -%}"
- service: mqtt.publish
data:
topic: '{{pagecommandtopic}}'
payload: '{{targetpage}}'
retain: true

View File

@@ -0,0 +1,544 @@
blueprint:
name: Aqara Magic Cube
description: Control anything using Aqara Magic Cube.
domain: automation
input:
remote:
name: Magic Cube
description: Select the Aqara Magic Cube device
selector:
device:
integration: zha
manufacturer: LUMI
flip_90:
name: Flip 90 degrees
description: 'Actions to run when cube flips 90 degrees.
This cancels all specific 90 degrees functions.
e.g From side 1 to side 2 will be the same as from side 6 to side 2'
default: false
selector:
boolean: {}
cube_flip_90:
name: Flip cube 90 degrees
description: Action to run when cube flips 90 degrees. This only works if 'Flip
90 degrees' is toggled
default: []
selector:
action: {}
flip_180:
name: Flip 180 degrees
description: 'Actions to run when cube flips 180 degrees.
This cancels all specific 180 degrees functions
e.g From side 1 to side 4 will be the same as from side 5 to side 2'
default: false
selector:
boolean: {}
cube_flip_180:
name: Flip cube 180 degrees
description: Action to run when cube flips 180 degrees. This only works if 'Flip
180 degrees' is toggled
default: []
selector:
action: {}
slide_any_side:
name: Slide any side
description: 'Actions to run when cube slides on any side.
This cancels all specific ''slide'' functions
e.g Slide on side 1 will be the same as slide on side 2'
default: false
selector:
boolean: {}
cube_slide_any:
name: Slide cube on any side
description: Action to run when cube slides on any slide. This only works if
'Slide any side' is toggled
default: []
selector:
action: {}
knock_any_side:
name: Knock on any side
description: 'Actions to run when knocking cube regardless of the side.
This cancels all specific ''knock'' functions
e.g Knock on side 1 will be the same as knocking side 2'
default: false
selector:
boolean: {}
cube_knock_any:
name: Knock cube on any side
description: Action to run when knocking cube on any side. This only works if
'Knock on any side' is toggled
default: []
selector:
action: {}
one_to_two:
name: From side 1 to side 2
description: Action to run when cube goes from side 1 to side 2
default: []
selector:
action: {}
one_to_three:
name: From side 1 to side 3
description: Action to run when cube goes from side 1 to side 3
default: []
selector:
action: {}
one_to_four:
name: From side 1 to side 4
description: Action to run when cube goes from side 1 to side 4
default: []
selector:
action: {}
one_to_five:
name: From side 1 to side 5
description: Action to run when cube goes from side 1 to side 5
default: []
selector:
action: {}
one_to_six:
name: From side 1 to side 6
description: Action to run when cube goes from side 1 to side 6
default: []
selector:
action: {}
two_to_one:
name: From side 2 to side 1
description: Action to run when cube goes from side 2 to side 1
default: []
selector:
action: {}
two_to_three:
name: From side 2 to side 3
description: Action to run when cube goes from side 2 to side 3
default: []
selector:
action: {}
two_to_four:
name: From side 2 to side 4
description: Action to run when cube goes from side 2 to side 4
default: []
selector:
action: {}
two_to_five:
name: From side 2 to side 5
description: Action to run when cube goes from side 2 to side 5
default: []
selector:
action: {}
two_to_six:
name: From side 2 to side 6
description: Action to run when cube goes from side 2 to side 6
default: []
selector:
action: {}
three_to_one:
name: From side 3 to side 1
description: Action to run when cube goes from side 3 to side 1
default: []
selector:
action: {}
three_to_two:
name: From side 3 to side 2
description: Action to run when cube goes from side 3 to side 2
default: []
selector:
action: {}
three_to_four:
name: From side 3 to side 4
description: Action to run when cube goes from side 3 to side 4
default: []
selector:
action: {}
three_to_five:
name: From side 3 to side 5
description: Action to run when cube goes from side 3 to side 5
default: []
selector:
action: {}
three_to_six:
name: From side 3 to side 6
description: Action to run when cube goes from side 3 to side 6
default: []
selector:
action: {}
four_to_one:
name: From side 4 to side 1
description: Action to run when cube goes from side 4 to side 1
default: []
selector:
action: {}
four_to_two:
name: From side 4 to side 2
description: Action to run when cube goes from side 4 to side 2
default: []
selector:
action: {}
four_to_three:
name: From side 4 to side 3
description: Action to run when cube goes from side 4 to side 3
default: []
selector:
action: {}
four_to_five:
name: From side 4 to side 5
description: Action to run when cube goes from side 4 to side 5
default: []
selector:
action: {}
four_to_six:
name: From side 4 to side 6
description: Action to run when cube goes from side 4 to side 6
default: []
selector:
action: {}
five_to_one:
name: From side 5 to side 1
description: Action to run when cube goes from side 5 to side 1
default: []
selector:
action: {}
five_to_two:
name: From side 5 to side 2
description: Action to run when cube goes from side 5 to side 2
default: []
selector:
action: {}
five_to_three:
name: From side 5 to side 3
description: Action to run when cube goes from side 5 to side 3
default: []
selector:
action: {}
five_to_four:
name: From side 5 to side 4
description: Action to run when cube goes from side 5 to side 4
default: []
selector:
action: {}
five_to_six:
name: From side 5 to side 6
description: Action to run when cube goes from side 5 to side 6
default: []
selector:
action: {}
six_to_one:
name: From side 6 to side 1
description: Action to run when cube goes from side 6 to side 1
default: []
selector:
action: {}
six_to_two:
name: From side 6 to side 2
description: Action to run when cube goes from side 6 to side 2
default: []
selector:
action: {}
six_to_three:
name: From side 6 to side 3
description: Action to run when cube goes from side 6 to side 3
default: []
selector:
action: {}
six_to_four:
name: From side 6 to side 4
description: Action to run when cube goes from side 6 to side 4
default: []
selector:
action: {}
six_to_five:
name: From side 6 to side 5
description: Action to run when cube goes from side 6 to side 5
default: []
selector:
action: {}
one_to_one:
name: Knock - Side 1
description: Action to run when knocking on side 1
default: []
selector:
action: {}
two_to_two:
name: Knock - Side 2
description: Action to run when knocking on side 2
default: []
selector:
action: {}
three_to_three:
name: Knock - Side 3
description: Action to run when knocking on side 3
default: []
selector:
action: {}
four_to_four:
name: Knock - Side 4
description: Action to run when knocking on side 4
default: []
selector:
action: {}
five_to_five:
name: Knock - Side 5
description: Action to run when knocking on side 5
default: []
selector:
action: {}
six_to_six:
name: Knock - Side 6
description: Action to run when knocking on side 6
default: []
selector:
action: {}
slide_on_one:
name: Slide - Side 1 up
description: Action to run when slides with Side 1 up
default: []
selector:
action: {}
slide_on_two:
name: Slide - Side 2 up
description: Action to run when slides with Side 2 up
default: []
selector:
action: {}
slide_on_three:
name: Slide - Side 3 up
description: Action to run when slides with Side 3 up
default: []
selector:
action: {}
slide_on_four:
name: Slide - Side 4 up
description: Action to run when slides with Side 4 up
default: []
selector:
action: {}
slide_on_five:
name: Slide - Side 5 up
description: Action to run when slides with Side 5 up
default: []
selector:
action: {}
slide_on_six:
name: Slide - Side 6 up
description: Action to run when slides with Side 6 up
default: []
selector:
action: {}
cube_wake:
name: Wake up the cube
description: Action to run when cube wakes up
default: []
selector:
action: {}
cube_drop:
name: Cube drops
description: Action to run when cube drops
default: []
selector:
action: {}
cube_shake:
name: Shake cube
description: Action to run when you shake the cube
default: []
selector:
action: {}
rotate_right:
name: Rotate right
description: Action to run when cube rotates right
default: []
selector:
action: {}
rotate_left:
name: Rotate left
description: Action to run when cube rotates left
default: []
selector:
action: {}
source_url: https://community.home-assistant.io/t/aqara-magic-cube-zha-51-actions/270829
# Restart on every new cube event; superseded runs are dropped silently.
mode: restart
max_exceeded: silent
# React to raw ZHA events coming from the selected cube device.
trigger:
- platform: event
event_type: zha_event
event_data:
device_id: !input 'remote'
action:
- variables:
# Gesture name and raw payload fields from the ZHA event.
# NOTE(review): args.value / flip_degrees / relative_degrees are assumed to be
# present only for the commands that use them — confirm against the ZHA quirk.
command: '{{ trigger.event.data.command }}'
value: '{{ trigger.event.data.args.value }}'
flip_degrees: '{{ trigger.event.data.args.flip_degrees }}'
relative_degrees: '{{ trigger.event.data.args.relative_degrees }}'
flip_90: !input 'flip_90'
flip_180: !input 'flip_180'
slide_any_side: !input 'slide_any_side'
knock_any_side: !input 'knock_any_side'
# Base offsets of the cube's event-value encoding. A gesture on face N reports
# value == base + (N - 1); the 'value == <base> + activated_face - 1' conditions
# further down decode flips (180°), knocks and slides from these bases.
flip90: 64
flip180: 128
slide: 256
knock: 512
shake: 0
drop: 3
activated_face: "\n{% if command == \"slide\" or command == \"knock\" %}\n\n \
\ {% if trigger.event.data.args.activated_face == 1 %} 1\n\n {% elif trigger.event.data.args.activated_face\
\ == 2 %} 5\n\n {% elif trigger.event.data.args.activated_face == 3 %} 6\n\n\
\ {% elif trigger.event.data.args.activated_face == 4 %} 4\n\n {% elif trigger.event.data.args.activated_face\
\ == 5 %} 2\n\n {% elif trigger.event.data.args.activated_face == 6 %} 3\n\n\
\ {% endif %}\n\n{% elif command == 'flip' %}\n\n {{ trigger.event.data.args.activated_face\
\ | int }}\n\n{% endif %}\n"
from_face: "\n{% if command == \"flip\" and flip_degrees == 90 %}\n\n {{ ((value\
\ - flip90 - (trigger.event.data.args.activated_face - 1)) / 8) + 1 | int }}\n\
\n{% endif %}\n"
- choose:
- conditions:
- '{{ command == ''rotate_right'' }}'
sequence: !input 'rotate_right'
- conditions:
- '{{ command == ''rotate_left'' }}'
sequence: !input 'rotate_left'
- conditions:
- '{{ command == ''checkin'' }}'
sequence: !input 'cube_wake'
- conditions:
- '{{ value == shake }}'
sequence: !input 'cube_shake'
- conditions:
- '{{ value == drop }}'
sequence: !input 'cube_drop'
- conditions:
- '{{ command == ''knock'' and knock_any_side }}'
sequence: !input 'cube_knock_any'
- conditions:
- '{{ command == ''slide'' and slide_any_side }}'
sequence: !input 'cube_slide_any'
- conditions:
- '{{ flip_degrees == 90 and flip_90 }}'
sequence: !input 'cube_flip_90'
- conditions:
- '{{ flip_degrees == 180 and flip_180 }}'
sequence: !input 'cube_flip_180'
- conditions:
- '{{ flip_degrees == 90 and activated_face == 1 }}'
sequence:
- choose:
- conditions: '{{ from_face == 2 }}'
sequence: !input 'two_to_one'
- conditions: '{{ from_face == 3 }}'
sequence: !input 'three_to_one'
- conditions: '{{ from_face == 5 }}'
sequence: !input 'five_to_one'
- conditions: '{{ from_face == 6 }}'
sequence: !input 'six_to_one'
- conditions:
- '{{ flip_degrees == 90 and activated_face == 2 }}'
sequence:
- choose:
- conditions: '{{ from_face == 1 }}'
sequence: !input 'one_to_two'
- conditions: '{{ from_face == 3 }}'
sequence: !input 'three_to_two'
- conditions: '{{ from_face == 4 }}'
sequence: !input 'four_to_two'
- conditions: '{{ from_face == 6 }}'
sequence: !input 'six_to_two'
- conditions:
- '{{ flip_degrees == 90 and activated_face == 3 }}'
sequence:
- choose:
- conditions: '{{ from_face == 1 }}'
sequence: !input 'one_to_three'
- conditions: '{{ from_face == 2 }}'
sequence: !input 'two_to_three'
- conditions: '{{ from_face == 4 }}'
sequence: !input 'four_to_three'
- conditions: '{{ from_face == 5 }}'
sequence: !input 'five_to_three'
- conditions:
- '{{ flip_degrees == 90 and activated_face == 4 }}'
sequence:
- choose:
- conditions: '{{ from_face == 2 }}'
sequence: !input 'two_to_four'
- conditions: '{{ from_face == 3 }}'
sequence: !input 'three_to_four'
- conditions: '{{ from_face == 5 }}'
sequence: !input 'five_to_four'
- conditions: '{{ from_face == 6 }}'
sequence: !input 'six_to_four'
- conditions:
- '{{ flip_degrees == 90 and activated_face == 5 }}'
sequence:
- choose:
- conditions: '{{ from_face == 1 }}'
sequence: !input 'one_to_five'
- conditions: '{{ from_face == 3 }}'
sequence: !input 'three_to_five'
- conditions: '{{ from_face == 4 }}'
sequence: !input 'four_to_five'
- conditions: '{{ from_face == 6 }}'
sequence: !input 'six_to_five'
- conditions:
- '{{ flip_degrees == 90 and activated_face == 6 }}'
sequence:
- choose:
- conditions: '{{ from_face == 1 }}'
sequence: !input 'one_to_six'
- conditions: '{{ from_face == 2 }}'
sequence: !input 'two_to_six'
- conditions: '{{ from_face == 4 }}'
sequence: !input 'four_to_six'
- conditions: '{{ from_face == 5 }}'
sequence: !input 'five_to_six'
- conditions:
- '{{ value == flip180 + activated_face - 1 }}'
sequence:
- choose:
- conditions: '{{ activated_face == 1 }}'
sequence: !input 'four_to_one'
- conditions: '{{ activated_face == 2 }}'
sequence: !input 'five_to_two'
- conditions: '{{ activated_face == 3 }}'
sequence: !input 'six_to_three'
- conditions: '{{ activated_face == 4 }}'
sequence: !input 'one_to_four'
- conditions: '{{ activated_face == 5 }}'
sequence: !input 'two_to_five'
- conditions: '{{ activated_face == 6 }}'
sequence: !input 'three_to_six'
- conditions:
- '{{ value == knock + activated_face - 1 }}'
sequence:
- choose:
- conditions: '{{ activated_face == 1 }}'
sequence: !input 'one_to_one'
- conditions: '{{ activated_face == 2 }}'
sequence: !input 'two_to_two'
- conditions: '{{ activated_face == 3 }}'
sequence: !input 'three_to_three'
- conditions: '{{ activated_face == 4 }}'
sequence: !input 'four_to_four'
- conditions: '{{ activated_face == 5 }}'
sequence: !input 'five_to_five'
- conditions: '{{ activated_face == 6 }}'
sequence: !input 'six_to_six'
- conditions:
- '{{ value == slide + activated_face - 1 }}'
sequence:
- choose:
- conditions: '{{ activated_face == 1 }}'
sequence: !input 'slide_on_one'
- conditions: '{{ activated_face == 2 }}'
sequence: !input 'slide_on_two'
- conditions: '{{ activated_face == 3 }}'
sequence: !input 'slide_on_three'
- conditions: '{{ activated_face == 4 }}'
sequence: !input 'slide_on_four'
- conditions: '{{ activated_face == 5 }}'
sequence: !input 'slide_on_five'
- conditions: '{{ activated_face == 6 }}'
sequence: !input 'slide_on_six'

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,213 @@
blueprint:
  name: Smart Light Dimmer V3.0
  description: 'Version 3.0 ! BETA !
    Switch or dim lights based on the value of a light sensor.
    Light color and temperature can be specified (fixed value).
    Settings for brightness, temperature and color above and below min and max values
    of the sensor can be set.
    '
  source_url: https://gist.github.com/Twanne/56791e1917c751de7a72b16ee5e067cd
  domain: automation
  input:
    schedule_start:
      name: Schedule start time
      description: Automation only runs after this time.
      selector:
        time: {}
    schedule_stop:
      name: Schedule stop time
      description: Automation does not run after this time.
      selector:
        time: {}
    schedule_days:
      name: Run on these days
      description: 'Days on which the automation will run.
        Write days in short form, seperated by punctuation marks and/or spaces.
        (i.e.: mon, tue, wed,...)
        '
      selector:
        text: {}
    light_sensor_entity:
      name: Light Sensor
      selector:
        entity:
          domain: sensor
          device_class: illuminance
          multiple: false
    max_brightness_value:
      name: Maximum ambient light value
      description: Maximum ambient light value measured.
      default: 500
      selector:
        number:
          min: 0.0
          max: 1000.0
          step: 10.0
          unit_of_measurement: lx
          mode: slider
    min_brightness_value:
      name: Minimum ambient light value
      description: Minimum ambient light value measured.
      default: 0
      selector:
        number:
          min: 0.0
          max: 1000.0
          step: 10.0
          unit_of_measurement: lx
          mode: slider
    light_value_1:
      name: Dimming value 1
      description: Brightness of the light at maximum ambient light.
      default: 0
      selector:
        number:
          min: 0.0
          max: 100.0
          step: 1.0
          mode: slider
          unit_of_measurement: '%'
    light_value_2:
      name: Dimming value 2
      description: Brightness of the light at minimum ambient light.
      default: 100
      selector:
        number:
          min: 0.0
          max: 100.0
          step: 1.0
          mode: slider
          unit_of_measurement: '%'
    light_brightness_over_max:
      name: Brightness when over max ambient.
      description: Brightness of the light when the sensor measures over the maximum
        ambient light value.
      default: 0
      selector:
        number:
          min: 0.0
          max: 100.0
          step: 1.0
          mode: slider
          unit_of_measurement: '%'
    light_temp_over_max:
      name: Light temperature when over max ambient.
      description: Temperature of the light when over maximum ambient light value.
      default: 2000
      selector:
        color_temp: {}
    light_color_over_max:
      name: Light color when over max ambient.
      description: Color of the light when over maximum ambient light value.
      selector:
        color_rgb: {}
    light_brightness_under_min:
      name: Brightness when under min ambient.
      description: Brightness of the light when the sensor measures under the minimum
        ambient light value.
      default: 100
      selector:
        number:
          min: 0.0
          max: 100.0
          step: 1.0
          mode: slider
          unit_of_measurement: '%'
    light_temp_under_min:
      name: Light temperature when under min ambient.
      description: Temperature of the light when under minimum ambient light value.
      default: 2000
      selector:
        color_temp: {}
    light_color_under_min:
      name: Light color when under min ambient.
      description: Color of the light when over under minimum light value.
      selector:
        color_rgb: {}
    light_temp:
      name: Light temperature
      description: Temperature of the light when between minimum and maximum measured
        light values.
      default: 2000
      selector:
        color_temp: {}
    light_color:
      name: Light color
      description: Color of the light when between minimum and maximum measured light
        values.
      selector:
        color_rgb: {}
    target_light:
      name: Target lights
      description: which lights do you want to control?
      selector:
        target:
          entity:
            domain: light
mode: single
variables:
  light_sensor: !input light_sensor_entity
  maxB: !input max_brightness_value
  minB: !input min_brightness_value
  light1: !input light_value_1
  light2: !input light_value_2
  # Linear map from measured lux to brightness %:  pct = slope * lux + constant.
  # FIX: guard the degenerate configuration maxB == minB to avoid a
  # division-by-zero template error.
  slope: '{{ ((light1 - light2) / (maxB - minB)) if maxB != minB else 0 }}'
  constant: '{{ light1 - (slope * maxB) }}'
  days: !input schedule_days
trigger:
  platform: state
  entity_id: !input light_sensor_entity
# FIX: the original additionally required the sensor to be above the minimum
# value here, which made the "below minimum" action branch unreachable.
condition:
- condition: time
  after: !input schedule_start
  before: !input schedule_stop
- condition: template
  value_template: '{{ now().strftime(''%a'') | lower in days }}'
action:
- choose:
  # Above the maximum ambient value: fixed "over max" settings.
  - conditions:
    - condition: numeric_state
      entity_id: !input light_sensor_entity
      # FIX: numeric_state cannot resolve script variables; the original
      # 'above: maxB' compared against a literal string. Use the input directly.
      above: !input max_brightness_value
    sequence:
    - service: light.turn_on
      data:
        brightness_pct: !input light_brightness_over_max
        # FIX: light.turn_on has no 'color_rgb' parameter (correct name is
        # 'rgb_color'), and only one color attribute may be sent per call
        # (rgb_color and color_temp are mutually exclusive), so the original
        # calls never validated. The color inputs are used; the color_temp
        # inputs are kept for backward compatibility but no longer sent.
        rgb_color: !input light_color_over_max
      target: !input target_light
  # Below the minimum ambient value: fixed "under min" settings.
  - conditions:
    - condition: numeric_state
      entity_id: !input light_sensor_entity
      below: !input min_brightness_value
    sequence:
    - service: light.turn_on
      data:
        brightness_pct: !input light_brightness_under_min
        rgb_color: !input light_color_under_min
      target: !input target_light
  # Between min and max: interpolate brightness linearly from the sensor value.
  - conditions:
    - condition: numeric_state
      entity_id: !input light_sensor_entity
      below: !input max_brightness_value
      above: !input min_brightness_value
    sequence:
    - service: light.turn_on
      data:
        # FIX: the original referenced undefined variables (slope_brightness,
        # constant_brightness, slope_temperature, constant_temperature); use
        # the slope/constant variables defined above, with a safe float cast.
        brightness_pct: '{{ ((slope * (states(light_sensor) | float(0))) + constant) | round }}'
        rgb_color: !input light_color
      target: !input target_light

View File

@@ -0,0 +1,84 @@
blueprint:
  name: Motion-activated Switch
  description: Turn on a switch when motion is detected.
  domain: automation
  input:
    motion_entity:
      name: Motion Sensor
      selector:
        entity:
          domain: binary_sensor
          device_class: motion
    lightsensor_entity:
      name: Illuminance Sensor
      selector:
        entity:
          domain: sensor
          device_class: illuminance
    illuminace_level:
      name: Max Illuminance
      description: Maximal immuminance level in lux. If illuminance is higher, light
        will not be enabled
      default: 300
      selector:
        number:
          min: 0.0
          max: 5000.0
          unit_of_measurement: lux
          mode: slider
          step: 1.0
    switch_target:
      name: Switch
      selector:
        target:
          entity:
            domain: switch
    time_from:
      name: Active from
      description: A time input which defines the time from which motion is detected
      selector:
        time: {}
    time_to:
      name: Active to
      description: A time input which defines the time to which motion is detected
      selector:
        time: {}
    no_motion_wait:
      name: Wait time
      description: Time to leave the light on after last motion is detected.
      default: 120
      selector:
        number:
          min: 0.0
          max: 3600.0
          unit_of_measurement: seconds
          mode: slider
          step: 1.0
  source_url: https://community.home-assistant.io/t/turn-on-a-switch-when-motion-is-detected/255709
# Restart on every new motion event so the off-delay keeps being pushed back.
mode: restart
max_exceeded: silent
# Fire when the motion sensor flips from clear to detected.
trigger:
- platform: state
  entity_id: !input motion_entity
  from: "off"
  to: "on"
# Only act inside the configured time window and while it is dark enough
# (a plain condition list is AND-ed, so no explicit 'and' wrapper is needed).
condition:
- condition: time
  after: !input time_from
  before: !input time_to
- condition: numeric_state
  entity_id: !input lightsensor_entity
  below: !input illuminace_level
action:
- service: switch.turn_on
  target: !input switch_target
# Hold until motion clears, then keep the switch on for the grace period.
- wait_for_trigger:
  - platform: state
    entity_id: !input motion_entity
    from: "on"
    to: "off"
- delay: !input no_motion_wait
- service: switch.turn_off
  target: !input switch_target

View File

@@ -0,0 +1,121 @@
blueprint:
  name: Thermostat TPI
  description: Thermostat TPI (Time Propertional & Integral)
  domain: automation
  input:
    coeff_c:
      name: Coefficient C
      description: coefficient multiplicateur de la différence entre la consigne et
        éa température intérieure pour le calcul de la puissance (0.6 conseillé)
      selector:
        number:
          min: 0.0
          max: 1.0
          step: 0.01
          mode: slider
    coeff_t:
      name: Coefficient T
      description: coefficient multiplicateur de la différence entre la consigne et
        éa température extérieure pour le calcul de la puissance (0.01 conseillé)
      selector:
        number:
          min: 0.0
          max: 0.1
          step: 0.001
          mode: slider
    entity_consigne:
      name: Consigne
      description: Champs d'entrée de la température de consigne (input number).
      selector:
        entity:
          domain: input_number
    entity_temp_ext:
      name: Température extérieure
      description: Sonde de mesure de la température extérieure (sensor)
      selector:
        entity:
          domain: sensor
          device_class: temperature
    entity_temp_int:
      name: Température intérieure
      description: Sonde de mesure de la température intérieure (sensor)
      selector:
        entity:
          domain: sensor
          device_class: temperature
    entity_fenetre:
      name: Fenètre
      description: Capteur d'ouverture de fenêtre (sensor)
      selector:
        entity:
          domain: binary_sensor
          device_class: opening
    entity_puissance:
      name: Puissance
      description: Champs d'affichage de la puissance (input_number)
      selector:
        entity:
          domain: input_number
    entity_chauffage:
      name: Chauffage
      description: Interrupteur marche / arrêt du chauffage (switch)
      selector:
        entity:
          domain: switch
  source_url: https://github.com/argonaute199/chauffage-home-assistant/blob/main/blueprint/thermostat_tpi.yaml
variables:
  coeff_c: !input 'coeff_c'
  coeff_t: !input 'coeff_t'
  entity_temp_int: !input 'entity_temp_int'
  entity_temp_ext: !input 'entity_temp_ext'
  entity_fenetre: !input 'entity_fenetre'
# Re-evaluate every 10 minutes (one TPI cycle) and on setpoint/window changes.
trigger:
- platform: time_pattern
  minutes: /10
- platform: state
  entity_id: !input 'entity_consigne'
- platform: state
  entity_id: !input 'entity_fenetre'
action:
- alias: récupération des données
  variables:
    entity_consigne: !input 'entity_consigne'
    # FIX: states() returns strings ('21.5', or 'unavailable'/'unknown' when an
    # entity drops off); cast explicitly so the arithmetic below cannot abort
    # the automation. Fallbacks are chosen fail-safe: a missing setpoint reads
    # as 0 (heating effectively off); a missing temperature probe reads as the
    # setpoint itself (its error term becomes zero).
    consigne: '{{ states(entity_consigne) | float(0) }}'
    temp_ext: '{{ states(entity_temp_ext) | float(consigne) }}'
    temp_int: '{{ states(entity_temp_int) | float(consigne) }}'
    fenetre: '{{ states(entity_fenetre) }}'
    # TPI duty cycle in %: coeff_c weighs the indoor error, coeff_t the outdoor
    # error; the result is clamped to [0, 100] and forced to 0 while the window
    # is open (same clamping rules as the original, restructured for clarity).
    puissance: >-
      {%- set val = coeff_c * (consigne - temp_int) + coeff_t * (consigne - temp_ext) -%}
      {%- if fenetre == 'on' or val < 0 -%} 0
      {%- elif val > 1 -%} 100
      {%- else -%} {{ (val * 100) | round(0) }}
      {%- endif -%}
    # Heating time in seconds within the 600 s cycle (puissance % of 10 min).
    temps_chauffe: '{{ puissance * 6 }}'
- alias: Met à jour l'indicateur de puissance
  service: input_number.set_value
  target:
    entity_id: !input 'entity_puissance'
  data:
    value: '{{ puissance }}'
- choose:
  # 0 % -> keep the heater off for the whole cycle.
  - conditions:
    - condition: template
      value_template: '{{ puissance == 0 }}'
    sequence:
    - service: switch.turn_off
      target:
        entity_id: !input 'entity_chauffage'
  # 100 % -> keep the heater on for the whole cycle.
  - conditions:
    - condition: template
      value_template: '{{ puissance > 99 }}'
    sequence:
    - service: switch.turn_on
      target:
        entity_id: !input 'entity_chauffage'
  # Otherwise pulse: on for temps_chauffe seconds, then off. 'restart' mode
  # aborts the delay whenever the next cycle or a setpoint/window change fires.
  default:
  - service: switch.turn_on
    target:
      entity_id: !input 'entity_chauffage'
  - delay: '{{ temps_chauffe }}'
  - service: switch.turn_off
    target:
      entity_id: !input 'entity_chauffage'
mode: restart

View File

@@ -0,0 +1,46 @@
blueprint:
  name: Door Sensor-activated Light
  description: Turn on a light when door is opened.
  domain: automation
  input:
    doorsensor_entity:
      name: Door Sensor
      selector:
        entity:
          domain: binary_sensor
    light_target:
      name: Light
      selector:
        target:
          entity:
            domain: light
    door_closed_wait:
      name: Wait time
      description: Time to leave the light on after door is closed
      default: 120
      selector:
        number:
          min: 0.0
          max: 3600.0
          unit_of_measurement: seconds
          mode: slider
          step: 1.0
  source_url: https://community.home-assistant.io/t/door-sensor-turn-on-off-light/255657
# One run at a time; re-opens during the wait are ignored rather than queued.
mode: single
max_exceeded: silent
# Door just opened (binary_sensor off -> on).
trigger:
- platform: state
  entity_id: !input doorsensor_entity
  from: "off"
  to: "on"
action:
- service: light.turn_on
  target: !input light_target
# Wait until the door closes again, then leave the light on a little longer.
- wait_for_trigger:
  - platform: state
    entity_id: !input doorsensor_entity
    from: "on"
    to: "off"
- delay: !input door_closed_wait
- service: light.turn_off
  target: !input light_target

View File

@@ -0,0 +1,617 @@
blueprint:
name: Aqara Magic Cube
description: Control anything using Aqara Magic Cube.
domain: automation
input:
remote:
name: Magic Cube
description: Select the Aqara Magic Cube device
selector:
device:
integration: zha
manufacturer: LUMI
flip_90:
name: Flip 90 degrees
description: 'Actions to run when cube flips 90 degrees.
This cancels all specific 90 degrees functions.
e.g From side 1 to side 2 will be the same as from side 6 to side 2'
default: false
selector:
boolean: {}
cube_flip_90:
name: Flip cube 90 degrees
description: Action to run when cube flips 90 degrees. This only works if 'Flip
90 degrees' is toggled
default: []
selector:
action: {}
flip_180:
name: Flip 180 degrees
description: 'Actions to run when cube flips 180 degrees.
This cancels all specific 180 degrees functions
e.g From side 1 to side 4 will be the same as from side 5 to side 2'
default: false
selector:
boolean: {}
cube_flip_180:
name: Flip cube 180 degrees
description: Action to run when cube flips 180 degrees. This only works if 'Flip
180 degrees' is toggled
default: []
selector:
action: {}
flip_any:
name: Flip to any side
description: 'Actions to run when cube flips to any side.
This cares about the end side, but cancels all specific flip functions.
e.g From side 1 to side 2 will be the same as from side 6 to side 2
but different than side 1 to side 5'
default: false
selector:
boolean: {}
cube_flip_1:
name: Flip cube to side 1
description: Action to run when cube flips to side 1. This only works if 'Flip
any' is toggled
default: []
selector:
action: {}
cube_flip_2:
name: Flip cube to side 2
description: Action to run when cube flips to side 2. This only works if 'Flip
any' is toggled
default: []
selector:
action: {}
cube_flip_3:
name: Flip cube to side 3
description: Action to run when cube flips to side 3. This only works if 'Flip
any' is toggled
default: []
selector:
action: {}
cube_flip_4:
name: Flip cube to side 4
description: Action to run when cube flips to side 4. This only works if 'Flip
any' is toggled
default: []
selector:
action: {}
cube_flip_5:
name: Flip cube to side 5
description: Action to run when cube flips to side 5. This only works if 'Flip
any' is toggled
default: []
selector:
action: {}
cube_flip_6:
name: Flip cube to side 6
description: Action to run when cube flips to side 6. This only works if 'Flip
any' is toggled
default: []
selector:
action: {}
slide_any_side:
name: Slide any side
description: 'Actions to run when cube slides on any side.
This cancels all specific ''slide'' functions
e.g Slide on side 1 will be the same as slide on side 2'
default: false
selector:
boolean: {}
cube_slide_any:
name: Slide cube on any side
description: Action to run when cube slides on any slide. This only works if
'Slide any side' is toggled
default: []
selector:
action: {}
knock_any_side:
name: Knock on any side
description: 'Actions to run when knocking cube regardless of the side.
This cancels all specific ''knock'' functions
e.g Knock on side 1 will be the same as knocking side 2'
default: false
selector:
boolean: {}
cube_knock_any:
name: Knock cube on any side
description: Action to run when knocking cube on any side. This only works if
'Knock on any side' is toggled
default: []
selector:
action: {}
one_to_two:
name: From side 1 to side 2
description: Action to run when cube goes from side 1 to side 2
default: []
selector:
action: {}
one_to_three:
name: From side 1 to side 3
description: Action to run when cube goes from side 1 to side 3
default: []
selector:
action: {}
one_to_four:
name: From side 1 to side 4
description: Action to run when cube goes from side 1 to side 4
default: []
selector:
action: {}
one_to_five:
name: From side 1 to side 5
description: Action to run when cube goes from side 1 to side 5
default: []
selector:
action: {}
one_to_six:
name: From side 1 to side 6
description: Action to run when cube goes from side 1 to side 6
default: []
selector:
action: {}
two_to_one:
name: From side 2 to side 1
description: Action to run when cube goes from side 2 to side 1
default: []
selector:
action: {}
two_to_three:
name: From side 2 to side 3
description: Action to run when cube goes from side 2 to side 3
default: []
selector:
action: {}
two_to_four:
name: From side 2 to side 4
description: Action to run when cube goes from side 2 to side 4
default: []
selector:
action: {}
two_to_five:
name: From side 2 to side 5
description: Action to run when cube goes from side 2 to side 5
default: []
selector:
action: {}
two_to_six:
name: From side 2 to side 6
description: Action to run when cube goes from side 2 to side 6
default: []
selector:
action: {}
three_to_one:
name: From side 3 to side 1
description: Action to run when cube goes from side 3 to side 1
default: []
selector:
action: {}
three_to_two:
name: From side 3 to side 2
description: Action to run when cube goes from side 3 to side 2
default: []
selector:
action: {}
three_to_four:
name: From side 3 to side 4
description: Action to run when cube goes from side 3 to side 4
default: []
selector:
action: {}
three_to_five:
name: From side 3 to side 5
description: Action to run when cube goes from side 3 to side 5
default: []
selector:
action: {}
three_to_six:
name: From side 3 to side 6
description: Action to run when cube goes from side 3 to side 6
default: []
selector:
action: {}
four_to_one:
name: From side 4 to side 1
description: Action to run when cube goes from side 4 to side 1
default: []
selector:
action: {}
four_to_two:
name: From side 4 to side 2
description: Action to run when cube goes from side 4 to side 2
default: []
selector:
action: {}
four_to_three:
name: From side 4 to side 3
description: Action to run when cube goes from side 4 to side 3
default: []
selector:
action: {}
four_to_five:
name: From side 4 to side 5
description: Action to run when cube goes from side 4 to side 5
default: []
selector:
action: {}
four_to_six:
name: From side 4 to side 6
description: Action to run when cube goes from side 4 to side 6
default: []
selector:
action: {}
five_to_one:
name: From side 5 to side 1
description: Action to run when cube goes from side 5 to side 1
default: []
selector:
action: {}
five_to_two:
name: From side 5 to side 2
description: Action to run when cube goes from side 5 to side 2
default: []
selector:
action: {}
five_to_three:
name: From side 5 to side 3
description: Action to run when cube goes from side 5 to side 3
default: []
selector:
action: {}
five_to_four:
name: From side 5 to side 4
description: Action to run when cube goes from side 5 to side 4
default: []
selector:
action: {}
five_to_six:
name: From side 5 to side 6
description: Action to run when cube goes from side 5 to side 6
default: []
selector:
action: {}
six_to_one:
name: From side 6 to side 1
description: Action to run when cube goes from side 6 to side 1
default: []
selector:
action: {}
six_to_two:
name: From side 6 to side 2
description: Action to run when cube goes from side 6 to side 2
default: []
selector:
action: {}
six_to_three:
name: From side 6 to side 3
description: Action to run when cube goes from side 6 to side 3
default: []
selector:
action: {}
six_to_four:
name: From side 6 to side 4
description: Action to run when cube goes from side 6 to side 4
default: []
selector:
action: {}
six_to_five:
name: From side 6 to side 5
description: Action to run when cube goes from side 6 to side 5
default: []
selector:
action: {}
one_to_one:
name: Knock - Side 1
description: Action to run when knocking on side 1
default: []
selector:
action: {}
two_to_two:
name: Knock - Side 2
description: Action to run when knocking on side 2
default: []
selector:
action: {}
three_to_three:
name: Knock - Side 3
description: Action to run when knocking on side 3
default: []
selector:
action: {}
four_to_four:
name: Knock - Side 4
description: Action to run when knocking on side 4
default: []
selector:
action: {}
five_to_five:
name: Knock - Side 5
description: Action to run when knocking on side 5
default: []
selector:
action: {}
six_to_six:
name: Knock - Side 6
description: Action to run when knocking on side 6
default: []
selector:
action: {}
slide_on_one:
name: Slide - Side 1 up
description: Action to run when slides with Side 1 up
default: []
selector:
action: {}
slide_on_two:
name: Slide - Side 2 up
description: Action to run when slides with Side 2 up
default: []
selector:
action: {}
slide_on_three:
name: Slide - Side 3 up
description: Action to run when slides with Side 3 up
default: []
selector:
action: {}
slide_on_four:
name: Slide - Side 4 up
description: Action to run when slides with Side 4 up
default: []
selector:
action: {}
slide_on_five:
name: Slide - Side 5 up
description: Action to run when slides with Side 5 up
default: []
selector:
action: {}
slide_on_six:
name: Slide - Side 6 up
description: Action to run when slides with Side 6 up
default: []
selector:
action: {}
cube_wake:
name: Wake up the cube
description: Action to run when cube wakes up
default: []
selector:
action: {}
cube_drop:
name: Cube drops
description: Action to run when cube drops
default: []
selector:
action: {}
cube_shake:
name: Shake cube
description: Action to run when you shake the cube
default: []
selector:
action: {}
rotate_right:
name: Rotate right
description: Action to run when cube rotates right
default: []
selector:
action: {}
rotate_left:
name: Rotate left
description: Action to run when cube rotates left
default: []
selector:
action: {}
  source_url: https://community.home-assistant.io/t/zha-aqara-magic-cube-57-actions/297012
# Restart on each new cube event so a rapid sequence of gestures only runs the
# most recent action; suppress the warning that restarting would otherwise log.
mode: restart
max_exceeded: silent
# Fire on every raw ZHA event from the selected cube device; the action section
# below decodes the event's command/args into a specific gesture.
trigger:
- platform: event
  event_type: zha_event
  event_data:
    device_id: !input 'remote'
action:
- variables:
command: '{{ trigger.event.data.command }}'
value: '{{ trigger.event.data.args.value | default(0) }}'
flip_degrees: '{{ trigger.event.data.args.flip_degrees | default(0) }}'
flip_any: !input 'flip_any'
flip_90: !input 'flip_90'
flip_180: !input 'flip_180'
slide_any_side: !input 'slide_any_side'
knock_any_side: !input 'knock_any_side'
flip90: 64
flip180: 128
slide: 256
knock: 512
shake: 0
drop: 3
activated_face: "\n{% if command == \"slide\" or command == \"knock\" %}\n\n \
\ {% if trigger.event.data.args.activated_face == 1 %} 1\n\n {% elif trigger.event.data.args.activated_face\
\ == 2 %} 5\n\n {% elif trigger.event.data.args.activated_face == 3 %} 6\n\n\
\ {% elif trigger.event.data.args.activated_face == 4 %} 4\n\n {% elif trigger.event.data.args.activated_face\
\ == 5 %} 2\n\n {% elif trigger.event.data.args.activated_face == 6 %} 3\n\n\
\ {% endif %}\n\n{% elif command == 'flip' %}\n\n {{ trigger.event.data.args.activated_face\
\ | int }}\n\n{% endif %}\n"
from_face: "\n{% if command == \"flip\" and flip_degrees == 90 %}\n\n {{ ((value\
\ - flip90 - (trigger.event.data.args.activated_face - 1)) / 8) + 1 | int }}\n\
\n{% endif %}\n"
relative_degrees: "\n{% if command == \"rotate_right\" or command == \"rotate_left\"\
\ %}\n\n {{ trigger.event.data.args.relative_degrees | float }}\n\n{% endif\
\ %}\n"
- choose:
- conditions:
- '{{ command == ''rotate_right'' }}'
sequence: !input 'rotate_right'
- conditions:
- '{{ command == ''rotate_left'' }}'
sequence: !input 'rotate_left'
- conditions:
- '{{ command == ''checkin'' }}'
sequence: !input 'cube_wake'
- conditions:
- '{{ value == shake }}'
sequence: !input 'cube_shake'
- conditions:
- '{{ value == drop }}'
sequence: !input 'cube_drop'
- conditions:
- '{{ command == ''knock'' and knock_any_side }}'
sequence: !input 'cube_knock_any'
- conditions:
- '{{ command == ''slide'' and slide_any_side }}'
sequence: !input 'cube_slide_any'
- conditions:
- '{{ command == ''flip'' and flip_any }}'
sequence:
- choose:
- conditions: '{{ activated_face == 1 }}'
sequence: !input 'cube_flip_1'
- conditions: '{{ activated_face == 2 }}'
sequence: !input 'cube_flip_2'
- conditions: '{{ activated_face == 3 }}'
sequence: !input 'cube_flip_3'
- conditions: '{{ activated_face == 4 }}'
sequence: !input 'cube_flip_4'
- conditions: '{{ activated_face == 5 }}'
sequence: !input 'cube_flip_5'
- conditions: '{{ activated_face == 6 }}'
sequence: !input 'cube_flip_6'
- conditions:
- '{{ flip_degrees == 90 and flip_90 }}'
sequence: !input 'cube_flip_90'
- conditions:
- '{{ flip_degrees == 180 and flip_180 }}'
sequence: !input 'cube_flip_180'
- conditions:
- '{{ flip_degrees == 90 and activated_face == 1 }}'
sequence:
- choose:
- conditions: '{{ from_face == 2 }}'
sequence: !input 'two_to_one'
- conditions: '{{ from_face == 3 }}'
sequence: !input 'three_to_one'
- conditions: '{{ from_face == 5 }}'
sequence: !input 'five_to_one'
- conditions: '{{ from_face == 6 }}'
sequence: !input 'six_to_one'
- conditions:
- '{{ flip_degrees == 90 and activated_face == 2 }}'
sequence:
- choose:
- conditions: '{{ from_face == 1 }}'
sequence: !input 'one_to_two'
- conditions: '{{ from_face == 3 }}'
sequence: !input 'three_to_two'
- conditions: '{{ from_face == 4 }}'
sequence: !input 'four_to_two'
- conditions: '{{ from_face == 6 }}'
sequence: !input 'six_to_two'
- conditions:
- '{{ flip_degrees == 90 and activated_face == 3 }}'
sequence:
- choose:
- conditions: '{{ from_face == 1 }}'
sequence: !input 'one_to_three'
- conditions: '{{ from_face == 2 }}'
sequence: !input 'two_to_three'
- conditions: '{{ from_face == 4 }}'
sequence: !input 'four_to_three'
- conditions: '{{ from_face == 5 }}'
sequence: !input 'five_to_three'
- conditions:
- '{{ flip_degrees == 90 and activated_face == 4 }}'
sequence:
- choose:
- conditions: '{{ from_face == 2 }}'
sequence: !input 'two_to_four'
- conditions: '{{ from_face == 3 }}'
sequence: !input 'three_to_four'
- conditions: '{{ from_face == 5 }}'
sequence: !input 'five_to_four'
- conditions: '{{ from_face == 6 }}'
sequence: !input 'six_to_four'
- conditions:
- '{{ flip_degrees == 90 and activated_face == 5 }}'
sequence:
- choose:
- conditions: '{{ from_face == 1 }}'
sequence: !input 'one_to_five'
- conditions: '{{ from_face == 3 }}'
sequence: !input 'three_to_five'
- conditions: '{{ from_face == 4 }}'
sequence: !input 'four_to_five'
- conditions: '{{ from_face == 6 }}'
sequence: !input 'six_to_five'
- conditions:
- '{{ flip_degrees == 90 and activated_face == 6 }}'
sequence:
- choose:
- conditions: '{{ from_face == 1 }}'
sequence: !input 'one_to_six'
- conditions: '{{ from_face == 2 }}'
sequence: !input 'two_to_six'
- conditions: '{{ from_face == 4 }}'
sequence: !input 'four_to_six'
- conditions: '{{ from_face == 5 }}'
sequence: !input 'five_to_six'
- conditions:
- '{{ value == flip180 + activated_face - 1 }}'
sequence:
- choose:
- conditions: '{{ activated_face == 1 }}'
sequence: !input 'four_to_one'
- conditions: '{{ activated_face == 2 }}'
sequence: !input 'five_to_two'
- conditions: '{{ activated_face == 3 }}'
sequence: !input 'six_to_three'
- conditions: '{{ activated_face == 4 }}'
sequence: !input 'one_to_four'
- conditions: '{{ activated_face == 5 }}'
sequence: !input 'two_to_five'
- conditions: '{{ activated_face == 6 }}'
sequence: !input 'three_to_six'
- conditions:
- '{{ command == ''knock'' and not knock_any_side }}'
sequence:
- choose:
- conditions: '{{ activated_face == 1 }}'
sequence: !input 'one_to_one'
- conditions: '{{ activated_face == 2 }}'
sequence: !input 'two_to_two'
- conditions: '{{ activated_face == 3 }}'
sequence: !input 'three_to_three'
- conditions: '{{ activated_face == 4 }}'
sequence: !input 'four_to_four'
- conditions: '{{ activated_face == 5 }}'
sequence: !input 'five_to_five'
- conditions: '{{ activated_face == 6 }}'
sequence: !input 'six_to_six'
- conditions:
- '{{ value == slide + activated_face - 1 }}'
sequence:
- choose:
- conditions: '{{ activated_face == 1 }}'
sequence: !input 'slide_on_one'
- conditions: '{{ activated_face == 2 }}'
sequence: !input 'slide_on_two'
- conditions: '{{ activated_face == 3 }}'
sequence: !input 'slide_on_three'
- conditions: '{{ activated_face == 4 }}'
sequence: !input 'slide_on_four'
- conditions: '{{ activated_face == 5 }}'
sequence: !input 'slide_on_five'
- conditions: '{{ activated_face == 6 }}'
sequence: !input 'slide_on_six'

View File

@@ -0,0 +1,77 @@
# Blueprint: wood-boiler ("chaudiere bois") thermostat.
# Declares the automation inputs — setpoint, on/off mode, outdoor/indoor/
# boiler/buffer-tank temperature probes, hysteresis delta and the circulator
# switch — and exposes them as template variables.
# NOTE(review): no trigger/action section is visible in this view — confirm
# the automation logic exists in the complete file.
blueprint:
  name: Thermostat chaudiere bois
  description: Thermostat chaudiere bois
  domain: automation
  input:
    # Thermostat setpoint (input_number helper).
    entity_consigne:
      name: Consigne
      description: Champ d'entrée de la consigne du thermostat (input number).
      selector:
        entity:
          domain: input_number
    # Master on/off switch for the thermostat (input_boolean helper).
    entity_mode:
      name: mode_on_off
      description: mode on off
      selector:
        entity:
          domain: input_boolean
    # Outdoor temperature probe.
    entity_temp_ext:
      name: Température extérieure
      description: Sonde de mesure de la température extérieure (sensor)
      selector:
        entity:
          domain: sensor
          device_class: temperature
    # Indoor (living-room) temperature probe.
    entity_temp_int:
      name: Température intérieure
      description: Sonde de mesure de la température intérieure (sensor)
      selector:
        entity:
          domain: sensor
          device_class: temperature
    # Wood-boiler temperature probe.
    entity_temp_chaud:
      name: Température chaudiere bois
      description: Sonde de mesure de la température chaudiere bois (sensor)
      selector:
        entity:
          domain: sensor
          device_class: temperature
    # Buffer-tank temperature probe.
    entity_temp_ballon:
      name: Température ballon
      description: Sonde de mesure de la température du ballon (sensor)
      selector:
        entity:
          domain: sensor
          device_class: temperature
    # Hysteresis: allowed gap between setpoint and room temperature
    # (0.6 recommended by the author).
    delta_thermostat:
      name: delta temperature thermostat
      description: delta de temperature entre consigne et temperature salon (0.6 conseillé)
      selector:
        number:
          min: 0.0
          max: 2.0
          step: 0.1
    # Circulator pump on/off switch.
    entity_circulateur:
      name: Circulateur
      description: marche / arrêt du circulateur (switch)
      selector:
        entity:
          domain: switch
# Retrieve the inputs as template variables (original comment in French:
# "Récupération des paramètres").
variables:
  entity_temp_int: !input entity_temp_int
  entity_temp_ext: !input entity_temp_ext
  entity_consigne: !input entity_consigne
  entity_mode: !input entity_mode
  entity_temp_chaud: !input entity_temp_chaud
  entity_temp_ballon: !input entity_temp_ballon
  entity_delta_th: !input delta_thermostat
mode: restart

View File

@@ -0,0 +1,167 @@
blueprint:
name: Turn on light, switch, scene, script or group based on motion and illuminance.
description: "Turn on a light, switch, scene, script or group based on motion detection,\
\ and low light level.\nThis blueprint uses helper entities you have to create\
\ yourself for some input values, to be able to dynamically set limits. For instructions\
\ on creating the helper entities take a look in the Home Assistant Community\
\ forum topic: https://community.home-assistant.io/t/turn-on-light-switch-scene-or-script-based-on-motion-and-illuminance-more-conditions/257085\n\
\nRequired entities:\n - Motion sensor (single sensor or group)\n - Target entity\
\ (light, switch, scene or script)\n\n\nOptional features:\n- You can set a cutoff\
\ entity of which the value determines whether the illuminance level is low and\
\ the automation needs to trigger. - You can define a blocking entity, which blocks\
\ the automation from running when this entity's state is on. - You van define\
\ a turn-off blocking entity, which blocks the entity from turning off after the\
\ set delay. - Time limits can also be defined to limit the time before and after\
\ the automation should trigger. - If you want the entity to turn off after a\
\ certain amount of minutes, you can use the Wait Time input. - If you want another\
\ entity than the target_entity to turn off after the delay, you can define a\
\ separate Turn-off entity. - If you do not enable the optional entities the automation\
\ will skip these conditions.\n\n\nOptional entities:\n- Illuminance sensor (sensor\
\ in illuminance class)\n- Illuminance cutoff value (input_number)\n- Blocking\
\ entity (any entity with state on/off)\n- Time limit before (input_datetime)\n\
- Time limit after (input_datetime)\n- Turn off wait time [in minutes!] (input_number)\
\ - will not work with script or scene target entities.\n- Turn off entity (any\
\ entity_id)\n"
  domain: automation
  input:
    # Required: the motion (or motion-group) sensor that starts the automation.
    motion_sensor:
      name: Motion Sensor
      description: This sensor will trigger the turning on of the target entity.
      selector:
        entity: {}
    # Required: entity to turn on (light/switch/scene) or run (script).
    target_entity:
      name: Target entity.
      description: The light, switch, scene to turn on (or script to run) when the
        automation is triggered.
      selector:
        entity: {}
    # Optional inputs below use a bare "default:" so an unset input resolves
    # to none in the condition templates, which then skip that check.
    illuminance_sensor:
      name: (OPTIONAL) Illuminance sensor
      description: This sensor will be used to determine the illumination.
      default:
      selector:
        entity:
          domain: sensor
          device_class: illuminance
    illuminance_cutoff:
      name: (OPTIONAL) Illuminance cutoff value
      description: This input_number will be used to compare to the current illumination
        to determine if it is low.
      default:
      selector:
        entity:
          domain: input_number
    blocker_entity:
      name: (OPTIONAL) Blocking entity
      description: If this entity's state is on, it will prevent the automation from
        running. E.g. sleepmode or away mode.
      default:
      selector:
        entity: {}
    time_limit_after:
      name: (OPTIONAL) Only run after time.
      description: Automation will only run when time is later than this input_datetime
        value.
      default:
      selector:
        entity:
          domain: input_datetime
    time_limit_before:
      name: (OPTIONAL) Only run before time.
      description: Automation will only run when time is earlier than this input_datetime
        value.
      default:
      selector:
        entity:
          domain: input_datetime
    no_motion_wait:
      name: (OPTIONAL) Turn off wait time (minutes)
      description: Time in minutes to leave the target entity on after last motion
        is detected. If not used entity will not auto turn off.
      default:
      selector:
        entity:
          domain: input_number
    turn_off_blocker_entity:
      name: (OPTIONAL) Turn-off Blocking entity
      description: If this entity's state is on, it will prevent the target entity
        from turning off after the set delay.
      default:
      selector:
        entity: {}
    target_off_entity:
      name: (OPTIONAL) Turn-off entity
      description: If defined, this entity will be turned off instead of the default
        target entity. This can be helpful when using target entities of type scene
        or script.
      default:
      selector:
        entity: {}
  source_url: https://gist.github.com/freakshock88/2311759ba64f929f6affad4c0a67110b
# Restart on re-triggered motion so the off-delay timer keeps being pushed back.
mode: restart
max_exceeded: silent
# Map blueprint inputs to template variables so the condition/action templates
# can dereference the selected entity ids (unset optional inputs are none).
variables:
  target_entity: !input 'target_entity'
  illuminance_currently: !input 'illuminance_sensor'
  illuminance_cutoff: !input 'illuminance_cutoff'
  blocker_entity: !input 'blocker_entity'
  time_limit_before: !input 'time_limit_before'
  time_limit_after: !input 'time_limit_after'
  no_motion_wait: !input 'no_motion_wait'
  # Domain of the target entity (light/switch/scene/script).
  entity_domain: '{{ states[target_entity].domain }}'
  turn_off_blocker_entity: !input 'turn_off_blocker_entity'
  target_off_entity: !input 'target_off_entity'
# Fire when the motion sensor reports motion.
trigger:
  platform: state
  entity_id: !input 'motion_sensor'
  to: 'on'
condition:
- condition: template
value_template: '{{ (states[target_entity].state == ''on'') or (illuminance_currently
== none) or (illuminance_cutoff == none) or (states[illuminance_currently].state
| int < states[illuminance_cutoff].state | int) }}'
- condition: template
value_template: '{{ (blocker_entity == none) or (states[blocker_entity].state ==
''off'') }}'
- condition: template
value_template: "{% set current_time = now().strftime(\"%H:%M\") %}\n{% if time_limit_before\
\ == none and time_limit_after == none %} true {% endif %}\n{% if time_limit_before\
\ != none and time_limit_after == none %} {% set current_time_is_before_limit\
\ = current_time < states[time_limit_before].state %} {{ current_time_is_before_limit\
\ }} {% elif time_limit_before == none and time_limit_after != none %} {% set\
\ current_time_is_after_limit = current_time > states[time_limit_after].state\
\ %} {{ current_time_is_after_limit }} {% endif %}\n{% if time_limit_before !=\
\ none and time_limit_after != none %} {% set before_limit_is_tomorrow = states[time_limit_before].state\
\ < states[time_limit_after].state %} {% set current_time_is_before_limit = current_time\
\ < states[time_limit_before].state %} {% set current_time_is_after_limit = current_time\
\ > states[time_limit_after].state %} {% set time_window_spans_midnight = states[time_limit_after].state\
\ > states[time_limit_before].state %}\n {% if time_window_spans_midnight !=\
\ none and time_window_spans_midnight and before_limit_is_tomorrow %}\n {{ current_time_is_after_limit\
\ or current_time_is_before_limit }}\n {% elif time_window_spans_midnight !=\
\ none and not time_window_spans_midnight %}\n {{ current_time_is_before_limit\
\ and current_time_is_after_limit }}\n {% endif %}\n{% endif %}\n"
action:
# Turn the target on first; homeassistant.turn_on works across light, switch,
# scene and script domains.
- service: homeassistant.turn_on
  entity_id: !input 'target_entity'
# Everything below only runs when an auto-off wait time was configured.
- condition: template
  value_template: '{{ no_motion_wait != none }}'
# Wait for motion to clear, then hold for the configured number of minutes.
# NOTE(review): wait_for_trigger has no timeout — if the sensor never reports
# 'off' this run waits indefinitely (mode: restart bounds it to one pending
# run). Consider adding a timeout if that is not acceptable.
- wait_for_trigger:
    platform: state
    entity_id: !input 'motion_sensor'
    from: 'on'
    to: 'off'
# NOTE(review): "| int" without a default is deprecated in recent Home
# Assistant releases and fails on 'unknown'/'unavailable' states — confirm.
- delay:
    minutes: '{{ states[no_motion_wait].state | int }}'
# Abort the turn-off when the optional turn-off blocker is on.
- condition: template
  value_template: '{{ (turn_off_blocker_entity == none) or (states[turn_off_blocker_entity].state
    == ''off'') }}'
- choose:
  # Prefer the dedicated turn-off entity when one is set (useful when the
  # target is a scene or script, which cannot be turned off).
  - conditions:
    - condition: template
      value_template: '{{ (target_off_entity != none) }}'
    sequence:
    - service: homeassistant.turn_off
      entity_id: !input 'target_off_entity'
  default:
  - service: homeassistant.turn_off
    entity_id: !input 'target_entity'

View File

@@ -0,0 +1,288 @@
blueprint:
name: Zigbee2MQTT - Aqara Magic Cube (MFKZQ01LM)
description: 'This blueprint allows you to make automations for the Aqara Magic
Cube, connected to Zigbee2MQTT.
Limitations (these are by design):
- There is just one trigger for flip to a side, separate actions for flip90 and
flip180 aren''t possible.
- For fall there are no separate actions for the side, due to the nature of falling
it becomes random what will happen.
- For shake there are no separate actions for the side, due to the nature of shaking
it isn''t always clear which side is up.
'
domain: automation
source_url: https://github.com/golles/Home-Assistant-Blueprints/blob/9a63ee03f1d0cf10448fb89a28528ac4105461ac/zigbee2mqtt_aqara_magic_cube.yaml
input:
remote:
name: Remote
description: Aqara Magic Cube to use, entity should end with _action
selector:
entity:
integration: mqtt
domain: sensor
fall:
name: Drop the cube
default: []
selector:
action: {}
flip_side_0:
name: Flip the cube to side 0
default: []
selector:
action: {}
flip_side_1:
name: Flip the cube to side 1
default: []
selector:
action: {}
flip_side_2:
name: Flip the cube to side 2
default: []
selector:
action: {}
flip_side_3:
name: Flip the cube to side 3
default: []
selector:
action: {}
flip_side_4:
name: Flip the cube to side 4
default: []
selector:
action: {}
flip_side_5:
name: Flip the cube to side 5
default: []
selector:
action: {}
rotate_left_side_0:
name: Rotate cube left with side 0 up
default: []
selector:
action: {}
rotate_left_side_1:
name: Rotate cube left with side 1 up
default: []
selector:
action: {}
rotate_left_side_2:
name: Rotate cube left with side 2 up
default: []
selector:
action: {}
rotate_left_side_3:
name: Rotate cube left with side 3 up
default: []
selector:
action: {}
rotate_left_side_4:
name: Rotate cube left with side 4 up
default: []
selector:
action: {}
rotate_left_side_5:
name: Rotate cube left with side 5 up
default: []
selector:
action: {}
rotate_right_side_0:
name: Rotate cube right with side 0 up
default: []
selector:
action: {}
rotate_right_side_1:
name: Rotate cube right with side 1 up
default: []
selector:
action: {}
rotate_right_side_2:
name: Rotate cube right with side 2 up
default: []
selector:
action: {}
rotate_right_side_3:
name: Rotate cube right with side 3 up
default: []
selector:
action: {}
rotate_right_side_4:
name: Rotate cube right with side 4 up
default: []
selector:
action: {}
rotate_right_side_5:
name: Rotate cube right with side 5 up
default: []
selector:
action: {}
shake:
name: Shake the cube
default: []
selector:
action: {}
slide_side_0:
name: Slide the cube with side 0 up
default: []
selector:
action: {}
slide_side_1:
name: Slide the cube with side 1 up
default: []
selector:
action: {}
slide_side_2:
name: Slide the cube with side 2 up
default: []
selector:
action: {}
slide_side_3:
name: Slide the cube with side 3 up
default: []
selector:
action: {}
slide_side_4:
name: Slide the cube with side 4 up
default: []
selector:
action: {}
slide_side_5:
name: Slide the cube with side 5 up
default: []
selector:
action: {}
tap_side_0:
name: Tap the cube with side 0 up
default: []
selector:
action: {}
tap_side_1:
name: Tap the cube with side 1 up
default: []
selector:
action: {}
tap_side_2:
name: Tap the cube with side 2 up
default: []
selector:
action: {}
tap_side_3:
name: Tap the cube with side 3 up
default: []
selector:
action: {}
tap_side_4:
name: Tap the cube with side 4 up
default: []
selector:
action: {}
tap_side_5:
name: Tap the cube with side 5 up
default: []
selector:
action: {}
# Queue up to 5 cube events so a rapid sequence of gestures is handled in
# order; silently drop anything beyond that instead of logging a warning.
mode: queued
max: 5
max_exceeded: silent
# Trigger on the Zigbee2MQTT action sensor changing to any supported gesture.
trigger:
- platform: state
  entity_id: !input 'remote'
  attribute: action
  to:
  - fall
  - flip180
  - flip90
  - rotate_left
  - rotate_right
  - shake
  - slide
  - tap
action:
- variables:
    # Gesture name (fall/flip90/flip180/rotate_*/shake/slide/tap) and the cube
    # side (0-5) reported with it; both drive the dispatch that follows.
    event: '{{ trigger.to_state.attributes.action }}'
    side: '{{ trigger.to_state.attributes.side }}'
- choose:
- conditions: '{{ event == "fall" }}'
sequence: !input 'fall'
- conditions: '{{ event == "flip180" or event == "flip90" }}'
sequence:
- choose:
- conditions: '{{ side == 0 }}'
sequence: !input 'flip_side_0'
- conditions: '{{ side == 1 }}'
sequence: !input 'flip_side_1'
- conditions: '{{ side == 2 }}'
sequence: !input 'flip_side_2'
- conditions: '{{ side == 3 }}'
sequence: !input 'flip_side_3'
- conditions: '{{ side == 4 }}'
sequence: !input 'flip_side_4'
- conditions: '{{ side == 5 }}'
sequence: !input 'flip_side_5'
- conditions: '{{ event == "rotate_left" }}'
sequence:
- choose:
- conditions: '{{ side == 0 }}'
sequence: !input 'rotate_left_side_0'
- conditions: '{{ side == 1 }}'
sequence: !input 'rotate_left_side_1'
- conditions: '{{ side == 2 }}'
sequence: !input 'rotate_left_side_2'
- conditions: '{{ side == 3 }}'
sequence: !input 'rotate_left_side_3'
- conditions: '{{ side == 4 }}'
sequence: !input 'rotate_left_side_4'
- conditions: '{{ side == 5 }}'
sequence: !input 'rotate_left_side_5'
- conditions: '{{ event == "rotate_right" }}'
sequence:
- choose:
- conditions: '{{ side == 0 }}'
sequence: !input 'rotate_right_side_0'
- conditions: '{{ side == 1 }}'
sequence: !input 'rotate_right_side_1'
- conditions: '{{ side == 2 }}'
sequence: !input 'rotate_right_side_2'
- conditions: '{{ side == 3 }}'
sequence: !input 'rotate_right_side_3'
- conditions: '{{ side == 4 }}'
sequence: !input 'rotate_right_side_4'
- conditions: '{{ side == 5 }}'
sequence: !input 'rotate_right_side_5'
- conditions: '{{ event == "shake" }}'
sequence: !input 'shake'
- conditions: '{{ event == "slide" }}'
sequence:
- choose:
- conditions: '{{ side == 0 }}'
sequence: !input 'slide_side_0'
- conditions: '{{ side == 1 }}'
sequence: !input 'slide_side_1'
- conditions: '{{ side == 2 }}'
sequence: !input 'slide_side_2'
- conditions: '{{ side == 3 }}'
sequence: !input 'slide_side_3'
- conditions: '{{ side == 4 }}'
sequence: !input 'slide_side_4'
- conditions: '{{ side == 5 }}'
sequence: !input 'slide_side_5'
- conditions: '{{ event == "tap" }}'
sequence:
- choose:
- conditions: '{{ side == 0 }}'
sequence: !input 'tap_side_0'
- conditions: '{{ side == 1 }}'
sequence: !input 'tap_side_1'
- conditions: '{{ side == 2 }}'
sequence: !input 'tap_side_2'
- conditions: '{{ side == 3 }}'
sequence: !input 'tap_side_3'
- conditions: '{{ side == 4 }}'
sequence: !input 'tap_side_4'
- conditions: '{{ side == 5 }}'
sequence: !input 'tap_side_5'

View File

@@ -3,15 +3,13 @@ blueprint:
description: Turn on a light when motion is detected.
domain: automation
source_url: https://github.com/home-assistant/core/blob/dev/homeassistant/components/automation/blueprints/motion_light.yaml
author: Home Assistant
input:
motion_entity:
name: Motion Sensor
selector:
entity:
filter:
device_class: motion
domain: binary_sensor
domain: binary_sensor
device_class: motion
light_target:
name: Light
selector:

View File

@@ -3,27 +3,23 @@ blueprint:
description: Send a notification to a device when a person leaves a specific zone.
domain: automation
source_url: https://github.com/home-assistant/core/blob/dev/homeassistant/components/automation/blueprints/notify_leaving_zone.yaml
author: Home Assistant
input:
person_entity:
name: Person
selector:
entity:
filter:
domain: person
domain: person
zone_entity:
name: Zone
selector:
entity:
filter:
domain: zone
domain: zone
notify_device:
name: Device to notify
description: Device needs to run the official Home Assistant app to receive notifications.
selector:
device:
filter:
integration: mobile_app
integration: mobile_app
trigger:
platform: state
@@ -38,9 +34,7 @@ variables:
condition:
condition: template
# The first case handles leaving the Home zone which has a special state when zoning called 'home'.
# The second case handles leaving all other zones.
value_template: "{{ zone_entity == 'zone.home' and trigger.from_state.state == 'home' and trigger.to_state.state != 'home' or trigger.from_state.state == zone_state and trigger.to_state.state != zone_state }}"
value_template: "{{ trigger.from_state.state == zone_state and trigger.to_state.state != zone_state }}"
action:
- alias: "Notify that a person has left the zone"

View File

@@ -17,7 +17,7 @@ homeassistant:
http: #si cette balise n'existe pas, ajoutez la, sinon ajouter seulement la suite (pas de duplication)
use_x_forwarded_for: true
trusted_proxies:
- 10.0.0.16
- 10.0.0.202
# Loads default set of integrations. Do not remove.
default_config:
@@ -76,6 +76,91 @@ recorder:
auto_purge: true
auto_repack: true
commit_interval: 5
include:
domains:
- light
- switch
- cover
#entity_globs:
# - sensor*
entities:
- sensor.tac2100_compteur_puissance_active
- sensor.tac2100_compteur_courant
- sensor.ecowitt_tempin
- sensor.select_sql_query
- sensor.ecu_current_power
- sensor.tac2100_solar_puissance_active
- sensor.disjoncteur_domo_z_power
- sensor.tac2100_compteur_energie_active_totale
- sensor.ecu_today_energy
- sensor.ecowitt_temp
- sensor.ecowitt_dewpoint
- sensor.ecowitt_humidity
- sensor.ecowitt_humidityin
- sensor.ecowitt_baromabs
- sensor.ecowitt_dailyrain
- sensor.ecowitt_eventrain
- sensor.ecowitt_hourlyrain
- sensor.ecowitt_monthlyrain
- sensor.ecowitt_rainrate
- sensor.ecowitt_solarradiation
- sensor.ecowitt_totalrain
- sensor.ecowitt_windgust
- sensor.ecowitt_baromrel
- sensor.ecowitt_uv
- sensor.presence_cuisine_motion_state
- sensor.ecowitt_winddir
- sensor.ecowitt_maxdailygust
- sensor.ecowitt_windspeed
- sensor.ecowitt_frostpoint
- sensor.ecowitt_feelslike
- sensor.qualite_air_co2
- sensor.geiger_wemos_geiger
- sensor.froling_s3_tdeg_fumee
- sensor.froling_s3_tdeg_board
- sensor.froling_s3_tdeg_depart_chauffage
- sensor.froling_s3_tdeg_chaudiere
- sensor.esp8266_tampon_temp_4d756f_temp_retour_chauff
- sensor.froling_s3_tampon_haut
- sensor.froling_s3_tampon_bas
- sensor.esp8266_tampon_temp_4d756f_tampon_milieu
- sensor.river2_battery_level
- sensor.tac2100_compteur_tension
- sensor.energy_pj1203_solar_power_b
- sensor.energy_pj1203_solar_power_a
- sensor.energy_pj1203_solar_current_b
- sensor.energy_pj1203_solar_current_a
- sensor.energy_pj1203_solar_energy_flow_a
- sensor.energy_pj1203_solar_energy_flow_b
- sensor.energy_pj1203_solar_energy_produced_a
- sensor.energy_pj1203_solar_energy_produced_b
- sensor.ecu_current_power
- sensor.dell_5520_battery_dell5520
- sensor.blitzortung_lightning_counter
- sensor.compteur_eclair_mensuel
- sensor.pj1203_zb_compteur_current_a
- sensor.pj1203_zb_compteur_current_b
- sensor.pj1203_zb_compteur_power_a
- sensor.pj1203_zb_compteur_power_b
- sensor.capteur_temperature_rond_temperature
- sensor.smart_energy_meter_puissance
- sensor.disjoncteur_domo_z_energy
- sensor.prise_ecran_power
- sensor.kc868_a8_d758d0_d758d0_bh1750_illuminance
- sensor.t_chambre1
- sensor.t_salle_de_bain
- sensor.h_salle_de_bain
- sensor.t_comble
- sensor.t_chambre2
- sensor.esp32_4_relays_garage_5a10c8_temperature_garage_5
- sensor.wemos_cave_temperature
- sensor.ecowitt_heatindex
- sensor.wemos_bureau0_temperature
- sensor.blitzortung_lightning_counter
- sensor.aubess_cafetiere1_power
#event_types:
# - call_service
alarm_control_panel:
- platform: manual
@@ -93,6 +178,18 @@ alarm_control_panel:
arming_time: 0
delay_time: 0
# Proxmox VE integration: monitors VMs 200 and 201 on node "hpproliant".
proxmoxve:
  # SECURITY(review): a plaintext credential is committed here. Move it to
  # secrets.yaml (password: !secret proxmox_password) and rotate this
  # password — it is now exposed in version-control history.
  # NOTE(review): verify_ssl is disabled — confirm this is intentional
  # (e.g. a self-signed Proxmox certificate).
  - host: 10.0.0.205
    username: hass@pve
    password: Misstibet5*
    verify_ssl: false
    realm: pve
    nodes:
      - node: hpproliant
        vms:
          - 200
          - 201
api:
wake_on_lan:
@@ -133,4 +230,3 @@ camera:
- platform: local_file
name: MF_alerte_tomorrow
file_path: /config/www/weather/meteo_france_alerte_tomorrow.png

View File

@@ -1,40 +1,46 @@
from homeassistant.const import Platform
from . import const, BaseDevice, EntityMigration, MigrationAction
from .const import ATTR_DESIGN_CAPACITY, ATTR_FULL_CAPACITY, ATTR_REMAIN_CAPACITY, BATTERY_CHARGING_STATE, \
MAIN_DESIGN_CAPACITY, MAIN_FULL_CAPACITY, MAIN_REMAIN_CAPACITY
from ..entities import BaseSensorEntity, BaseNumberEntity, BaseSwitchEntity, BaseSelectEntity
from ..mqtt.ecoflow_mqtt import EcoflowMQTTClient
from ..number import ChargingPowerEntity, MaxBatteryLevelEntity, MinBatteryLevelEntity
from ..number import ChargingPowerEntity, MaxBatteryLevelEntity, MinBatteryLevelEntity, BatteryBackupLevel
from ..select import DictSelectEntity, TimeoutDictSelectEntity
from ..sensor import LevelSensorEntity, RemainSensorEntity, TempSensorEntity, \
CyclesSensorEntity, InWattsSensorEntity, OutWattsSensorEntity, VoltSensorEntity, StatusSensorEntity, \
MilliVoltSensorEntity, InMilliVoltSensorEntity, OutMilliVoltSensorEntity, ChargingStateSensorEntity, \
CapacitySensorEntity
CyclesSensorEntity, InWattsSensorEntity, OutWattsSensorEntity, VoltSensorEntity, InAmpSensorEntity, \
InVoltSensorEntity, QuotasStatusSensorEntity, MilliVoltSensorEntity, InMilliVoltSensorEntity, \
OutMilliVoltSensorEntity, ChargingStateSensorEntity, CapacitySensorEntity
from ..switch import EnabledEntity
class River2(BaseDevice):
def charging_power_step(self) -> int:
return 50
def sensors(self, client: EcoflowMQTTClient) -> list[BaseSensorEntity]:
return [
LevelSensorEntity(client, "bms_bmsStatus.soc", const.MAIN_BATTERY_LEVEL)
.attr("bms_bmsStatus.designCap", const.ATTR_DESIGN_CAPACITY, 0)
.attr("bms_bmsStatus.fullCap", const.ATTR_FULL_CAPACITY, 0)
.attr("bms_bmsStatus.remainCap", const.ATTR_REMAIN_CAPACITY, 0),
CapacitySensorEntity(client, "bms_bmsStatus.designCap", const.MAIN_DESIGN_CAPACITY, False),
CapacitySensorEntity(client, "bms_bmsStatus.fullCap", const.MAIN_FULL_CAPACITY, False),
CapacitySensorEntity(client, "bms_bmsStatus.remainCap", const.MAIN_REMAIN_CAPACITY, False),
.attr("bms_bmsStatus.designCap", ATTR_DESIGN_CAPACITY, 0)
.attr("bms_bmsStatus.fullCap", ATTR_FULL_CAPACITY, 0)
.attr("bms_bmsStatus.remainCap", ATTR_REMAIN_CAPACITY, 0),
CapacitySensorEntity(client, "bms_bmsStatus.designCap", MAIN_DESIGN_CAPACITY, False),
CapacitySensorEntity(client, "bms_bmsStatus.fullCap", MAIN_FULL_CAPACITY, False),
CapacitySensorEntity(client, "bms_bmsStatus.remainCap", MAIN_REMAIN_CAPACITY, False),
LevelSensorEntity(client, "bms_bmsStatus.soh", const.SOH),
LevelSensorEntity(client, "bms_emsStatus.lcdShowSoc", const.COMBINED_BATTERY_LEVEL),
ChargingStateSensorEntity(client, "bms_emsStatus.chgState", const.BATTERY_CHARGING_STATE),
ChargingStateSensorEntity(client, "bms_emsStatus.chgState", BATTERY_CHARGING_STATE),
InWattsSensorEntity(client, "pd.wattsInSum", const.TOTAL_IN_POWER),
OutWattsSensorEntity(client, "pd.wattsOutSum", const.TOTAL_OUT_POWER),
InAmpSensorEntity(client, "inv.dcInAmp", const.SOLAR_IN_CURRENT),
InVoltSensorEntity(client, "inv.dcInVol", const.SOLAR_IN_VOLTAGE),
InWattsSensorEntity(client, "inv.inputWatts", const.AC_IN_POWER),
OutWattsSensorEntity(client, "inv.outputWatts", const.AC_OUT_POWER),
@@ -44,14 +50,15 @@ class River2(BaseDevice):
InWattsSensorEntity(client, "pd.typecChaWatts", const.TYPE_C_IN_POWER),
InWattsSensorEntity(client, "mppt.inWatts", const.SOLAR_IN_POWER),
OutWattsSensorEntity(client, "pd.carWatts", const.DC_OUT_POWER),
OutWattsSensorEntity(client, "pd.typec1Watts", const.TYPEC_1_OUT_POWER),
# both USB-A Ports (the small RIVER 2 has only two) are being summarized under "pd.usb1Watts" - https://github.com/tolwi/hassio-ecoflow-cloud/issues/12#issuecomment-1432837393
OutWattsSensorEntity(client, "pd.carWatts", const.DC_OUT_POWER),
OutWattsSensorEntity(client, "pd.typec1Watts", const.TYPEC_OUT_POWER),
OutWattsSensorEntity(client, "pd.usb1Watts", const.USB_OUT_POWER),
# OutWattsSensorEntity(client, "pd.usb2Watts", const.USB_2_OUT_POWER),
RemainSensorEntity(client, "bms_emsStatus.chgRemainTime", const.CHARGE_REMAINING_TIME),
RemainSensorEntity(client, "bms_emsStatus.dsgRemainTime", const.DISCHARGE_REMAINING_TIME),
RemainSensorEntity(client, "pd.remainTime", const.REMAINING_TIME),
TempSensorEntity(client, "inv.outTemp", "Inv Out Temperature"),
CyclesSensorEntity(client, "bms_bmsStatus.cycles", const.CYCLES),
@@ -68,8 +75,8 @@ class River2(BaseDevice):
MilliVoltSensorEntity(client, "bms_bmsStatus.minCellVol", const.MIN_CELL_VOLT, False),
MilliVoltSensorEntity(client, "bms_bmsStatus.maxCellVol", const.MAX_CELL_VOLT, False),
QuotasStatusSensorEntity(client),
# FanSensorEntity(client, "bms_emsStatus.fanLevel", "Fan Level"),
StatusSensorEntity(client),
]
@@ -86,6 +93,14 @@ class River2(BaseDevice):
ChargingPowerEntity(client, "mppt.cfgChgWatts", const.AC_CHARGING_POWER, 100, 360,
lambda value: {"moduleType": 5, "operateType": "acChgCfg",
"params": {"chgWatts": int(value), "chgPauseFlag": 255}}),
BatteryBackupLevel(client, "pd.bpPowerSoc", const.BACKUP_RESERVE_LEVEL, 5, 100,
"bms_emsStatus.minDsgSoc", "bms_emsStatus.maxChargeSoc",
lambda value: {"moduleType": 1, "operateType": "watthConfig",
"params": {"isConfig": 1,
"bpPowerSoc": int(value),
"minDsgSoc": 0,
"minChgSoc": 0}}),
]
def switches(self, client: EcoflowMQTTClient) -> list[BaseSwitchEntity]:
@@ -95,15 +110,28 @@ class River2(BaseDevice):
"params": {"enabled": value, "out_voltage": -1, "out_freq": 255,
"xboost": 255}}),
EnabledEntity(client, "pd.acAutoOutConfig", const.AC_ALWAYS_ENABLED,
lambda value, params: {"moduleType": 1, "operateType": "acAutoOutConfig",
"params": {"acAutoOutConfig": value,
"minAcOutSoc": int(params.get("bms_emsStatus.minDsgSoc", 0)) + 5}}
),
EnabledEntity(client, "mppt.cfgAcXboost", const.XBOOST_ENABLED,
lambda value: {"moduleType": 5, "operateType": "acOutCfg",
"params": {"enabled": 255, "out_voltage": -1, "out_freq": 255,
"xboost": value}}),
EnabledEntity(client, "pd.carState", const.DC_ENABLED,
lambda value: {"moduleType": 5, "operateType": "mpptCar", "params": {"enabled": value}})
lambda value: {"moduleType": 5, "operateType": "mpptCar", "params": {"enabled": value}}),
EnabledEntity(client, "pd.bpPowerSoc", const.BP_ENABLED,
lambda value, params: {"moduleType": 1, "operateType": "watthConfig",
"params": {"isConfig": value,
"bpPowerSoc": value,
"minDsgSoc": 0,
"minChgSoc": 0}})
]
def selects(self, client: EcoflowMQTTClient) -> list[BaseSelectEntity]:
return [
DictSelectEntity(client, "mppt.dcChgCurrent", const.DC_CHARGE_CURRENT, const.DC_CHARGE_CURRENT_OPTIONS,

View File

@@ -13,5 +13,5 @@
"reactivex==4.0.4",
"protobuf>=4.23.0"
],
"version": "0.13.3"
"version": "0.13.4"
}

View File

@@ -0,0 +1,27 @@
{
"title": "EcoFlow-Cloud",
"config": {
"step": {
"user": {
"data": {
"username": "Email do utilizador",
"password": "Palavra-passe do utilizador",
"type": "Tipo de dispositivo",
"name": "Nome do dispositivo",
"device_id": "SN do dispositivo"
}
}
}
},
"options": {
"step": {
"init": {
"data": {
"power_step": "Controlo deslizante da potência",
"refresh_period_sec": "Período de atualização dos dados (segundos)"
}
}
}
}
}

View File

@@ -14,5 +14,5 @@
"iot_class": "local_push",
"issue_tracker": "https://github.com/blakeblackshear/frigate-hass-integration/issues",
"requirements": ["pytz"],
"version": "5.2.0"
"version": "5.3.0"
}

View File

@@ -276,6 +276,13 @@ class NotificationsProxyView(ProxyView):
if path.endswith("clip.mp4"):
return f"api/events/{event_id}/clip.mp4"
if path.endswith("event_preview.gif"):
return f"api/events/{event_id}/preview.gif"
if path.endswith("review_preview.gif"):
return f"api/review/{event_id}/preview"
return None
def _permit_request(

View File

@@ -6,7 +6,7 @@ DOMAIN = "irrigation_unlimited"
DOMAIN_DATA = f"{DOMAIN}_data"
COORDINATOR = "coordinator"
COMPONENT = "component"
VERSION = "2024.5.0"
VERSION = "2024.8.0"
ATTRIBUTION = "Data provided by http://jsonplaceholder.typicode.com/"
ISSUE_URL = "https://github.com/rgc99/irrigation_unlimited/issues"
@@ -141,6 +141,7 @@ SERVICE_SUSPEND = "suspend"
SERVICE_SKIP = "skip"
SERVICE_PAUSE = "pause"
SERVICE_RESUME = "resume"
SERVICE_GET_INFO = "get_info"
# Events
EVENT_START = "start"
@@ -161,6 +162,7 @@ STATUS_DELAY = "delay"
TIMELINE_STATUS = "status"
TIMELINE_START = "start"
TIMELINE_END = "end"
TIMELINE_SCHEDULE = "schedule"
TIMELINE_SCHEDULE_NAME = "schedule_name"
TIMELINE_ADJUSTMENT = "adjustment"
@@ -211,14 +213,16 @@ ATTR_VOLUME = "volume"
ATTR_FLOW_RATE = "flow_rate"
ATTR_SWITCH_ENTITIES = "switch_entity_id"
ATTR_SEQUENCE_COUNT = "sequence_count"
ATTR_CONTROLLER_ID = "controller_id"
ATTR_ZONE_ID = "zone_id"
ATTR_CONTROLLERS = "controllers"
ATTR_SEQUENCES = "sequences"
ATTR_VERSION = "version"
# Resources
RES_MANUAL = "Manual"
RES_NOT_RUNNING = "not running"
RES_NONE = "none"
RES_CONTROLLER = "Controller"
RES_ZONE = "Zone"
RES_MASTER = "Master"
RES_TIMELINE_RUNNING = "running"
RES_TIMELINE_SCHEDULED = "scheduled"
RES_TIMELINE_NEXT = "next"

View File

@@ -1,10 +1,15 @@
"""History access and caching. This module runs asynchronously collecting
and caching history data"""
from datetime import datetime, timedelta
from typing import Callable, OrderedDict, Any
from homeassistant.core import HomeAssistant, State, CALLBACK_TYPE
from homeassistant.util import dt
from homeassistant.components.recorder.const import DATA_INSTANCE as RECORDER_INSTANCE
try:
from homeassistant.helpers.recorder import DATA_INSTANCE
except ImportError:
from homeassistant.components.recorder.const import DATA_INSTANCE
from homeassistant.components.recorder import get_instance
from homeassistant.helpers.event import (
async_track_point_in_utc_time,
@@ -15,6 +20,7 @@ from homeassistant.const import STATE_ON
from .const import (
ATTR_CURRENT_ADJUSTMENT,
ATTR_CURRENT_NAME,
ATTR_CURRENT_SCHEDULE,
CONF_ENABLED,
CONF_HISTORY,
CONF_HISTORY_REFRESH,
@@ -22,6 +28,7 @@ from .const import (
CONF_REFRESH_INTERVAL,
CONF_SPAN,
TIMELINE_ADJUSTMENT,
TIMELINE_SCHEDULE,
TIMELINE_SCHEDULE_NAME,
TIMELINE_START,
TIMELINE_END,
@@ -166,6 +173,7 @@ class IUHistory:
result = OrderedDict()
result[TIMELINE_START] = round_seconds_dt(item.last_changed)
result[TIMELINE_END] = round_seconds_dt(end)
result[TIMELINE_SCHEDULE] = item.attributes.get(ATTR_CURRENT_SCHEDULE)
result[TIMELINE_SCHEDULE_NAME] = item.attributes.get(ATTR_CURRENT_NAME)
result[TIMELINE_ADJUSTMENT] = item.attributes.get(
ATTR_CURRENT_ADJUSTMENT, ""
@@ -197,7 +205,7 @@ class IUHistory:
return
start = self._stime - self._history_span
if RECORDER_INSTANCE in self._hass.data:
if DATA_INSTANCE in self._hass.data:
data = await get_instance(self._hass).async_add_executor_job(
history.get_significant_states,
self._hass,

View File

@@ -165,6 +165,7 @@ from .const import (
RES_TIMELINE_SCHEDULED,
TIMELINE_ADJUSTMENT,
TIMELINE_END,
TIMELINE_SCHEDULE,
TIMELINE_SCHEDULE_NAME,
TIMELINE_START,
MONTHS,
@@ -1064,10 +1065,10 @@ class IUVolume:
self._zone = zone
# Config parameters
self._sensor_id: str = None
self._volume_precision: int = 3
self._volume_scale: float = 1
self._flow_rate_precision: int = 3
self._flow_rate_scale: float = 3600
self._volume_precision: int = None
self._volume_scale: float = None
self._flow_rate_precision: int = None
self._flow_rate_scale: float = None
# Private variables
self._callback_remove: CALLBACK_TYPE = None
self._start_volume: Decimal = None
@@ -1078,9 +1079,11 @@ class IUVolume:
str, Callable[[datetime, "IUZone", Decimal, Decimal], None]
] = {}
self._flow_rates: list[Decimal] = []
self._flow_rate_sum = Decimal(0)
self._flow_rate_sum: Decimal = None
self._flow_rate_sma: Decimal = None
self._sensor_readings: list[IUVolumeSensorReading] = []
self.reset_config()
self.reset_readings()
@property
def total(self) -> float | None:
@@ -1096,6 +1099,25 @@ class IUVolume:
return float(self._flow_rate_sma)
return None
def reset_config(self) -> None:
"""Reset this object"""
self.end_record(None)
self._sensor_id = None
self._volume_precision = 3
self._volume_scale = 1
self._flow_rate_precision = 3
self._flow_rate_scale = 3600
def reset_readings(self) -> None:
"""Reset reading parameters"""
self._start_volume = None
self._total_volume = None
self._start_time = None
self._sensor_readings.clear()
self._flow_rates.clear()
self._flow_rate_sum = 0
self._flow_rate_sma = None
def load(self, config: OrderedDict, all_zones: OrderedDict) -> "IUSwitch":
"""Load volume data from the configuration"""
@@ -1114,6 +1136,8 @@ class IUVolume:
CONF_FLOW_RATE_SCALE, self._flow_rate_scale
)
self.reset_config()
self.reset_readings()
if all_zones is not None:
load_params(all_zones.get(CONF_VOLUME))
load_params(config.get(CONF_VOLUME))
@@ -1174,37 +1198,32 @@ class IUVolume:
parameters in the event message"""
return event
async def sensor_state_change(self, event: HAEvent):
event = self.event_hook(event)
stime = event.time_fired
try:
value = self.read_sensor(stime)
except ValueError as e:
self._coordinator.logger.log_invalid_meter_value(stime, e)
except IUVolumeSensorError:
self._coordinator.logger.log_invalid_meter_id(stime, self._sensor_id)
else:
self._total_volume = value - self._start_volume
# Notifiy our trackers
for listener in list(self._listeners.values()):
await listener(
stime,
self._zone,
self._total_volume,
self._flow_rate_sma,
)
def start_record(self, stime: datetime) -> None:
"""Start recording volume information"""
def sensor_state_change(event: HAEvent):
event = self.event_hook(event)
try:
value = self.read_sensor(event.time_fired)
except ValueError as e:
self._coordinator.logger.log_invalid_meter_value(stime, e)
except IUVolumeSensorError:
self._coordinator.logger.log_invalid_meter_id(stime, self._sensor_id)
else:
self._total_volume = value - self._start_volume
# Notifiy our trackers
for listener in list(self._listeners.values()):
listener(
event.time_fired,
self._zone,
self._total_volume,
self._flow_rate_sma,
)
self.reset_readings()
if self._sensor_id is None:
return
self._start_volume = self._total_volume = None
self._start_time = stime
self._sensor_readings.clear()
self._flow_rates.clear()
self._flow_rate_sum = 0
self._flow_rate_sma = None
try:
self._start_volume = self.read_sensor(stime)
@@ -1214,7 +1233,7 @@ class IUVolume:
self._coordinator.logger.log_invalid_meter_id(stime, self._sensor_id)
else:
self._callback_remove = async_track_state_change_event(
self._hass, self._sensor_id, sensor_state_change
self._hass, self._sensor_id, self.sensor_state_change
)
IUVolume.trackers += 1
@@ -1262,7 +1281,6 @@ class IURunStatus(Enum):
return IURunStatus.RUNNING
if stime >= end_time:
return IURunStatus.EXPIRED
return IURunStatus.UNKNOWN
class IURun(IUBase):
@@ -1300,6 +1318,14 @@ class IURun(IUBase):
self._status = self._get_status(stime)
self.master_run: "IURun" = None
def __str__(self) -> str:
return (
f"status: {self._status.name}, "
f"start: {dt2lstr(self.start_time)}, "
f"end: {dt2lstr(self.end_time)}, "
f"schedule: {self.schedule_name}"
)
@property
def expired(self) -> bool:
"""Indicate if run has expired"""
@@ -1475,22 +1501,24 @@ class IURun(IUBase):
"""Update the count down timers"""
if self.running:
self._remaining_time = self._end_time - stime
total_duration: timedelta = self._end_time - self._start_time
time_elapsed: timedelta = stime - self._start_time
self._percent_complete = int((time_elapsed / total_duration) * 100)
duration: timedelta = self._end_time - self._start_time
elapsed: timedelta = stime - self._start_time
self._percent_complete = (
int((elapsed / duration) * 100) if duration > timedelta(0) else 0
)
return True
return False
def pause(self, stime: datetime) -> None:
"""Change the pause status of the run"""
if self._pause_time is not None:
if self.expired or self._pause_time is not None:
return
self._pause_time = stime
self.update_status(stime)
def resume(self, stime: datetime) -> None:
"""Resume a paused run"""
if self._pause_time is None:
if self.expired or self._pause_time is None:
return
delta = stime - self._pause_time
self._start_time += delta
@@ -1503,6 +1531,7 @@ class IURun(IUBase):
result = OrderedDict()
result[TIMELINE_START] = self._start_time
result[TIMELINE_END] = self._end_time
result[TIMELINE_SCHEDULE] = self.schedule.id1 if self.schedule else 0
result[TIMELINE_SCHEDULE_NAME] = self.schedule_name
result[TIMELINE_ADJUSTMENT] = self.adjustment
return result
@@ -1845,13 +1874,15 @@ class IUScheduleQueue(IURunQueue):
"""Add a manual run to the queue. Cancel any existing
manual or running schedule"""
if self._current_run is not None:
self.pop_run(0)
# Remove any existing manual schedules
if not queue:
if self._current_run is not None:
self.pop_run(0)
for manual in (run for run in self if run.is_manual()):
self.remove_run(manual)
elif self._current_run is not None and not self._current_run.is_manual():
self.pop_run(0)
self._current_run = None
self._next_run = None
@@ -2651,6 +2682,15 @@ class IUSequenceZone(IUBase):
result.append(run)
return result
def start_time(runs: list[IURun]) -> datetime:
result: datetime = None
for run in runs:
if result == None or run.start_time < result:
result = run.start_time
return result
runs = zone_runs()
start = start_time(runs)
result = {}
result[ATTR_INDEX] = self.index
result[ATTR_ENABLED] = self.enabled
@@ -2659,7 +2699,8 @@ class IUSequenceZone(IUBase):
result[ATTR_ICON] = self.icon()
result[ATTR_ADJUSTMENT] = str(self.adjustment)
result[ATTR_ZONE_IDS] = self.zone_ids
result[ATTR_DURATION] = str(calc_on_time(zone_runs()))
result[ATTR_START] = dt.as_local(start) if start else None
result[ATTR_DURATION] = str(calc_on_time(runs))
return result
def muster(self, stime: datetime) -> IURQStatus:
@@ -2732,6 +2773,14 @@ class IUSequenceRun(IUBase):
self._remaining_time = timedelta(0)
self._percent_complete: int = 0
def __str__(self) -> str:
return (
f"status: {self._status.name}, "
f"start: {dt2lstr(self.start_time)}, "
f"end: {dt2lstr(self.end_time)}, "
f"schedule: {self.schedule_name}"
)
@property
def sequence(self) -> "IUSequence":
"""Return the sequence associated with this run"""
@@ -2742,6 +2791,13 @@ class IUSequenceRun(IUBase):
"""Return the schedule associated with this run"""
return self._schedule
@property
def schedule_name(self) -> str:
"""Return the name of the schedule"""
if self._schedule is not None:
return self._schedule.name
return RES_MANUAL
@property
def start_time(self) -> datetime:
"""Return the start time for this sequence"""
@@ -2882,9 +2938,7 @@ class IUSequenceRun(IUBase):
self._accumulated_duration += run.duration
zone.request_update()
self._runs_pre_allocate.clear()
self._status = IURunStatus.status(
stime, self.start_time, self.end_time, self._paused
)
self.update_status(stime)
def first_zone(self) -> IUZone:
"""Return the first zone"""
@@ -2976,8 +3030,8 @@ class IUSequenceRun(IUBase):
run.start_time = max(run.start_time + duration, stime)
run.end_time = max(run.end_time + duration, run.start_time)
run.duration = run.end_time - run.start_time
run.update_status(stime)
run.update_time_remaining(stime)
run.update_status(stime)
if self.running:
if runs is None:
@@ -2993,8 +3047,9 @@ class IUSequenceRun(IUBase):
if end_time is None or run.end_time > end_time:
end_time = run.end_time
self._end_time = end_time
self.update()
self.update_time_remaining(stime)
self.update_status(stime)
self.update(stime)
def skip(self, stime: datetime) -> None:
"""Skip to the next sequence zone"""
@@ -3066,7 +3121,9 @@ class IUSequenceRun(IUBase):
return 3
return 0
def split_run(run: IURun, start: datetime, duration=timedelta(0)) -> None:
def split_run(
run: IURun, szr: IUSequenceZoneRun, start: datetime, duration=timedelta(0)
) -> None:
split = run.zone.runs.add(
stime,
start,
@@ -3075,17 +3132,17 @@ class IUSequenceRun(IUBase):
run.schedule,
self,
)
self._runs[split] = None
self._runs[split] = szr
if self._paused is not None:
return
runs = self._runs.copy()
pause_list = self._runs.copy()
over_run = timedelta(0)
for run in runs:
for run, szr in runs.items():
state = run_state(run)
if state == 2:
split_run(run, stime - self._controller.postamble)
split_run(run, szr, stime - self._controller.postamble)
elif state == 3:
# Create a master postamble run out
if (
@@ -3095,15 +3152,17 @@ class IUSequenceRun(IUBase):
over_run = -self._controller.postamble
run.master_run.start_time = stime + over_run
run.start_time = stime
split_run(run, stime, over_run)
split_run(run, szr, stime, over_run)
elif state == 6:
pause_list.pop(run)
elif state == 5:
split_run(run, stime)
split_run(run, szr, stime)
run.start_time = stime
run.master_run.start_time = stime - self._controller.preamble
elif state == 4:
split_run(run, run.master_run.end_time - self._controller.postamble)
split_run(
run, szr, run.master_run.end_time - self._controller.postamble
)
if over_run != timedelta(0):
self.advance(stime, -over_run, runs)
pause_run(stime, pause_list)
@@ -3125,9 +3184,7 @@ class IUSequenceRun(IUBase):
resume_run(stime, self._runs)
self._end_time += stime - self._paused
self._paused = None
self._status = IURunStatus.status(
stime, self._start_time, self._end_time, self._paused
)
self.update_status(stime)
next_start = min(
(run.start_time for run in self._runs if not run.expired), default=None
@@ -3145,30 +3202,32 @@ class IUSequenceRun(IUBase):
"""Cancel the sequence run"""
self.advance(stime, -(self._end_time - stime))
def update(self) -> bool:
"""Update the status of the sequence"""
async def update_volume(
self, stime: datetime, zone: IUZone, volume: Decimal, rate: Decimal
) -> None:
# pylint: disable=unused-argument
if self._active_zone not in self._volume_stats:
self._volume_stats[self._active_zone] = {}
self._volume_stats[self._active_zone][zone] = volume
self._sequence.volume = sum(
sum(sta.values()) for sta in self._volume_stats.values()
)
if (limit := self._active_zone.sequence_zone.volume) is not None:
current_vol = sum(self._volume_stats[self._active_zone].values())
if current_vol >= limit:
await self._coordinator._hass.services.async_call(
DOMAIN,
SERVICE_SKIP,
{ATTR_ENTITY_ID: self._sequence.entity_id},
)
def update_volume(
stime: datetime, zone: IUZone, volume: Decimal, rate: Decimal
) -> None:
# pylint: disable=unused-argument
if self._active_zone not in self._volume_stats:
self._volume_stats[self._active_zone] = {}
self._volume_stats[self._active_zone][zone] = volume
self._sequence.volume = sum(
sum(sta.values()) for sta in self._volume_stats.values()
)
if (limit := self._active_zone.sequence_zone.volume) is not None:
current_vol = sum(self._volume_stats[self._active_zone].values())
if current_vol >= limit:
self._coordinator.service_call(
SERVICE_SKIP, self._controller, None, self._sequence, {}
)
def update(self, stime: datetime) -> bool:
"""Update the status of the sequence"""
def enable_trackers(sequence_zone: IUSequenceZone) -> None:
for zone in sequence_zone.zones:
self._volume_trackers.append(
zone.volume.track_volume_change(self.uid, update_volume)
zone.volume.track_volume_change(self.uid, self.update_volume)
)
def remove_trackers() -> None:
@@ -3176,17 +3235,37 @@ class IUSequenceRun(IUBase):
tracker()
self._volume_trackers.clear()
def sumarise(stime: datetime) -> dict[IUSequenceZoneRun, dict]:
"""Summarise the runs within each sequence zone run. A dict
is returned with start, end and status"""
result: dict[IUSequenceZoneRun, dict] = {}
for run, szr in self._runs.items():
item = result.get(szr)
if item is None:
item = {}
item["start_time"] = run.start_time
item["end_time"] = run.end_time
result[szr] = item
else:
item["start_time"] = min(item["start_time"], run.start_time)
item["end_time"] = max(item["end_time"], run.end_time)
for item in result.values():
item["status"] = IURunStatus.status(
stime, item["start_time"], item["end_time"], self._paused
)
return result
if self.paused:
return False
return not self._sequence.is_paused
result = False
for run, sequence_zone_run in self._runs.items():
if sequence_zone_run is None:
continue
if run.running and not self.running:
sruns = sumarise(stime)
last_date = max((run["end_time"] for run in sruns.values()), default=None)
for szr, run in sruns.items():
if run["status"] == IURunStatus.RUNNING and not self.running:
# Sequence/sequence zone is starting
self._status = IURunStatus.RUNNING
self._active_zone = sequence_zone_run
self._current_zone = sequence_zone_run
self._active_zone = szr
self._current_zone = szr
self._coordinator.notify_sequence(
EVENT_START,
self._controller,
@@ -3194,24 +3273,24 @@ class IUSequenceRun(IUBase):
self._schedule,
self,
)
enable_trackers(sequence_zone_run.sequence_zone)
enable_trackers(szr.sequence_zone)
self._sequence.volume = None
result |= True
elif run.running and sequence_zone_run != self._active_zone:
elif run["status"] == IURunStatus.RUNNING and szr != self._active_zone:
# Sequence zone is changing
self._active_zone = sequence_zone_run
self._current_zone = sequence_zone_run
self._active_zone = szr
self._current_zone = szr
remove_trackers()
enable_trackers(sequence_zone_run.sequence_zone)
enable_trackers(szr.sequence_zone)
result |= True
elif not run.running and sequence_zone_run == self._active_zone:
elif run["status"] != IURunStatus.RUNNING and szr == self._active_zone:
# Sequence zone is finishing
self._active_zone = None
remove_trackers()
self._current_zone = self.next_sequence_zone(sequence_zone_run)
if self.run_index(run) == len(self._runs) - 1:
self._current_zone = self.next_sequence_zone(szr)
if run["end_time"] == last_date:
# Sequence is finishing
self._status = IURunStatus.EXPIRED
self._coordinator.notify_sequence(
@@ -3227,7 +3306,7 @@ class IUSequenceRun(IUBase):
def update_time_remaining(self, stime: datetime) -> bool:
"""Update the count down timers"""
if not self.running:
if not (self.running or self.paused):
return False
self._remaining_time = self._end_time - stime
elapsed = stime - self._start_time
@@ -3237,6 +3316,14 @@ class IUSequenceRun(IUBase):
)
return True
def _get_status(self, stime: datetime) -> IURunStatus:
"""Determine the state of this run"""
return IURunStatus.status(stime, self._start_time, self._end_time, self._paused)
def update_status(self, stime: datetime) -> None:
"""Update the status of the run"""
self._status = self._get_status(stime)
def as_dict(self, include_expired=False) -> dict:
"""Return this sequence run as a dict"""
result = {}
@@ -3418,7 +3505,7 @@ class IUSequenceQueue(list[IUSequenceRun]):
i -= 1
return modified
def update_queue(self) -> IURQStatus:
def update_queue(self, stime: datetime) -> IURQStatus:
"""Update the run queue"""
# pylint: disable=too-many-branches
status = IURQStatus(0)
@@ -3427,14 +3514,14 @@ class IUSequenceQueue(list[IUSequenceRun]):
status |= IURQStatus.SORTED
for run in self:
if run.update():
if run.update(stime):
self._current_run = None
self._next_run = None
status |= IURQStatus.CHANGED
if self._current_run is None:
for run in self:
if run.running and run.on_time() != timedelta(0):
if (run.running or run.paused) and run.on_time() != timedelta(0):
self._current_run = run
self._next_run = None
status |= IURQStatus.UPDATED
@@ -3442,7 +3529,7 @@ class IUSequenceQueue(list[IUSequenceRun]):
if self._next_run is None:
for run in self:
if not run.running and run.on_time() != timedelta(0):
if not (run.running or run.paused) and run.on_time() != timedelta(0):
self._next_run = run
status |= IURQStatus.UPDATED
break
@@ -4074,7 +4161,11 @@ class IUSequence(IUBase):
if duration is not None and duration == timedelta(0):
duration = None
self._controller.muster_sequence(
self._controller.manual_run_start(stime, delay, queue), self, None, duration
stime,
self._controller.manual_run_start(stime, delay, queue),
self,
None,
duration,
)
def service_cancel(self, data: MappingProxyType, stime: datetime) -> bool:
@@ -4498,6 +4589,7 @@ class IUController(IUBase):
def muster_sequence(
self,
stime: datetime,
earliest: datetime,
sequence: IUSequence,
schedule: IUSchedule,
total_time: timedelta = None,
@@ -4554,7 +4646,7 @@ class IUController(IUBase):
total_time = sequence_run.build(duration_factor)
if total_time > timedelta(0):
start_time = init_run_time(
stime, sequence, schedule, sequence_run.first_zone(), total_time
earliest, sequence, schedule, sequence_run.first_zone(), total_time
)
if start_time is not None:
sequence_run.allocate_runs(stime, start_time)
@@ -4594,34 +4686,35 @@ class IUController(IUBase):
sequence.runs.clear_runs()
zone_status |= sms
if not self._coordinator.tester.enabled or self._coordinator.tester.is_testing:
# pylint: disable=too-many-nested-blocks
# Process sequence schedules
for sequence in self._sequences:
if sequence.is_enabled:
for schedule in sequence.schedules:
if not schedule.enabled:
continue
next_time = stime
while True:
if self.muster_sequence(
next_time, sequence, schedule, None
).is_empty():
break
zone_status |= IURQStatus.EXTENDED
# Process sequence schedules
for sequence in self._sequences:
if sequence.is_enabled:
for schedule in sequence.schedules:
if not schedule.enabled:
continue
next_time = stime
while True:
if self.muster_sequence(
stime, next_time, sequence, schedule, None
).is_empty():
break
zone_status |= IURQStatus.EXTENDED
# Process zone schedules
for zone in self._zones:
if zone.is_enabled:
zone_status |= zone.muster_schedules(stime)
# Process zone schedules
for zone in self._zones:
if zone.is_enabled:
zone_status |= zone.muster_schedules(stime)
# Post processing
for sequence in self._sequences:
zone_status |= sequence.runs.update_queue()
sst = sequence.runs.update_queue(stime)
if sst.has_any(IURQStatus.UPDATED):
sequence.request_update()
zone_status |= sst
for zone in self._zones:
zts = zone.runs.update_queue()
if IURQStatus.CANCELED in zts:
if zts.has_any(IURQStatus.CANCELED | IURQStatus.UPDATED):
zone.request_update()
zone_status |= zts

View File

@@ -15,5 +15,5 @@
"requirements": [
"crontab"
],
"version": "2024.5.0"
"version": "2024.8.0"
}

View File

@@ -1,11 +1,13 @@
"""This module handles the HA service call interface"""
from homeassistant.core import ServiceCall, callback
from homeassistant.core import ServiceCall, SupportsResponse, ServiceResponse, callback
from homeassistant.util import dt
from homeassistant.helpers import entity_platform
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.service import async_register_admin_service
from homeassistant.const import (
SERVICE_RELOAD,
ATTR_ENTITY_ID,
)
from .irrigation_unlimited import IUCoordinator
@@ -35,6 +37,16 @@ from .const import (
SERVICE_SKIP,
SERVICE_PAUSE,
SERVICE_RESUME,
SERVICE_GET_INFO,
ATTR_VERSION,
ATTR_CONTROLLERS,
ATTR_CONTROLLER_ID,
ATTR_ZONES,
ATTR_ZONE_ID,
ATTR_SEQUENCES,
ATTR_INDEX,
ATTR_NAME,
ATTR_ZONE_IDS,
)
@@ -115,9 +127,54 @@ def register_component_services(
"""Reload schedule."""
coordinator.service_call(call.service, None, None, None, call.data)
@callback
async def get_info_service_handler(call: ServiceCall) -> ServiceResponse:
"""Return configuration"""
data = {}
data[ATTR_VERSION] = "1.0.0"
data[ATTR_CONTROLLERS] = [
{
ATTR_INDEX: ctl.index,
ATTR_CONTROLLER_ID: ctl.controller_id,
ATTR_NAME: ctl.name,
ATTR_ENTITY_ID: ctl.entity_id,
ATTR_ZONES: [
{
ATTR_INDEX: zone.index,
ATTR_ZONE_ID: zone.zone_id,
ATTR_NAME: zone.name,
ATTR_ENTITY_ID: zone.entity_id,
}
for zone in ctl.zones
],
ATTR_SEQUENCES: [
{
ATTR_INDEX: seq.index,
ATTR_NAME: seq.name,
ATTR_ENTITY_ID: seq.entity_id,
ATTR_ZONES: [
{ATTR_INDEX: sqz.index, ATTR_ZONE_IDS: sqz.zone_ids}
for sqz in seq.zones
],
}
for seq in ctl.sequences
],
}
for ctl in coordinator.controllers
]
return data
component.hass.services.async_register(
DOMAIN,
SERVICE_LOAD_SCHEDULE,
load_schedule_service_handler,
LOAD_SCHEDULE_SCHEMA,
)
component.hass.services.async_register(
DOMAIN,
SERVICE_GET_INFO,
get_info_service_handler,
{},
supports_response=SupportsResponse.ONLY,
)

View File

@@ -397,6 +397,10 @@ reload:
name: Reload
description: Reload the configuration
get_info:
name: Get Info
description: Get configuration information
load_schedule:
name: Load schedule
description: Load a schedule.

View File

@@ -0,0 +1,682 @@
"""Component to allow running Python scripts."""
import asyncio
import glob
import json
import logging
import os
import time
import traceback
from typing import Any, Callable, Dict, List, Set, Union
import voluptuous as vol
from watchdog.events import DirModifiedEvent, FileSystemEvent, FileSystemEventHandler
import watchdog.observers
from homeassistant.config import async_hass_config_yaml
from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry
from homeassistant.const import (
EVENT_HOMEASSISTANT_STARTED,
EVENT_HOMEASSISTANT_STOP,
EVENT_STATE_CHANGED,
SERVICE_RELOAD,
)
from homeassistant.core import Config, Event as HAEvent, HomeAssistant, ServiceCall
from homeassistant.exceptions import HomeAssistantError
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.restore_state import DATA_RESTORE_STATE
from homeassistant.loader import bind_hass
from .const import (
CONF_ALLOW_ALL_IMPORTS,
CONF_HASS_IS_GLOBAL,
CONFIG_ENTRY,
CONFIG_ENTRY_OLD,
DOMAIN,
FOLDER,
LOGGER_PATH,
REQUIREMENTS_FILE,
SERVICE_JUPYTER_KERNEL_START,
UNSUB_LISTENERS,
WATCHDOG_TASK,
)
from .eval import AstEval
from .event import Event
from .function import Function
from .global_ctx import GlobalContext, GlobalContextMgr
from .jupyter_kernel import Kernel
from .mqtt import Mqtt
from .requirements import install_requirements
from .state import State, StateVal
from .trigger import TrigTime
from .webhook import Webhook
_LOGGER = logging.getLogger(LOGGER_PATH)
PYSCRIPT_SCHEMA = vol.Schema(
{
vol.Optional(CONF_ALLOW_ALL_IMPORTS, default=False): cv.boolean,
vol.Optional(CONF_HASS_IS_GLOBAL, default=False): cv.boolean,
},
extra=vol.ALLOW_EXTRA,
)
CONFIG_SCHEMA = vol.Schema({DOMAIN: PYSCRIPT_SCHEMA}, extra=vol.ALLOW_EXTRA)
async def async_setup(hass: HomeAssistant, config: Config) -> bool:
"""Component setup, run import config flow for each entry in config."""
await restore_state(hass)
if DOMAIN in config:
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_IMPORT}, data=config[DOMAIN]
)
)
return True
async def restore_state(hass: HomeAssistant) -> None:
"""Restores the persisted pyscript state."""
# this is a hack accessing hass internals; should re-implement using RestoreEntity
restore_data = hass.data[DATA_RESTORE_STATE]
for entity_id, value in restore_data.last_states.items():
if entity_id.startswith("pyscript."):
last_state = value.state
hass.states.async_set(entity_id, last_state.state, last_state.attributes)
async def update_yaml_config(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
"""Update the yaml config."""
try:
conf = await async_hass_config_yaml(hass)
except HomeAssistantError as err:
_LOGGER.error(err)
return
config = PYSCRIPT_SCHEMA(conf.get(DOMAIN, {}))
#
# If data in config doesn't match config entry, trigger a config import
# so that the config entry can get updated
#
if config != config_entry.data:
await hass.config_entries.flow.async_init(DOMAIN, context={"source": SOURCE_IMPORT}, data=config)
#
# if hass_is_global or allow_all_imports have changed, we need to reload all scripts
# since they affect all scripts
#
config_save = {
param: config_entry.data.get(param, False) for param in [CONF_HASS_IS_GLOBAL, CONF_ALLOW_ALL_IMPORTS]
}
if DOMAIN not in hass.data:
hass.data.setdefault(DOMAIN, {})
if CONFIG_ENTRY_OLD in hass.data[DOMAIN]:
old_entry = hass.data[DOMAIN][CONFIG_ENTRY_OLD]
hass.data[DOMAIN][CONFIG_ENTRY_OLD] = config_save
for param in [CONF_HASS_IS_GLOBAL, CONF_ALLOW_ALL_IMPORTS]:
if old_entry.get(param, False) != config_entry.data.get(param, False):
return True
hass.data[DOMAIN][CONFIG_ENTRY_OLD] = config_save
return False
def start_global_contexts(global_ctx_only: str = None) -> None:
    """Start all the file and apps global contexts.

    With global_ctx_only None or "*", every file/apps/scripts context is
    started; otherwise only the named context and its children.  All
    selected contexts are flagged auto-start before any is started.
    """

    def selected(name: str) -> bool:
        """Return True if the named context is in scope for starting."""
        prefix, sep, _ = name.partition(".")
        if not sep or prefix not in {"file", "apps", "scripts"}:
            return False
        if global_ctx_only is None or global_ctx_only == "*":
            return True
        return name == global_ctx_only or name.startswith(global_ctx_only + ".")

    to_start = []
    for ctx_name, ctx in GlobalContextMgr.items():
        if selected(ctx_name):
            ctx.set_auto_start(True)
            to_start.append(ctx)
    for ctx in to_start:
        ctx.start()
async def watchdog_start(
    hass: HomeAssistant, pyscript_folder: str, reload_scripts_handler: Callable[[None], None]
) -> None:
    """Start watchdog thread to look for changed files in pyscript_folder.

    Spawns a watchdog Observer (in an executor thread) that forwards
    file-system events onto an asyncio queue, plus an event-loop task that
    debounces those events and calls reload_scripts_handler.
    """
    # only one watchdog task per hass instance
    if WATCHDOG_TASK in hass.data[DOMAIN]:
        return
    class WatchDogHandler(FileSystemEventHandler):
        """Class for handling watchdog events."""
        def __init__(
            self, watchdog_q: asyncio.Queue, observer: watchdog.observers.Observer, path: str
        ) -> None:
            """Register with the observer and hook HA start/stop events."""
            self.watchdog_q = watchdog_q
            self._observer = observer
            self._observer.schedule(self, path, recursive=True)
            # defer starting the observer until HA has finished starting
            if not hass.is_running:
                hass.bus.listen_once(EVENT_HOMEASSISTANT_STARTED, self.startup)
            else:
                self.startup(None)
            hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, self.shutdown)
            _LOGGER.debug("watchdog init path=%s", path)
        def startup(self, event: Event | None) -> None:
            """Start the observer."""
            _LOGGER.debug("watchdog startup")
            self._observer.start()
        def shutdown(self, event: Event | None) -> None:
            """Stop the observer."""
            self._observer.stop()
            self._observer.join()
            _LOGGER.debug("watchdog shutdown")
        def process(self, event: FileSystemEvent) -> None:
            """Send watchdog events to main loop task."""
            _LOGGER.debug("watchdog process(%s)", event)
            # watchdog callbacks run in the observer thread; hand off to the event loop
            hass.loop.call_soon_threadsafe(self.watchdog_q.put_nowait, event)
        def on_modified(self, event: FileSystemEvent) -> None:
            """File modified."""
            self.process(event)
        def on_moved(self, event: FileSystemEvent) -> None:
            """File moved."""
            self.process(event)
        def on_created(self, event: FileSystemEvent) -> None:
            """File created."""
            self.process(event)
        def on_deleted(self, event: FileSystemEvent) -> None:
            """File deleted."""
            self.process(event)
    async def task_watchdog(watchdog_q: asyncio.Queue) -> None:
        """Consume file-system events from the queue and trigger debounced reloads."""
        def check_event(event, do_reload: bool) -> bool:
            """Check if event should trigger a reload."""
            if event.is_directory:
                # don't reload if it's just a directory modified
                if isinstance(event, DirModifiedEvent):
                    return do_reload
                return True
            # only reload if it's a script, yaml, or requirements.txt file
            for valid_suffix in [".py", ".yaml", "/" + REQUIREMENTS_FILE]:
                if event.src_path.endswith(valid_suffix):
                    return True
            return do_reload
        while True:
            try:
                #
                # since some file/dir changes create multiple events, we consume all
                # events in a small window; first wait indefinitely for next event
                #
                do_reload = check_event(await watchdog_q.get(), False)
                #
                # now consume all additional events with 50ms timeout or 500ms elapsed
                #
                t_start = time.monotonic()
                while time.monotonic() - t_start < 0.5:
                    try:
                        do_reload = check_event(
                            await asyncio.wait_for(watchdog_q.get(), timeout=0.05), do_reload
                        )
                    except asyncio.TimeoutError:
                        break
                if do_reload:
                    await reload_scripts_handler(None)
            except asyncio.CancelledError:
                raise
            except Exception:
                # keep the watchdog alive on unexpected errors
                _LOGGER.error("task_watchdog: got exception %s", traceback.format_exc(-1))
    watchdog_q = asyncio.Queue(0)
    observer = watchdog.observers.Observer()
    if observer is not None:
        # don't run watchdog when we are testing (Observer() patches to None)
        hass.data[DOMAIN][WATCHDOG_TASK] = Function.create_task(task_watchdog(watchdog_q))
        await hass.async_add_executor_job(WatchDogHandler, watchdog_q, observer, pyscript_folder)
        _LOGGER.debug("watchdog started job and task folder=%s", pyscript_folder)
async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
    """Initialize the pyscript config entry.

    Initializes all pyscript subsystems, loads the scripts, registers the
    reload and jupyter_kernel_start services, and hooks HA start/stop and
    state-changed events.  Also called again on reload of the entry.
    """
    global_ctx_only = None
    doing_reload = False
    if Function.hass:
        #
        # reload yaml if this isn't the first time (ie, on reload)
        #
        doing_reload = True
        if await update_yaml_config(hass, config_entry):
            # global parameters changed: everything must be reloaded
            global_ctx_only = "*"
    # initialize all subsystems with the hass instance
    Function.init(hass)
    Event.init(hass)
    Mqtt.init(hass)
    TrigTime.init(hass)
    State.init(hass)
    Webhook.init(hass)
    State.register_functions()
    GlobalContextMgr.init()
    pyscript_folder = hass.config.path(FOLDER)
    if not await hass.async_add_executor_job(os.path.isdir, pyscript_folder):
        _LOGGER.debug("Folder %s not found in configuration folder, creating it", FOLDER)
        await hass.async_add_executor_job(os.makedirs, pyscript_folder)
    hass.data.setdefault(DOMAIN, {})
    hass.data[DOMAIN][CONFIG_ENTRY] = config_entry
    hass.data[DOMAIN][UNSUB_LISTENERS] = []
    State.set_pyscript_config(config_entry.data)
    await install_requirements(hass, config_entry, pyscript_folder)
    await load_scripts(hass, config_entry.data, global_ctx_only=global_ctx_only)
    async def reload_scripts_handler(call: ServiceCall) -> None:
        """Handle reload service calls."""
        _LOGGER.debug("reload: yaml, reloading scripts, and restarting")
        # an optional global_ctx service parameter restricts what is reloaded
        global_ctx_only = call.data.get("global_ctx", None) if call else None
        if await update_yaml_config(hass, config_entry):
            global_ctx_only = "*"
        State.set_pyscript_config(config_entry.data)
        await State.get_service_params()
        await install_requirements(hass, config_entry, pyscript_folder)
        await load_scripts(hass, config_entry.data, global_ctx_only=global_ctx_only)
        start_global_contexts(global_ctx_only=global_ctx_only)
    hass.services.async_register(DOMAIN, SERVICE_RELOAD, reload_scripts_handler)
    async def jupyter_kernel_start(call: ServiceCall) -> None:
        """Handle Jupyter kernel start call."""
        _LOGGER.debug("service call to jupyter_kernel_start: %s", call.data)
        global_ctx_name = GlobalContextMgr.new_name("jupyter_")
        global_ctx = GlobalContext(
            global_ctx_name, global_sym_table={"__name__": global_ctx_name}, manager=GlobalContextMgr
        )
        global_ctx.set_auto_start(True)
        GlobalContextMgr.set(global_ctx_name, global_ctx)
        ast_ctx = AstEval(global_ctx_name, global_ctx)
        Function.install_ast_funcs(ast_ctx)
        kernel = Kernel(call.data, ast_ctx, global_ctx, global_ctx_name)
        await kernel.session_start()
        # publish the kernel's port numbers in the caller-provided state variable
        hass.states.async_set(call.data["state_var"], json.dumps(kernel.get_ports()))
        def state_var_remove():
            """Remove the kernel's state variable when the session ends."""
            hass.states.async_remove(call.data["state_var"])
        kernel.set_session_cleanup_callback(state_var_remove)
    hass.services.async_register(DOMAIN, SERVICE_JUPYTER_KERNEL_START, jupyter_kernel_start)
    async def state_changed(event: HAEvent) -> None:
        """Forward hass state-changed events to pyscript state triggers."""
        var_name = event.data["entity_id"]
        if event.data.get("new_state", None):
            new_val = StateVal(event.data["new_state"])
        else:
            # state variable has been deleted
            new_val = None
        if event.data.get("old_state", None):
            old_val = StateVal(event.data["old_state"])
        else:
            # no previous state
            old_val = None
        new_vars = {var_name: new_val, f"{var_name}.old": old_val}
        func_args = {
            "trigger_type": "state",
            "var_name": var_name,
            "value": new_val,
            "old_value": old_val,
            "context": event.context,
        }
        await State.update(new_vars, func_args)
    async def hass_started(event: HAEvent) -> None:
        """Once HA has started: subscribe to state changes and start all contexts."""
        _LOGGER.debug("adding state changed listener and starting global contexts")
        await State.get_service_params()
        hass.data[DOMAIN][UNSUB_LISTENERS].append(hass.bus.async_listen(EVENT_STATE_CHANGED, state_changed))
        start_global_contexts()
    async def hass_stop(event: HAEvent) -> None:
        """On HA shutdown: stop the watchdog, unload scripts, stop helper tasks."""
        if WATCHDOG_TASK in hass.data[DOMAIN]:
            Function.reaper_cancel(hass.data[DOMAIN][WATCHDOG_TASK])
            del hass.data[DOMAIN][WATCHDOG_TASK]
        _LOGGER.debug("stopping global contexts")
        await unload_scripts(unload_all=True)
        # sync with waiter, and then tell waiter and reaper tasks to exit
        await Function.waiter_sync()
        await Function.waiter_stop()
        await Function.reaper_stop()
    # Store callbacks to event listeners so we can unsubscribe on unload
    hass.data[DOMAIN][UNSUB_LISTENERS].append(
        hass.bus.async_listen(EVENT_HOMEASSISTANT_STARTED, hass_started)
    )
    hass.data[DOMAIN][UNSUB_LISTENERS].append(hass.bus.async_listen(EVENT_HOMEASSISTANT_STOP, hass_stop))
    await watchdog_start(hass, pyscript_folder, reload_scripts_handler)
    if doing_reload:
        # on reload, (re)start everything now since HA is already running
        start_global_contexts(global_ctx_only="*")
    return True
async def async_unload_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
    """Unload a config entry.

    Stops and removes every loaded script, unsubscribes all registered
    event listeners, and shuts down the internal waiter and reaper tasks.
    """
    _LOGGER.info("Unloading all scripts")
    await unload_scripts(unload_all=True)
    # drop every event-bus subscription registered during setup
    unsubs = hass.data[DOMAIN][UNSUB_LISTENERS]
    hass.data[DOMAIN][UNSUB_LISTENERS] = []
    for unsub in unsubs:
        unsub()
    # sync with the waiter, then tell the waiter and reaper tasks to exit
    await Function.waiter_sync()
    await Function.waiter_stop()
    await Function.reaper_stop()
    return True
async def unload_scripts(global_ctx_only: str = None, unload_all: bool = False) -> None:
    """Unload all scripts from GlobalContextMgr with given name prefixes.

    With unload_all=True every context is in scope; otherwise only
    file/apps/modules/scripts contexts.  If global_ctx_only is given, only
    that context and its children are unloaded.
    """

    def in_scope(name: str) -> bool:
        """Decide whether the named global context should be unloaded."""
        if not unload_all:
            root, sep, _ = name.partition(".")
            if not sep or root not in {"file", "apps", "modules", "scripts"}:
                return False
        if global_ctx_only is not None:
            return name == global_ctx_only or name.startswith(global_ctx_only + ".")
        return True

    doomed = {}
    for ctx_name, ctx in GlobalContextMgr.items():
        if not in_scope(ctx_name):
            continue
        ctx.stop()
        doomed[ctx_name] = ctx
    for ctx_name in doomed:
        GlobalContextMgr.delete(ctx_name)
    await Function.waiter_sync()
@bind_hass
async def load_scripts(hass: HomeAssistant, config_data: Dict[str, Any], global_ctx_only: str = None):
    """Load all python scripts in FOLDER.

    Scans the pyscript folder (top-level files, apps/, modules/, scripts/),
    determines which global contexts must be deleted or (re)loaded based on
    global_ctx_only and on source/mtime/app-config changes (including
    transitive imports of reloaded modules), then deletes and reloads them.
    """
    class SourceFile:
        """Class for information about a source file."""
        def __init__(
            self,
            global_ctx_name=None,
            file_path=None,
            rel_path=None,
            rel_import_path=None,
            fq_mod_name=None,
            check_config=None,
            app_config=None,
            source=None,
            mtime=None,
            autoload=None,
        ):
            """Initialize the source-file record; all metadata passed by keyword."""
            self.global_ctx_name = global_ctx_name
            self.file_path = file_path
            self.rel_path = rel_path
            self.rel_import_path = rel_import_path
            self.fq_mod_name = fq_mod_name
            self.check_config = check_config
            self.app_config = app_config
            self.source = source
            self.mtime = mtime
            self.autoload = autoload
            # set later in this function: True when this file must be (re)loaded
            self.force = False
    pyscript_dir = hass.config.path(FOLDER)
    def glob_read_files(
        load_paths: List[Set[Union[str, bool]]], apps_config: Dict[str, Any]
    ) -> Dict[str, SourceFile]:
        """Expand globs and read all the source files.

        Runs in an executor (blocking file I/O).  Returns a dict mapping
        global context name -> SourceFile.
        """
        ctx2source = {}
        for path, match, check_config, autoload in load_paths:
            for this_path in sorted(glob.glob(os.path.join(pyscript_dir, path, match), recursive=True)):
                rel_import_path = None
                # compute the path relative to the pyscript folder
                rel_path = this_path
                if rel_path.startswith(pyscript_dir):
                    rel_path = rel_path[len(pyscript_dir) :]
                if rel_path.startswith("/"):
                    rel_path = rel_path[1:]
                if rel_path[0] == "#" or rel_path.find("/#") >= 0:
                    # skip "commented" files and directories
                    continue
                # strip the trailing ".py" to get the module name
                mod_name = rel_path[0:-3]
                if mod_name.endswith("/__init__"):
                    rel_import_path = mod_name
                    mod_name = mod_name[: -len("/__init__")]
                mod_name = mod_name.replace("/", ".")
                if path == "":
                    # top-level scripts get a "file." context-name prefix
                    global_ctx_name = f"file.{mod_name}"
                    fq_mod_name = mod_name
                else:
                    fq_mod_name = global_ctx_name = mod_name
                    i = fq_mod_name.find(".")
                    if i >= 0:
                        fq_mod_name = fq_mod_name[i + 1 :]
                app_config = None
                if global_ctx_name in ctx2source:
                    # the globs result in apps/APP/__init__.py matching twice, so skip the 2nd time
                    # also skip apps/APP.py if apps/APP/__init__.py is present
                    continue
                if check_config:
                    # apps are only loaded if they have a config section
                    app_name = fq_mod_name
                    i = app_name.find(".")
                    if i >= 0:
                        app_name = app_name[0:i]
                    if not isinstance(apps_config, dict) or app_name not in apps_config:
                        _LOGGER.debug(
                            "load_scripts: skipping %s (app_name=%s) because config not present",
                            this_path,
                            app_name,
                        )
                        continue
                    app_config = apps_config[app_name]
                try:
                    with open(this_path, encoding="utf-8") as file_desc:
                        source = file_desc.read()
                    mtime = os.path.getmtime(this_path)
                except Exception as exc:
                    _LOGGER.error("load_scripts: skipping %s due to exception %s", this_path, exc)
                    continue
                ctx2source[global_ctx_name] = SourceFile(
                    global_ctx_name=global_ctx_name,
                    file_path=this_path,
                    rel_path=rel_path,
                    rel_import_path=rel_import_path,
                    fq_mod_name=fq_mod_name,
                    check_config=check_config,
                    app_config=app_config,
                    source=source,
                    mtime=mtime,
                    autoload=autoload,
                )
        return ctx2source
    load_paths = [
        # path, glob, check_config, autoload
        ["", "*.py", False, True],
        ["apps", "*/__init__.py", True, True],
        ["apps", "*.py", True, True],
        ["apps", "*/**/*.py", False, False],
        ["modules", "*/__init__.py", False, False],
        ["modules", "*.py", False, False],
        ["modules", "*/**/*.py", False, False],
        ["scripts", "**/*.py", False, True],
    ]
    #
    # get current global contexts
    #
    ctx_all = {}
    for global_ctx_name, global_ctx in GlobalContextMgr.items():
        idx = global_ctx_name.find(".")
        if idx < 0 or global_ctx_name[0:idx] not in {"file", "apps", "modules", "scripts"}:
            continue
        ctx_all[global_ctx_name] = global_ctx
    #
    # get list and contents of all source files
    #
    apps_config = config_data.get("apps", None)
    ctx2files = await hass.async_add_executor_job(glob_read_files, load_paths, apps_config)
    #
    # figure out what to reload based on global_ctx_only and what's changed
    #
    ctx_delete = set()
    if global_ctx_only is not None and global_ctx_only != "*":
        # a single named context (and its children) was requested
        if global_ctx_only not in ctx_all and global_ctx_only not in ctx2files:
            _LOGGER.error("pyscript.reload: no global context '%s' to reload", global_ctx_only)
            return
        if global_ctx_only not in ctx2files:
            ctx_delete.add(global_ctx_only)
        else:
            ctx2files[global_ctx_only].force = True
    elif global_ctx_only == "*":
        # full reload: everything is deleted and every file forced
        ctx_delete = set(ctx_all.keys())
        for _, src_info in ctx2files.items():
            src_info.force = True
    else:
        # delete all global_ctxs that aren't present in current files
        for global_ctx_name, global_ctx in ctx_all.items():
            if global_ctx_name not in ctx2files:
                ctx_delete.add(global_ctx_name)
        # delete all global_ctxs that have changed source or mtime
        for global_ctx_name, src_info in ctx2files.items():
            if global_ctx_name in ctx_all:
                ctx = ctx_all[global_ctx_name]
                if (
                    src_info.source != ctx.get_source()
                    or src_info.app_config != ctx.get_app_config()
                    or src_info.mtime != ctx.get_mtime()
                ):
                    ctx_delete.add(global_ctx_name)
                    src_info.force = True
            else:
                src_info.force = src_info.autoload
    #
    # force reload if any file uses a module that is being reloaded by
    # recursively following each import; first find which modules are
    # being reloaded
    #
    will_reload = set()
    for global_ctx_name, src_info in ctx2files.items():
        if global_ctx_name.startswith("modules.") and (global_ctx_name in ctx_delete or src_info.force):
            parts = global_ctx_name.split(".")
            root = f"{parts[0]}.{parts[1]}"
            will_reload.add(root)
    if len(will_reload) > 0:
        def import_recurse(ctx_name, visited, ctx2imports):
            """Return the transitive set of modules imported by ctx_name."""
            if ctx_name in visited or ctx_name in ctx2imports:
                return ctx2imports.get(ctx_name, set())
            visited.add(ctx_name)
            ctx = GlobalContextMgr.get(ctx_name)
            if not ctx:
                return set()
            ctx2imports[ctx_name] = set()
            for imp_name in ctx.get_imports():
                ctx2imports[ctx_name].add(imp_name)
                ctx2imports[ctx_name].update(import_recurse(imp_name, visited, ctx2imports))
            return ctx2imports[ctx_name]
        ctx2imports = {}
        for global_ctx_name, global_ctx in ctx_all.items():
            if global_ctx_name not in ctx2imports:
                visited = set()
                import_recurse(global_ctx_name, visited, ctx2imports)
            for mod_name in ctx2imports.get(global_ctx_name, set()):
                parts = mod_name.split(".")
                root = f"{parts[0]}.{parts[1]}"
                if root in will_reload:
                    # this context imports a module being reloaded, so reload it too
                    ctx_delete.add(global_ctx_name)
                    if global_ctx_name in ctx2files:
                        ctx2files[global_ctx_name].force = True
    #
    # if any file in an app or module has changed, then reload just the top-level
    # __init__.py or module/app .py file, and delete everything else
    #
    done = set()
    for global_ctx_name, src_info in ctx2files.items():
        if not src_info.force:
            continue
        if not global_ctx_name.startswith("apps.") and not global_ctx_name.startswith("modules."):
            continue
        parts = global_ctx_name.split(".")
        root = f"{parts[0]}.{parts[1]}"
        if root in done:
            continue
        pkg_path = f"{parts[0]}/{parts[1]}/__init__.py"
        mod_path = f"{parts[0]}/{parts[1]}.py"
        for ctx_name, this_src_info in ctx2files.items():
            if ctx_name == root or ctx_name.startswith(f"{root}."):
                # only the package/module entry point is force-loaded
                if this_src_info.rel_path in {pkg_path, mod_path}:
                    this_src_info.force = True
                else:
                    this_src_info.force = False
                ctx_delete.add(ctx_name)
        done.add(root)
    #
    # delete contexts that are no longer needed or will be reloaded
    #
    for global_ctx_name in ctx_delete:
        if global_ctx_name in ctx_all:
            global_ctx = ctx_all[global_ctx_name]
            global_ctx.stop()
            if global_ctx_name not in ctx2files or not ctx2files[global_ctx_name].autoload:
                _LOGGER.info("Unloaded %s", global_ctx.get_file_path())
            GlobalContextMgr.delete(global_ctx_name)
    await Function.waiter_sync()
    #
    # now load the requested files, and files that depend on loaded files
    #
    for global_ctx_name, src_info in sorted(ctx2files.items()):
        if not src_info.autoload or not src_info.force:
            continue
        global_ctx = GlobalContext(
            src_info.global_ctx_name,
            global_sym_table={"__name__": src_info.fq_mod_name},
            manager=GlobalContextMgr,
            rel_import_path=src_info.rel_import_path,
            app_config=src_info.app_config,
            source=src_info.source,
            mtime=src_info.mtime,
        )
        reload = src_info.global_ctx_name in ctx_delete
        await GlobalContextMgr.load_file(
            global_ctx, src_info.file_path, source=src_info.source, reload=reload
        )

View File

@@ -0,0 +1,139 @@
"""Config flow for pyscript."""
import json
from typing import Any, Dict
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry
from homeassistant.core import callback
from .const import CONF_ALLOW_ALL_IMPORTS, CONF_HASS_IS_GLOBAL, CONF_INSTALLED_PACKAGES, DOMAIN
CONF_BOOL_ALL = {CONF_ALLOW_ALL_IMPORTS, CONF_HASS_IS_GLOBAL}
PYSCRIPT_SCHEMA = vol.Schema(
{
vol.Optional(CONF_ALLOW_ALL_IMPORTS, default=False): bool,
vol.Optional(CONF_HASS_IS_GLOBAL, default=False): bool,
},
extra=vol.ALLOW_EXTRA,
)
class PyscriptOptionsConfigFlow(config_entries.OptionsFlow):
    """Options flow for pyscript.

    YAML-imported entries cannot be edited from the UI; UI-created entries
    may toggle the boolean options (allow_all_imports, hass_is_global).
    """

    def __init__(self, config_entry: ConfigEntry) -> None:
        """Initialize pyscript options flow."""
        self.config_entry = config_entry
        # when True, the next informational step renders its form once
        self._show_form = False

    async def async_step_init(self, user_input: Dict[str, Any] = None) -> Dict[str, Any]:
        """Manage the pyscript options."""
        # YAML-imported entries are configured in configuration.yaml, not the UI
        if self.config_entry.source == SOURCE_IMPORT:
            self._show_form = True
            return await self.async_step_no_ui_configuration_allowed()
        current = self.config_entry.data
        if user_input is None:
            # first pass: render the form with the current values as defaults
            fields = {vol.Optional(name, default=current.get(name, False)): bool for name in CONF_BOOL_ALL}
            return self.async_show_form(
                step_id="init",
                data_schema=vol.Schema(fields, extra=vol.ALLOW_EXTRA),
            )
        changed = any(name not in current or user_input[name] != current[name] for name in CONF_BOOL_ALL)
        if changed:
            merged = {**current, **user_input}
            self.hass.config_entries.async_update_entry(entry=self.config_entry, data=merged)
            return self.async_create_entry(title="", data={})
        self._show_form = True
        return await self.async_step_no_update()

    async def async_step_no_ui_configuration_allowed(
        self, user_input: Dict[str, Any] = None
    ) -> Dict[str, Any]:
        """Tell user no UI configuration is allowed."""
        if not self._show_form:
            return self.async_create_entry(title="", data={})
        self._show_form = False
        return self.async_show_form(step_id="no_ui_configuration_allowed", data_schema=vol.Schema({}))

    async def async_step_no_update(self, user_input: Dict[str, Any] = None) -> Dict[str, Any]:
        """Tell user no update to process."""
        if not self._show_form:
            return self.async_create_entry(title="", data={})
        self._show_form = False
        return self.async_show_form(step_id="no_update", data_schema=vol.Schema({}))
class PyscriptConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
    """Handle a pyscript config flow."""
    # config entry schema version
    VERSION = 1
    CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_PUSH
    @staticmethod
    @callback
    def async_get_options_flow(config_entry: ConfigEntry) -> PyscriptOptionsConfigFlow:
        """Get the options flow for this handler."""
        return PyscriptOptionsConfigFlow(config_entry)
    async def async_step_user(self, user_input: Dict[str, Any] = None) -> Dict[str, Any]:
        """Handle a flow initialized by the user."""
        if user_input is not None:
            # only a single pyscript config entry is allowed
            if len(self.hass.config_entries.async_entries(DOMAIN)) > 0:
                return self.async_abort(reason="single_instance_allowed")
            await self.async_set_unique_id(DOMAIN)
            return self.async_create_entry(title=DOMAIN, data=user_input)
        return self.async_show_form(step_id="user", data_schema=PYSCRIPT_SCHEMA)
    async def async_step_import(self, import_config: Dict[str, Any] = None) -> Dict[str, Any]:
        """Import a config entry from configuration.yaml.

        Merges the imported YAML config into an existing entry (if any),
        reloading it when the data changed; otherwise creates a new entry.
        """
        # Convert OrderedDict to dict
        import_config = json.loads(json.dumps(import_config))
        # Check if import config entry matches any existing config entries
        # so we can update it if necessary
        entries = self.hass.config_entries.async_entries(DOMAIN)
        if entries:
            entry = entries[0]
            updated_data = entry.data.copy()
            # Update values for all keys, excluding `allow_all_imports` for entries
            # set up through the UI.
            for key, val in import_config.items():
                if entry.source == SOURCE_IMPORT or key not in CONF_BOOL_ALL:
                    updated_data[key] = val
            # Remove values for all keys in entry.data that are not in the imported config,
            # excluding `allow_all_imports` for entries set up through the UI.
            for key in entry.data:
                if (
                    (entry.source == SOURCE_IMPORT or key not in CONF_BOOL_ALL)
                    and key != CONF_INSTALLED_PACKAGES
                    and key not in import_config
                ):
                    updated_data.pop(key)
            # Update and reload entry if data needs to be updated
            if updated_data != entry.data:
                self.hass.config_entries.async_update_entry(entry=entry, data=updated_data)
                return self.async_abort(reason="updated_entry")
            return self.async_abort(reason="already_configured")
        return await self.async_step_user(user_input=import_config)

View File

@@ -0,0 +1,63 @@
"""Define pyscript-wide constants."""
#
# 2023.7 supports service response; handle older versions by defaulting enum
# Should eventually deprecate this and just use SupportsResponse import
#
try:
from homeassistant.core import SupportsResponse
SERVICE_RESPONSE_NONE = SupportsResponse.NONE
SERVICE_RESPONSE_OPTIONAL = SupportsResponse.OPTIONAL
SERVICE_RESPONSE_ONLY = SupportsResponse.ONLY
except ImportError:
SERVICE_RESPONSE_NONE = None
SERVICE_RESPONSE_OPTIONAL = None
SERVICE_RESPONSE_ONLY = None
DOMAIN = "pyscript"
CONFIG_ENTRY = "config_entry"
CONFIG_ENTRY_OLD = "config_entry_old"
UNSUB_LISTENERS = "unsub_listeners"
FOLDER = "pyscript"
UNPINNED_VERSION = "_unpinned_version"
ATTR_INSTALLED_VERSION = "installed_version"
ATTR_SOURCES = "sources"
ATTR_VERSION = "version"
CONF_ALLOW_ALL_IMPORTS = "allow_all_imports"
CONF_HASS_IS_GLOBAL = "hass_is_global"
CONF_INSTALLED_PACKAGES = "_installed_packages"
SERVICE_JUPYTER_KERNEL_START = "jupyter_kernel_start"
LOGGER_PATH = "custom_components.pyscript"
REQUIREMENTS_FILE = "requirements.txt"
REQUIREMENTS_PATHS = ("", "apps/*", "modules/*", "scripts/**")
WATCHDOG_TASK = "watch_dog_task"
ALLOWED_IMPORTS = {
"black",
"cmath",
"datetime",
"decimal",
"fractions",
"functools",
"homeassistant.const",
"isort",
"json",
"math",
"number",
"random",
"re",
"statistics",
"string",
"time",
"voluptuous",
}

View File

@@ -0,0 +1,19 @@
"""Entity Classes."""
from homeassistant.const import STATE_UNKNOWN
from homeassistant.helpers.restore_state import RestoreEntity
from homeassistant.helpers.typing import StateType
class PyscriptEntity(RestoreEntity):
    """Generic Pyscript Entity.

    Thin RestoreEntity subclass whose state and attributes are pushed in
    by pyscript via the setters below.
    """
    # extra attributes published alongside the entity state
    _attr_extra_state_attributes: dict
    # current state value; unknown until set_state() is called
    _attr_state: StateType = STATE_UNKNOWN
    def set_state(self, state: StateType) -> None:
        """Set the state."""
        self._attr_state = state
    def set_attributes(self, attributes: dict) -> None:
        """Set Attributes."""
        self._attr_extra_state_attributes = attributes

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,76 @@
"""Handles event firing and notification."""
import logging
from .const import LOGGER_PATH
_LOGGER = logging.getLogger(LOGGER_PATH + ".event")
class Event:
    """Event subscription manager (class-level singleton).

    Tracks, per event type, the set of notification queues and the active
    hass bus listener; the listener is added when the first queue
    subscribes and removed when the last one unsubscribes.
    """

    # Global hass instance
    hass = None
    # per-event-type set of notification queues
    notify = {}
    # per-event-type callback that unsubscribes the bus listener
    notify_remove = {}

    def __init__(self):
        """Warn on Event instantiation."""
        _LOGGER.error("Event class is not meant to be instantiated")

    @classmethod
    def init(cls, hass):
        """Initialize Event."""
        cls.hass = hass

    @classmethod
    async def event_listener(cls, event):
        """Listen callback for given event which updates any notifications."""
        func_args = dict(
            trigger_type="event",
            event_type=event.event_type,
            context=event.context,
        )
        func_args.update(event.data)
        await cls.update(event.event_type, func_args)

    @classmethod
    def notify_add(cls, event_type, queue):
        """Register to notify for events of given type to be sent to queue."""
        subscribers = cls.notify.get(event_type)
        if subscribers is None:
            # first subscriber for this event type: hook the hass bus
            subscribers = cls.notify[event_type] = set()
            _LOGGER.debug("event.notify_add(%s) -> adding event listener", event_type)
            cls.notify_remove[event_type] = cls.hass.bus.async_listen(event_type, cls.event_listener)
        subscribers.add(queue)

    @classmethod
    def notify_del(cls, event_type, queue):
        """Unregister to notify for events of given type for given queue."""
        subscribers = cls.notify.get(event_type)
        if subscribers is None or queue not in subscribers:
            return
        subscribers.discard(queue)
        if not subscribers:
            # last subscriber gone: unhook the hass bus listener
            cls.notify_remove[event_type]()
            _LOGGER.debug("event.notify_del(%s) -> removing event listener", event_type)
            del cls.notify[event_type]
            del cls.notify_remove[event_type]

    @classmethod
    async def update(cls, event_type, func_args):
        """Deliver all notifications for an event of the given type."""
        _LOGGER.debug("event.update(%s, %s)", event_type, func_args)
        for queue in cls.notify.get(event_type, ()):
            await queue.put(["event", func_args.copy()])

View File

@@ -0,0 +1,519 @@
"""Function call handling."""
import asyncio
import logging
import traceback
from homeassistant.core import Context
from .const import LOGGER_PATH, SERVICE_RESPONSE_NONE, SERVICE_RESPONSE_ONLY
_LOGGER = logging.getLogger(LOGGER_PATH + ".function")
class Function:
"""Define function handler functions."""
#
# Global hass instance
#
hass = None
#
# Mappings of tasks ids <-> task names
#
unique_task2name = {}
unique_name2task = {}
#
# Mappings of task id to hass contexts
task2context = {}
#
# Set of tasks that are running
#
our_tasks = set()
#
# Done callbacks for each task
#
task2cb = {}
#
# initial list of available functions
#
functions = {}
#
# Functions that take the AstEval context as a first argument,
# which is needed by a handful of special functions that need the
# ast context
#
ast_functions = {}
#
# task id of the task that cancels and waits for other tasks,
#
task_reaper = None
task_reaper_q = None
#
# task id of the task that awaits for coros (used by shutdown triggers)
#
task_waiter = None
task_waiter_q = None
#
# reference counting for service registrations; the new @service trigger
# registers the service call before the old one is removed, so we only
# remove the service registration when the reference count goes to zero
#
service_cnt = {}
#
# save the global_ctx name where a service is registered so we can raise
# an exception if it gets registered by a different global_ctx.
#
service2global_ctx = {}
def __init__(self):
"""Warn on Function instantiation."""
_LOGGER.error("Function class is not meant to be instantiated")
@classmethod
def init(cls, hass):
"""Initialize Function."""
cls.hass = hass
cls.functions.update(
{
"event.fire": cls.event_fire,
"service.call": cls.service_call,
"service.has_service": cls.service_has_service,
"task.cancel": cls.user_task_cancel,
"task.current_task": cls.user_task_current_task,
"task.remove_done_callback": cls.user_task_remove_done_callback,
"task.sleep": cls.async_sleep,
"task.wait": cls.user_task_wait,
}
)
cls.ast_functions.update(
{
"log.debug": lambda ast_ctx: ast_ctx.get_logger().debug,
"log.error": lambda ast_ctx: ast_ctx.get_logger().error,
"log.info": lambda ast_ctx: ast_ctx.get_logger().info,
"log.warning": lambda ast_ctx: ast_ctx.get_logger().warning,
"print": lambda ast_ctx: ast_ctx.get_logger().debug,
"task.name2id": cls.task_name2id_factory,
"task.unique": cls.task_unique_factory,
}
)
#
# start a task which is a reaper for canceled tasks, since some # functions
# like TrigInfo.stop() can't be async (it's called from a __del__ method)
#
async def task_reaper(reaper_q):
while True:
try:
cmd = await reaper_q.get()
if cmd[0] == "exit":
return
if cmd[0] == "cancel":
try:
cmd[1].cancel()
await cmd[1]
except asyncio.CancelledError:
pass
else:
_LOGGER.error("task_reaper: unknown command %s", cmd[0])
except asyncio.CancelledError:
raise
except Exception:
_LOGGER.error("task_reaper: got exception %s", traceback.format_exc(-1))
if not cls.task_reaper:
cls.task_reaper_q = asyncio.Queue(0)
cls.task_reaper = cls.create_task(task_reaper(cls.task_reaper_q))
#
# start a task which creates tasks to run coros, and then syncs on their completion;
# this is used by the shutdown trigger
#
async def task_waiter(waiter_q):
aws = []
while True:
try:
cmd = await waiter_q.get()
if cmd[0] == "exit":
return
if cmd[0] == "await":
aws.append(cls.create_task(cmd[1]))
elif cmd[0] == "sync":
if len(aws) > 0:
await asyncio.gather(*aws)
aws = []
await cmd[1].put(0)
else:
_LOGGER.error("task_waiter: unknown command %s", cmd[0])
except asyncio.CancelledError:
raise
except Exception:
_LOGGER.error("task_waiter: got exception %s", traceback.format_exc(-1))
if not cls.task_waiter:
cls.task_waiter_q = asyncio.Queue(0)
cls.task_waiter = cls.create_task(task_waiter(cls.task_waiter_q))
@classmethod
def reaper_cancel(cls, task):
"""Send a task to be canceled by the reaper."""
cls.task_reaper_q.put_nowait(["cancel", task])
@classmethod
async def reaper_stop(cls):
"""Tell the reaper task to exit."""
if cls.task_reaper:
cls.task_reaper_q.put_nowait(["exit"])
await cls.task_reaper
cls.task_reaper = None
cls.task_reaper_q = None
@classmethod
def waiter_await(cls, coro):
"""Send a coro to be awaited by the waiter task."""
cls.task_waiter_q.put_nowait(["await", coro])
@classmethod
async def waiter_sync(cls):
"""Wait until the waiter queue is empty."""
if cls.task_waiter:
sync_q = asyncio.Queue(0)
cls.task_waiter_q.put_nowait(["sync", sync_q])
await sync_q.get()
@classmethod
async def waiter_stop(cls):
"""Tell the waiter task to exit."""
if cls.task_waiter:
cls.task_waiter_q.put_nowait(["exit"])
await cls.task_waiter
cls.task_waiter = None
cls.task_waiter_q = None
@classmethod
async def async_sleep(cls, duration):
"""Implement task.sleep()."""
await asyncio.sleep(float(duration))
@classmethod
async def event_fire(cls, event_type, **kwargs):
"""Implement event.fire()."""
curr_task = asyncio.current_task()
if "context" in kwargs and isinstance(kwargs["context"], Context):
context = kwargs["context"]
del kwargs["context"]
else:
context = cls.task2context.get(curr_task, None)
cls.hass.bus.async_fire(event_type, kwargs, context=context)
@classmethod
def store_hass_context(cls, hass_context):
"""Store a context against the running task."""
curr_task = asyncio.current_task()
cls.task2context[curr_task] = hass_context
@classmethod
def task_unique_factory(cls, ctx):
"""Define and return task.unique() for this context."""
async def task_unique(name, kill_me=False):
"""Implement task.unique()."""
name = f"{ctx.get_global_ctx_name()}.{name}"
curr_task = asyncio.current_task()
if name in cls.unique_name2task:
task = cls.unique_name2task[name]
if kill_me:
if task != curr_task:
#
# it seems we can't cancel ourselves, so we
# tell the reaper task to cancel us
#
cls.reaper_cancel(curr_task)
# wait to be canceled
await asyncio.sleep(100000)
elif task != curr_task and task in cls.our_tasks:
# only cancel tasks if they are ones we started
cls.reaper_cancel(task)
if curr_task in cls.our_tasks:
if name in cls.unique_name2task:
task = cls.unique_name2task[name]
if task in cls.unique_task2name:
cls.unique_task2name[task].discard(name)
cls.unique_name2task[name] = curr_task
if curr_task not in cls.unique_task2name:
cls.unique_task2name[curr_task] = set()
cls.unique_task2name[curr_task].add(name)
return task_unique
@classmethod
async def user_task_cancel(cls, task=None):
    """Implement task.cancel(); with no argument, cancels the current task."""
    do_sleep = False
    if not task:
        task = asyncio.current_task()
        do_sleep = True
    if task not in cls.our_tasks:
        # only tasks we started may be canceled this way
        raise TypeError(f"{task} is not a user-started task")
    # delegate cancellation to the reaper task (see task_unique: a task
    # can't cancel itself directly)
    cls.reaper_cancel(task)
    if do_sleep:
        # wait to be canceled
        await asyncio.sleep(100000)
@classmethod
async def user_task_current_task(cls):
    """Implement task.current_task(); returns the running asyncio Task."""
    return asyncio.current_task()
@classmethod
def task_name2id_factory(cls, ctx):
    """Define and return task.name2id() for this context."""

    def user_task_name2id(name=None):
        """Implement task.name2id().

        With no argument, returns a dict of all unique task names in this
        global context mapped to their tasks; otherwise returns the task
        registered under name, raising NameError if it is unknown.
        """
        prefix = f"{ctx.get_global_ctx_name()}."
        if name is None:
            ret = {}
            for task_name, task_id in cls.unique_name2task.items():
                if task_name.startswith(prefix):
                    # strip the "ctx_name." prefix from the returned keys
                    ret[task_name[len(prefix) :]] = task_id
            return ret
        if prefix + name in cls.unique_name2task:
            return cls.unique_name2task[prefix + name]
        raise NameError(f"task name '{name}' is unknown")

    return user_task_name2id
@classmethod
async def user_task_wait(cls, aws, **kwargs):
    """Implement task.wait(); thin wrapper around asyncio.wait().

    NOTE(review): newer Python versions require aws to contain Tasks/Futures,
    not bare coroutines — confirm callers comply before upgrading.
    """
    return await asyncio.wait(aws, **kwargs)
@classmethod
def user_task_remove_done_callback(cls, task, callback):
    """Implement task.remove_done_callback(); silently ignores an unregistered callback."""
    cls.task2cb[task]["cb"].pop(callback, None)
@classmethod
def unique_name_used(cls, ctx, name):
    """Return whether the current unique name is in use."""
    # same per-context prefixing as task_unique()
    name = f"{ctx.get_global_ctx_name()}.{name}"
    return name in cls.unique_name2task
@classmethod
def service_has_service(cls, domain, name):
    """Implement service.has_service(); True if HASS has the given service."""
    return cls.hass.services.has_service(domain, name)
@classmethod
async def service_call(cls, domain, name, **kwargs):
    """Implement service.call().

    Keyword arguments that are passed through to HASS rather than to the
    service itself:
      - context: a HASS Context (defaults to the context stored for the
        current task, if any)
      - blocking: bool
      - return_response: bool
    All remaining keyword arguments become the service data.
    """
    curr_task = asyncio.current_task()
    hass_args = {}
    for keyword, types, default in [
        ("context", (Context,), cls.task2context.get(curr_task, None)),
        ("blocking", (bool,), None),
        ("return_response", (bool,), None),
    ]:
        # isinstance() (rather than the previous exact type() check)
        # also accepts subclasses, eg a Context subclass
        if keyword in kwargs and isinstance(kwargs[keyword], types):
            hass_args[keyword] = kwargs.pop(keyword)
        elif default:
            hass_args[keyword] = default
    return await cls.hass_services_async_call(domain, name, kwargs, **hass_args)
@classmethod
async def service_completions(cls, root):
    """Return possible completions of HASS services.

    With zero dots in root, completes domain names; with one dot,
    completes service names within the given domain.
    """
    words = set()
    services = cls.hass.services.async_services()
    num_period = root.count(".")
    if num_period == 1:
        domain, svc_root = root.split(".")
        if domain in services:
            words |= {f"{domain}.{svc}" for svc in services[domain] if svc.lower().startswith(svc_root)}
    elif num_period == 0:
        words |= {domain for domain in services if domain.lower().startswith(root)}
    return words
@classmethod
async def func_completions(cls, root):
    """Return the set of registered function names that start with root."""
    # merge plain and ast-context function names, then filter by prefix
    candidates = set(cls.functions)
    candidates.update(cls.ast_functions)
    return {name for name in candidates if name.lower().startswith(root)}
@classmethod
def register(cls, funcs):
    """Register functions to be available for calling.

    funcs maps name -> callable; later registrations override earlier ones.
    """
    cls.functions.update(funcs)
@classmethod
def register_ast(cls, funcs):
    """Register functions that need ast context to be available for calling.

    funcs maps name -> factory(ast_ctx) returning the actual callable.
    """
    cls.ast_functions.update(funcs)
@classmethod
def install_ast_funcs(cls, ast_ctx):
    """Install ast functions into the local symbol table.

    Each registered factory is called with ast_ctx to produce the
    per-context callable that is installed.
    """
    sym_table = {name: func(ast_ctx) for name, func in cls.ast_functions.items()}
    ast_ctx.set_local_sym_table(sym_table)
@classmethod
def get(cls, name):
    """Lookup a function locally and then as a service.

    Returns the registered function for name, or a generated async wrapper
    that calls the HASS service "domain.service" when name has that form
    and the service exists; otherwise None.
    """
    func = cls.functions.get(name, None)
    if func:
        return func
    name_parts = name.split(".")
    if len(name_parts) != 2:
        return None
    domain, service = name_parts
    if not cls.service_has_service(domain, service):
        return None

    def service_call_factory(domain, service):
        # bind domain/service into the closure used by the wrapper below
        async def service_call(*args, **kwargs):
            curr_task = asyncio.current_task()
            hass_args = {}
            for keyword, types, default in [
                ("context", (Context,), cls.task2context.get(curr_task, None)),
                ("blocking", (bool,), None),
                ("return_response", (bool,), None),
            ]:
                # isinstance() (rather than the previous exact type()
                # check) also accepts subclasses, eg a Context subclass
                if keyword in kwargs and isinstance(kwargs[keyword], types):
                    hass_args[keyword] = kwargs.pop(keyword)
                elif default:
                    hass_args[keyword] = default
            if len(args) != 0:
                raise TypeError(f"service {domain}.{service} takes only keyword arguments")
            return await cls.hass_services_async_call(domain, service, kwargs, **hass_args)

        return service_call

    return service_call_factory(domain, service)
@classmethod
async def hass_services_async_call(cls, domain, service, kwargs, **hass_args):
    """Call a hass async service.

    kwargs is the service data; hass_args may contain context, blocking
    and return_response, which are passed to hass.services.async_call.
    """
    if SERVICE_RESPONSE_ONLY is None:
        # backwards compatibility < 2023.7
        await cls.hass.services.async_call(domain, service, kwargs, **hass_args)
    else:
        # allow service responses >= 2023.7
        if (
            "return_response" in hass_args
            and hass_args["return_response"]
            and "blocking" not in hass_args
        ):
            # a response can only be returned by a blocking call
            hass_args["blocking"] = True
        elif (
            "return_response" not in hass_args
            and cls.hass.services.supports_response(domain, service) == SERVICE_RESPONSE_ONLY
        ):
            # the service only works with responses, so request one even
            # though the caller didn't ask for it
            hass_args["return_response"] = True
            if "blocking" not in hass_args:
                hass_args["blocking"] = True
        return await cls.hass.services.async_call(domain, service, kwargs, **hass_args)
@classmethod
async def run_coro(cls, coro, ast_ctx=None):
    """Run coroutine task and update unique task on start and exit.

    Wraps a user coroutine: registers the task in our_tasks, runs any done
    callbacks on exit, and releases unique names and per-task state.
    Exceptions other than cancellation are logged, not propagated.
    """
    #
    # Add a placeholder for the new task so we know it's one we started
    #
    task: asyncio.Task = None
    try:
        task = asyncio.current_task()
        cls.our_tasks.add(task)
        if ast_ctx is not None:
            cls.task_done_callback_ctx(task, ast_ctx)
        result = await coro
        return result
    except asyncio.CancelledError:
        # propagate cancellation so asyncio bookkeeping stays correct
        raise
    except Exception:
        _LOGGER.error("run_coro: got exception %s", traceback.format_exc(-1))
    finally:
        # run any registered done callbacks for this task
        if task in cls.task2cb:
            for callback, info in cls.task2cb[task]["cb"].items():
                # NOTE: rebinds ast_ctx to the callback's own context
                ast_ctx, args, kwargs = info
                await ast_ctx.call_func(callback, None, *args, **kwargs)
                if ast_ctx.get_exception_obj():
                    # stop running further callbacks after the first error
                    ast_ctx.get_logger().error(ast_ctx.get_exception_long())
                    break
        # release any unique names held by this task
        if task in cls.unique_task2name:
            for name in cls.unique_task2name[task]:
                del cls.unique_name2task[name]
            del cls.unique_task2name[task]
        cls.task2context.pop(task, None)
        cls.task2cb.pop(task, None)
        cls.our_tasks.discard(task)
@classmethod
def create_task(cls, coro, ast_ctx=None):
    """Create a new task that runs a coroutine, wrapped by run_coro for bookkeeping."""
    return cls.hass.loop.create_task(cls.run_coro(coro, ast_ctx=ast_ctx))
@classmethod
def service_register(
    cls, global_ctx_name, domain, service, callback, supports_response=SERVICE_RESPONSE_NONE
):
    """Register a new service callback.

    Keeps a per-service reference count so the same global context can
    register a service repeatedly; registering the same service from a
    different global context raises ValueError.
    """
    key = f"{domain}.{service}"
    if key not in cls.service_cnt:
        cls.service_cnt[key] = 0
    if key not in cls.service2global_ctx:
        cls.service2global_ctx[key] = global_ctx_name
    if cls.service2global_ctx[key] != global_ctx_name:
        raise ValueError(
            f"{global_ctx_name}: can't register service {key}; already defined in {cls.service2global_ctx[key]}"
        )
    cls.service_cnt[key] += 1
    if SERVICE_RESPONSE_ONLY is None:
        # backwards compatibility < 2023.7
        cls.hass.services.async_register(domain, service, callback)
    else:
        # allow service responses >= 2023.7
        cls.hass.services.async_register(domain, service, callback, supports_response=supports_response)
@classmethod
def service_remove(cls, global_ctx_name, domain, service):
    """Remove a service callback.

    Decrements the reference count; only removes the HASS registration
    once the last reference is gone.
    """
    key = f"{domain}.{service}"
    if cls.service_cnt.get(key, 0) > 1:
        cls.service_cnt[key] -= 1
        return
    # NOTE(review): reaches here even when the count is already 0 (never
    # registered); async_remove is then a best-effort cleanup
    cls.service_cnt[key] = 0
    cls.hass.services.async_remove(domain, service)
    cls.service2global_ctx.pop(key, None)
@classmethod
def task_done_callback_ctx(cls, task, ast_ctx):
    """Set the ast_ctx for a task, which is needed for done callbacks."""
    # only create the entry once; an existing ctx is kept
    if task not in cls.task2cb or "ctx" not in cls.task2cb[task]:
        cls.task2cb[task] = {"ctx": ast_ctx, "cb": {}}
@classmethod
def task_add_done_callback(cls, task, ast_ctx, callback, *args, **kwargs):
    """Add a done callback to the given task.

    If ast_ctx is None the context stored by task_done_callback_ctx is
    used; presumably the task was registered first — raises KeyError
    otherwise (TODO confirm callers guarantee this).
    """
    if ast_ctx is None:
        ast_ctx = cls.task2cb[task]["ctx"]
    cls.task2cb[task]["cb"][callback] = [ast_ctx, args, kwargs]

View File

@@ -0,0 +1,352 @@
"""Global context handling."""
import logging
import os
from types import ModuleType
from typing import Any, Callable, Dict, List, Optional, Set, Tuple, Union

from homeassistant.config_entries import ConfigEntry

from .const import CONF_HASS_IS_GLOBAL, CONFIG_ENTRY, DOMAIN, FOLDER, LOGGER_PATH
from .eval import AstEval, EvalFunc
from .function import Function
from .trigger import TrigInfo
_LOGGER = logging.getLogger(LOGGER_PATH + ".global_ctx")
class GlobalContext:
    """Define class for global variables and trigger context."""

    def __init__(
        self,
        name,
        global_sym_table: Dict[str, Any] = None,
        manager=None,
        rel_import_path: str = None,
        app_config: Dict[str, Any] = None,
        source: str = None,
        mtime: float = None,
    ) -> None:
        """Initialize GlobalContext."""
        self.name: str = name
        self.global_sym_table: Dict[str, Any] = global_sym_table if global_sym_table else {}
        # trigger functions registered in this context
        self.triggers: Set[EvalFunc] = set()
        # triggers registered while auto_start was off, waiting for start()
        self.triggers_delay_start: Set[EvalFunc] = set()
        self.logger: logging.Logger = logging.getLogger(LOGGER_PATH + "." + name)
        self.manager: GlobalContextMgr = manager
        self.auto_start: bool = False
        self.module: ModuleType = None
        self.rel_import_path: str = rel_import_path
        self.source: str = source
        self.file_path: str = None
        self.mtime: float = mtime
        self.app_config: Dict[str, Any] = app_config
        # names of other global contexts this one has imported
        self.imports: Set[str] = set()
        config_entry: ConfigEntry = Function.hass.data.get(DOMAIN, {}).get(CONFIG_ENTRY)
        # the entry can be missing during early setup; the previous default
        # of {} would raise AttributeError on .data below
        if config_entry is not None and config_entry.data.get(CONF_HASS_IS_GLOBAL, False):
            #
            # expose hass as a global variable if configured
            #
            self.global_sym_table["hass"] = Function.hass
        if app_config:
            self.global_sym_table["pyscript.app_config"] = app_config.copy()

    def trigger_register(self, func: EvalFunc) -> bool:
        """Register a trigger function; return True if start now."""
        self.triggers.add(func)
        if self.auto_start:
            return True
        self.triggers_delay_start.add(func)
        return False

    def trigger_unregister(self, func: EvalFunc) -> None:
        """Unregister a trigger function."""
        self.triggers.discard(func)
        self.triggers_delay_start.discard(func)

    def set_auto_start(self, auto_start: bool) -> None:
        """Set the auto-start flag."""
        self.auto_start = auto_start

    def start(self) -> None:
        """Start any unstarted triggers."""
        for func in self.triggers_delay_start:
            func.trigger_start()
        self.triggers_delay_start = set()

    def stop(self) -> None:
        """Stop all triggers and auto_start."""
        for func in self.triggers:
            func.trigger_stop()
        self.triggers = set()
        self.triggers_delay_start = set()
        self.set_auto_start(False)

    def get_name(self) -> str:
        """Return the global context name."""
        return self.name

    def set_logger_name(self, name) -> None:
        """Set the global context logging name."""
        self.logger = logging.getLogger(LOGGER_PATH + "." + name)

    def get_global_sym_table(self) -> Dict[str, Any]:
        """Return the global symbol table."""
        return self.global_sym_table

    def get_source(self) -> str:
        """Return the source code."""
        return self.source

    def get_app_config(self) -> Dict[str, Any]:
        """Return the app config."""
        return self.app_config

    def get_mtime(self) -> float:
        """Return the mtime."""
        return self.mtime

    def get_file_path(self) -> str:
        """Return the file path."""
        return self.file_path

    def get_imports(self) -> Set[str]:
        """Return the imports."""
        return self.imports

    def get_trig_info(self, name: str, trig_args: Dict[str, Any]) -> TrigInfo:
        """Return a new trigger info instance with the given args."""
        return TrigInfo(name, trig_args, self)

    async def module_import(self, module_name: str, import_level: int) -> List[Any]:
        """Import a pyscript module from the pyscript/modules or apps folder.

        Returns [module, None] on success, [None, error_ctx] on load
        failure, or [None, None] if no candidate file exists.
        """
        pyscript_dir = Function.hass.config.path(FOLDER)
        module_path = module_name.replace(".", "/")
        file_paths = []

        def find_first_file(file_paths: List[List[str]]) -> Optional[List[str]]:
            # executor job: return the first candidate [ctx_name, abs_path,
            # rel_path] whose file exists (annotation fixed; was List[Set])
            for ctx_name, path, rel_path in file_paths:
                abs_path = os.path.join(pyscript_dir, path)
                if os.path.isfile(abs_path):
                    return [ctx_name, abs_path, rel_path]
            return None

        #
        # first build a list of potential import files
        #
        if import_level > 0:
            # relative import: walk up from this context's path/name
            if self.rel_import_path is None:
                raise ImportError("attempted relative import with no known parent package")
            path = self.rel_import_path
            if path.endswith("/__init__"):
                path = os.path.dirname(path)
            ctx_name = self.name
            for _ in range(import_level - 1):
                path = os.path.dirname(path)
                idx = ctx_name.rfind(".")
                if path.find("/") < 0 or idx < 0:
                    raise ImportError("attempted relative import above parent package")
                ctx_name = ctx_name[0:idx]
            ctx_name += f".{module_name}"
            module_info = [ctx_name, f"{path}/{module_path}.py", path]
            path += f"/{module_path}"
            # package form (__init__.py) takes priority over plain module
            file_paths.append([ctx_name, f"{path}/__init__.py", path])
            file_paths.append(module_info)
            module_name = ctx_name[ctx_name.find(".") + 1 :]
        else:
            # absolute import: apps/ (if importing from an app) then modules/
            if self.rel_import_path is not None and self.rel_import_path.startswith("apps/"):
                ctx_name = f"apps.{module_name}"
                file_paths.append([ctx_name, f"apps/{module_path}/__init__.py", f"apps/{module_path}"])
                file_paths.append([ctx_name, f"apps/{module_path}.py", f"apps/{module_path}"])
            ctx_name = f"modules.{module_name}"
            file_paths.append([ctx_name, f"modules/{module_path}/__init__.py", f"modules/{module_path}"])
            file_paths.append([ctx_name, f"modules/{module_path}.py", None])
        #
        # now see if we have loaded it already
        #
        for ctx_name, _, _ in file_paths:
            mod_ctx = self.manager.get(ctx_name)
            if mod_ctx and mod_ctx.module:
                self.imports.add(mod_ctx.get_name())
                return [mod_ctx.module, None]
        #
        # not loaded already, so try to find and import it
        #
        file_info = await Function.hass.async_add_executor_job(find_first_file, file_paths)
        if not file_info:
            return [None, None]
        [ctx_name, file_path, rel_import_path] = file_info
        mod = ModuleType(module_name)
        global_ctx = GlobalContext(
            ctx_name, global_sym_table=mod.__dict__, manager=self.manager, rel_import_path=rel_import_path
        )
        global_ctx.set_auto_start(True)
        _, error_ctx = await self.manager.load_file(global_ctx, file_path)
        if error_ctx:
            _LOGGER.error(
                "module_import: failed to load module %s, ctx = %s, path = %s",
                module_name,
                ctx_name,
                file_path,
            )
            return [None, error_ctx]
        global_ctx.module = mod
        self.imports.add(ctx_name)
        return [mod, None]
class GlobalContextMgr:
    """Define class for all global contexts."""

    #
    # map of context names to contexts
    #
    contexts = {}
    #
    # sequence number for sessions
    #
    name_seq = 0

    def __init__(self) -> None:
        """Report an error if GlobalContextMgr in instantiated."""
        _LOGGER.error("GlobalContextMgr class is not meant to be instantiated")

    @classmethod
    def init(cls) -> None:
        """Initialize GlobalContextMgr and register the pyscript.*_global_ctx functions."""

        def get_global_ctx_factory(ast_ctx: AstEval) -> Callable[[], str]:
            """Generate a pyscript.get_global_ctx() function with given ast_ctx."""

            async def get_global_ctx():
                return ast_ctx.get_global_ctx_name()

            return get_global_ctx

        def list_global_ctx_factory(ast_ctx: AstEval) -> Callable[[], List[str]]:
            """Generate a pyscript.list_global_ctx() function with given ast_ctx."""

            async def list_global_ctx():
                ctx_names = set(cls.contexts.keys())
                curr_ctx_name = ast_ctx.get_global_ctx_name()
                ctx_names.discard(curr_ctx_name)
                # current context first, then the rest sorted by name
                # (the previous sorted(sorted(...)) was redundant)
                return [curr_ctx_name] + sorted(ctx_names)

            return list_global_ctx

        def set_global_ctx_factory(ast_ctx: AstEval) -> Callable[[str], None]:
            """Generate a pyscript.set_global_ctx() function with given ast_ctx."""

            async def set_global_ctx(name):
                global_ctx = cls.get(name)
                if global_ctx is None:
                    raise NameError(f"global context '{name}' does not exist")
                ast_ctx.set_global_ctx(global_ctx)
                ast_ctx.set_logger_name(global_ctx.name)

            return set_global_ctx

        ast_funcs = {
            "pyscript.get_global_ctx": get_global_ctx_factory,
            "pyscript.list_global_ctx": list_global_ctx_factory,
            "pyscript.set_global_ctx": set_global_ctx_factory,
        }
        Function.register_ast(ast_funcs)

    @classmethod
    def get(cls, name: str) -> Optional[GlobalContext]:
        """Return the GlobalContext given a name (annotation fixed; was Optional[str])."""
        return cls.contexts.get(name, None)

    @classmethod
    def set(cls, name: str, global_ctx: GlobalContext) -> None:
        """Save the GlobalContext by name."""
        cls.contexts[name] = global_ctx

    @classmethod
    def items(cls) -> List[Tuple[str, GlobalContext]]:
        """Return all the global context items, sorted by name."""
        return sorted(cls.contexts.items())

    @classmethod
    def delete(cls, name: str) -> None:
        """Stop and delete the given GlobalContext; no-op if unknown."""
        if name in cls.contexts:
            global_ctx = cls.contexts[name]
            global_ctx.stop()
            del cls.contexts[name]

    @classmethod
    def new_name(cls, root: str) -> str:
        """Find a unique new name by appending a sequence number to root."""
        while True:
            name = f"{root}{cls.name_seq}"
            cls.name_seq += 1
            if name not in cls.contexts:
                return name

    @classmethod
    async def load_file(
        cls, global_ctx: GlobalContext, file_path: str, source: str = None, reload: bool = False
    ) -> Tuple[bool, Optional[AstEval]]:
        """Load, parse and run the given script file.

        Returns (True, None) on success, or (False, ast_ctx) on a parse or
        eval error ((False, None) if the file couldn't be read).
        """
        mtime = None
        if source is None:

            def read_file(path: str) -> Tuple[Optional[str], float]:
                # executor job: read the source and its mtime
                try:
                    with open(path, encoding="utf-8") as file_desc:
                        source = file_desc.read()
                    return source, os.path.getmtime(path)
                except Exception as exc:
                    _LOGGER.error("%s", exc)
                    return None, 0

            source, mtime = await Function.hass.async_add_executor_job(read_file, file_path)
            if source is None:
                return False, None
        ctx_curr = cls.get(global_ctx.get_name())
        if ctx_curr:
            # stop triggers and destroy old global context
            ctx_curr.stop()
            cls.delete(global_ctx.get_name())
        #
        # create new ast eval context and parse source file
        #
        ast_ctx = AstEval(global_ctx.get_name(), global_ctx)
        Function.install_ast_funcs(ast_ctx)
        if not ast_ctx.parse(source, filename=file_path):
            exc = ast_ctx.get_exception_long()
            ast_ctx.get_logger().error(exc)
            global_ctx.stop()
            return False, ast_ctx
        await ast_ctx.eval()
        exc = ast_ctx.get_exception_long()
        if exc is not None:
            ast_ctx.get_logger().error(exc)
            global_ctx.stop()
            return False, ast_ctx
        global_ctx.source = source
        global_ctx.file_path = file_path
        if mtime is not None:
            global_ctx.mtime = mtime
        cls.set(global_ctx.get_name(), global_ctx)
        _LOGGER.info("%s %s", "Reloaded" if reload else "Loaded", file_path)
        return True, None

View File

@@ -0,0 +1,921 @@
"""Pyscript Jupyter kernel."""
#
# Based on simple_kernel.py by Doug Blank <doug.blank@gmail.com>
# https://github.com/dsblank/simple_kernel
# license: public domain
# Thanks Doug!
#
import asyncio
import datetime
import hashlib
import hmac
import json
import logging
import logging.handlers
import re
from struct import pack, unpack
import traceback
import uuid
from .const import LOGGER_PATH
from .function import Function
from .global_ctx import GlobalContextMgr
from .state import State
_LOGGER = logging.getLogger(LOGGER_PATH + ".jupyter_kernel")
# Globals:
DELIM = b"<IDS|MSG>"
def msg_id():
    """Return a new uuid for message id."""
    return f"{uuid.uuid4()}"
def str_to_bytes(string):
    """Encode a string in bytes (UTF-8)."""
    return bytes(string, "utf-8")
class KernelBufferingHandler(logging.handlers.BufferingHandler):
    """Memory-based handler for logging; send via stdout queue."""

    def __init__(self, housekeep_q):
        """Initialize KernelBufferingHandler instance."""
        # capacity 0 means shouldFlush() is consulted on every record
        super().__init__(0)
        self.housekeep_q = housekeep_q

    def flush(self):
        """Flush is a no-op."""

    def shouldFlush(self, record):
        """Write the buffer to the housekeeping queue.

        NOTE(review): shouldFlush() is used as the delivery hook here:
        BufferingHandler.emit() calls it for each record, so every formatted
        record is forwarded as a ["stdout", text] housekeeping message and
        flush() is deliberately a no-op.
        """
        try:
            self.housekeep_q.put_nowait(["stdout", self.format(record)])
        except asyncio.QueueFull:
            _LOGGER.error("housekeep_q unexpectedly full")
################################################################
class ZmqSocket:
    """Defines a minimal implementation of a small subset of ZMQ."""

    #
    # This allows pyscript to work with Jupyter without the real zmq
    # and pyzmq packages, which might not be available or easy to
    # install on the wide set of HASS platforms.
    #
    def __init__(self, reader, writer, sock_type):
        """Initialize a ZMQ socket with the given type and reader/writer streams."""
        self.writer = writer
        self.reader = reader
        # socket type string (eg "ROUTER"), announced during the handshake
        self.type = sock_type

    async def read_bytes(self, num_bytes):
        """Read exactly num_bytes from ZMQ socket; raises EOFError if the peer closes."""
        data = b""
        while len(data) < num_bytes:
            new_data = await self.reader.read(num_bytes - len(data))
            if len(new_data) == 0:
                # connection closed before we received num_bytes
                raise EOFError
            data += new_data
        return data

    async def write_bytes(self, raw_msg):
        """Write bytes to ZMQ socket and wait for the stream to drain."""
        self.writer.write(raw_msg)
        await self.writer.drain()

    async def handshake(self):
        """Do initial greeting handshake on a new ZMQ connection.

        NOTE(review): the literal byte sequences below appear to follow the
        ZMTP 3.x greeting (signature, version, "NULL" security mechanism,
        as-server flag, filler) — confirm against the ZMTP spec before
        changing any of them.
        """
        await self.write_bytes(b"\xff\x00\x00\x00\x00\x00\x00\x00\x01\x7f")
        _ = await self.read_bytes(10)
        # _LOGGER.debug(f"handshake: got initial greeting {greeting}")
        await self.write_bytes(b"\x03")
        _ = await self.read_bytes(1)
        await self.write_bytes(b"\x00" + "NULL".encode() + b"\x00" * 16 + b"\x00" + b"\x00" * 31)
        _ = await self.read_bytes(53)
        # _LOGGER.debug(f"handshake: got rest of greeting {greeting}")
        params = [["Socket-Type", self.type]]
        if self.type == "ROUTER":
            params.append(["Identity", ""])
        await self.send_cmd("READY", params)

    async def recv(self, multipart=False):
        """Receive a message from ZMQ socket.

        Returns the concatenated frame bodies, or the list of frames when
        multipart is True.  Command frames (eg READY) are parsed and
        silently consumed.
        """
        parts = []
        while 1:
            # flags byte: 0x1 = more frames follow, 0x2 = 8-byte length,
            # 0x4 = command frame
            cmd = (await self.read_bytes(1))[0]
            if cmd & 0x2:
                msg_len = unpack(">Q", await self.read_bytes(8))[0]
            else:
                msg_len = (await self.read_bytes(1))[0]
            msg_body = await self.read_bytes(msg_len)
            if cmd & 0x4:
                # _LOGGER.debug(f"recv: got cmd {msg_body}")
                # command frame: [name_len, name, (param_len, param,
                # value_len(4B), value)...]; parsed but otherwise ignored
                cmd_len = msg_body[0]
                cmd = msg_body[1 : cmd_len + 1]
                msg_body = msg_body[cmd_len + 1 :]
                params = []
                while len(msg_body) > 0:
                    param_len = msg_body[0]
                    param = msg_body[1 : param_len + 1]
                    msg_body = msg_body[param_len + 1 :]
                    value_len = unpack(">L", msg_body[0:4])[0]
                    value = msg_body[4 : 4 + value_len]
                    msg_body = msg_body[4 + value_len :]
                    params.append([param, value])
                # _LOGGER.debug(f"recv: got cmd={cmd}, params={params}")
                # cmd is now a bytes object, so the last-frame test below
                # is False and we keep reading
            else:
                parts.append(msg_body)
            if cmd in (0x0, 0x2):
                # "more" bit clear: this was the final frame of the message
                # _LOGGER.debug(f"recv: got msg {parts}")
                if not multipart:
                    return b"".join(parts)
                return parts

    async def recv_multipart(self):
        """Receive a multipart message from ZMQ socket."""
        return await self.recv(multipart=True)

    async def send_cmd(self, cmd, params):
        """Send a command (name plus [param, value] pairs) over ZMQ socket."""
        raw_msg = bytearray([len(cmd)]) + cmd.encode()
        for param in params:
            raw_msg += bytearray([len(param[0])]) + param[0].encode()
            raw_msg += pack(">L", len(param[1])) + param[1].encode()
        len_msg = len(raw_msg)
        # 0x4 = short command frame, 0x6 = command frame with 8-byte length
        if len_msg <= 255:
            raw_msg = bytearray([0x4, len_msg]) + raw_msg
        else:
            raw_msg = bytearray([0x6]) + pack(">Q", len_msg) + raw_msg
        # _LOGGER.debug(f"send_cmd: sending {raw_msg}")
        await self.write_bytes(raw_msg)

    async def send(self, msg):
        """Send a single-frame message over ZMQ socket."""
        len_msg = len(msg)
        if len_msg <= 255:
            raw_msg = bytearray([0x1, 0x0, 0x0, len_msg]) + msg
        else:
            raw_msg = bytearray([0x1, 0x0, 0x2]) + pack(">Q", len_msg) + msg
        # _LOGGER.debug(f"send: sending {raw_msg}")
        await self.write_bytes(raw_msg)

    async def send_multipart(self, parts):
        """Send multipart messages over ZMQ socket."""
        raw_msg = b""
        for i, part in enumerate(parts):
            len_part = len(part)
            # "more" flag (0x1) on every frame except the last
            cmd = 0x1 if i < len(parts) - 1 else 0x0
            if len_part <= 255:
                raw_msg += bytearray([cmd, len_part]) + part
            else:
                # +2 sets the 8-byte-length flag
                raw_msg += bytearray([cmd + 2]) + pack(">Q", len_part) + part
        # _LOGGER.debug(f"send_multipart: sending {raw_msg}")
        await self.write_bytes(raw_msg)

    def close(self):
        """Close the ZMQ socket."""
        self.writer.close()
##########################################
class Kernel:
"""Define a Jupyter Kernel class."""
def __init__(self, config, ast_ctx, global_ctx, global_ctx_name):
"""Initialize a Kernel object, one instance per session."""
self.config = config.copy()
self.global_ctx = global_ctx
self.global_ctx_name = global_ctx_name
self.ast_ctx = ast_ctx
self.secure_key = str_to_bytes(self.config["key"])
self.no_connect_timeout = self.config.get("no_connect_timeout", 30)
self.signature_schemes = {"hmac-sha256": hashlib.sha256}
self.auth = hmac.HMAC(
self.secure_key,
digestmod=self.signature_schemes[self.config["signature_scheme"]],
)
self.execution_count = 1
self.engine_id = str(uuid.uuid4())
self.heartbeat_server = None
self.iopub_server = None
self.control_server = None
self.stdin_server = None
self.shell_server = None
self.heartbeat_port = None
self.iopub_port = None
self.control_port = None
self.stdin_port = None
self.shell_port = None
# this should probably be a configuration parameter
self.avail_port = 50321
# there can be multiple iopub subscribers, with corresponding tasks
self.iopub_socket = set()
self.tasks = {}
self.task_cnt = 0
self.task_cnt_max = 0
self.session_cleanup_callback = None
self.housekeep_q = asyncio.Queue(0)
self.parent_header = None
#
# we create a logging handler so that output from the log functions
# gets delivered back to Jupyter as stdout
#
self.console = KernelBufferingHandler(self.housekeep_q)
self.console.setLevel(logging.DEBUG)
# set a format which is just the message
formatter = logging.Formatter("%(message)s")
self.console.setFormatter(formatter)
# match alphanum or "." at end of line
self.completion_re = re.compile(r".*?([\w.]*)$", re.DOTALL)
# see if line ends in a ":", with optional whitespace and comment
# note: this doesn't detect if we are inside a quoted string...
self.colon_end_re = re.compile(r".*: *(#.*)?$")
def msg_sign(self, msg_lst):
"""Sign a message with a secure signature."""
auth_hmac = self.auth.copy()
for msg in msg_lst:
auth_hmac.update(msg)
return str_to_bytes(auth_hmac.hexdigest())
def deserialize_wire_msg(self, wire_msg):
"""Split the routing prefix and message frames from a message on the wire."""
delim_idx = wire_msg.index(DELIM)
identities = wire_msg[:delim_idx]
m_signature = wire_msg[delim_idx + 1]
msg_frames = wire_msg[delim_idx + 2 :]
def decode(msg):
return json.loads(msg.decode("utf-8"))
msg = {}
msg["header"] = decode(msg_frames[0])
msg["parent_header"] = decode(msg_frames[1])
msg["metadata"] = decode(msg_frames[2])
msg["content"] = decode(msg_frames[3])
check_sig = self.msg_sign(msg_frames)
if check_sig != m_signature:
_LOGGER.error(
"signature mismatch: check_sig=%s, m_signature=%s, wire_msg=%s",
check_sig,
m_signature,
wire_msg,
)
raise ValueError("Signatures do not match")
return identities, msg
def new_header(self, msg_type):
"""Make a new header."""
return {
"date": datetime.datetime.now().isoformat(),
"msg_id": msg_id(),
"username": "kernel",
"session": self.engine_id,
"msg_type": msg_type,
"version": "5.3",
}
async def send(
self,
stream,
msg_type,
content=None,
parent_header=None,
metadata=None,
identities=None,
):
"""Send message to the Jupyter client."""
header = self.new_header(msg_type)
def encode(msg):
return str_to_bytes(json.dumps(msg))
msg_lst = [
encode(header),
encode(parent_header if parent_header else {}),
encode(metadata if metadata else {}),
encode(content if content else {}),
]
signature = self.msg_sign(msg_lst)
parts = [DELIM, signature, msg_lst[0], msg_lst[1], msg_lst[2], msg_lst[3]]
if identities:
parts = identities + parts
if stream:
# _LOGGER.debug("send %s: %s", msg_type, parts)
for this_stream in stream if isinstance(stream, set) else {stream}:
await this_stream.send_multipart(parts)
async def shell_handler(self, shell_socket, wire_msg):
"""Handle shell messages."""
identities, msg = self.deserialize_wire_msg(wire_msg)
# _LOGGER.debug("shell received %s: %s", msg.get('header', {}).get('msg_type', 'UNKNOWN'), msg)
self.parent_header = msg["header"]
content = {
"execution_state": "busy",
}
await self.send(self.iopub_socket, "status", content, parent_header=msg["header"])
if msg["header"]["msg_type"] == "execute_request":
content = {
"execution_count": self.execution_count,
"code": msg["content"]["code"],
}
await self.send(self.iopub_socket, "execute_input", content, parent_header=msg["header"])
result = None
code = msg["content"]["code"]
#
# replace VSCode initialization code, which depend on iPython % extensions
#
if code.startswith("%config "):
code = "None"
if code.startswith("_rwho_ls = %who_ls"):
code = "print([])"
self.global_ctx.set_auto_start(False)
self.ast_ctx.parse(code)
exc = self.ast_ctx.get_exception_obj()
if exc is None:
result = await self.ast_ctx.eval()
exc = self.ast_ctx.get_exception_obj()
await Function.waiter_sync()
self.global_ctx.set_auto_start(True)
self.global_ctx.start()
if exc:
traceback_mesg = self.ast_ctx.get_exception_long().split("\n")
metadata = {
"dependencies_met": True,
"engine": self.engine_id,
"status": "error",
"started": datetime.datetime.now().isoformat(),
}
content = {
"execution_count": self.execution_count,
"status": "error",
"ename": type(exc).__name__, # Exception name, as a string
"evalue": str(exc), # Exception value, as a string
"traceback": traceback_mesg,
}
_LOGGER.debug("Executing '%s' got exception: %s", code, content)
await self.send(
shell_socket,
"execute_reply",
content,
metadata=metadata,
parent_header=msg["header"],
identities=identities,
)
del content["execution_count"], content["status"]
await self.send(self.iopub_socket, "error", content, parent_header=msg["header"])
content = {
"execution_state": "idle",
}
await self.send(self.iopub_socket, "status", content, parent_header=msg["header"])
if msg["content"].get("store_history", True):
self.execution_count += 1
return
# if True or isinstance(self.ast_ctx.ast, ast.Expr):
_LOGGER.debug("Executing: '%s' got result %s", code, result)
if result is not None:
content = {
"execution_count": self.execution_count,
"data": {"text/plain": repr(result)},
"metadata": {},
}
await self.send(
self.iopub_socket,
"execute_result",
content,
parent_header=msg["header"],
)
metadata = {
"dependencies_met": True,
"engine": self.engine_id,
"status": "ok",
"started": datetime.datetime.now().isoformat(),
}
content = {
"status": "ok",
"execution_count": self.execution_count,
"user_variables": {},
"payload": [],
"user_expressions": {},
}
await self.send(
shell_socket,
"execute_reply",
content,
metadata=metadata,
parent_header=msg["header"],
identities=identities,
)
if msg["content"].get("store_history", True):
self.execution_count += 1
#
# Make sure stdout gets sent before set report execution_state idle on iopub,
# otherwise VSCode doesn't display stdout. We do a handshake with the
# housekeep task to ensure any queued messages get processed.
#
handshake_q = asyncio.Queue(0)
await self.housekeep_q.put(["handshake", handshake_q, 0])
await handshake_q.get()
elif msg["header"]["msg_type"] == "kernel_info_request":
content = {
"protocol_version": "5.3",
"ipython_version": [1, 1, 0, ""],
"language_version": [0, 0, 1],
"language": "python",
"implementation": "python",
"implementation_version": "3.7",
"language_info": {
"name": "python",
"version": "1.0",
"mimetype": "",
"file_extension": ".py",
"codemirror_mode": "",
"nbconvert_exporter": "",
},
"banner": "",
}
await self.send(
shell_socket,
"kernel_info_reply",
content,
parent_header=msg["header"],
identities=identities,
)
elif msg["header"]["msg_type"] == "complete_request":
root = ""
words = set()
code = msg["content"]["code"]
posn = msg["content"]["cursor_pos"]
match = self.completion_re.match(code[0:posn].lower())
if match:
root = match[1].lower()
words = State.completions(root)
words = words.union(await Function.service_completions(root))
words = words.union(await Function.func_completions(root))
words = words.union(self.ast_ctx.completions(root))
# _LOGGER.debug(f"complete_request code={code}, posn={posn}, root={root}, words={words}")
content = {
"status": "ok",
"matches": sorted(list(words)),
"cursor_start": msg["content"]["cursor_pos"] - len(root),
"cursor_end": msg["content"]["cursor_pos"],
"metadata": {},
}
await self.send(
shell_socket,
"complete_reply",
content,
parent_header=msg["header"],
identities=identities,
)
elif msg["header"]["msg_type"] == "is_complete_request":
code = msg["content"]["code"]
self.ast_ctx.parse(code)
exc = self.ast_ctx.get_exception_obj()
# determine indent of last line
indent = 0
i = code.rfind("\n")
if i >= 0:
while i + 1 < len(code) and code[i + 1] == " ":
i += 1
indent += 1
if exc is None:
if indent == 0:
content = {
# One of 'complete', 'incomplete', 'invalid', 'unknown'
"status": "complete",
# If status is 'incomplete', indent should contain the characters to use
# to indent the next line. This is only a hint: frontends may ignore it
# and use their own autoindentation rules. For other statuses, this
# field does not exist.
# "indent": str,
}
else:
content = {
"status": "incomplete",
"indent": " " * indent,
}
else:
#
# if the syntax error is right at the end, then we label it incomplete,
# otherwise it's invalid
#
if "EOF while" in str(exc) or "expected an indented block" in str(exc):
# if error is at ":" then increase indent
if hasattr(exc, "lineno"):
line = code.split("\n")[exc.lineno - 1]
if self.colon_end_re.match(line):
indent += 4
content = {
"status": "incomplete",
"indent": " " * indent,
}
else:
content = {
"status": "invalid",
}
# _LOGGER.debug(f"is_complete_request code={code}, exc={exc}, content={content}")
await self.send(
shell_socket,
"is_complete_reply",
content,
parent_header=msg["header"],
identities=identities,
)
elif msg["header"]["msg_type"] == "comm_info_request":
content = {"comms": {}}
await self.send(
shell_socket,
"comm_info_reply",
content,
parent_header=msg["header"],
identities=identities,
)
elif msg["header"]["msg_type"] == "history_request":
content = {"history": []}
await self.send(
shell_socket,
"history_reply",
content,
parent_header=msg["header"],
identities=identities,
)
elif msg["header"]["msg_type"] in {"comm_open", "comm_msg", "comm_close"}:
# _LOGGER.debug(f"ignore {msg['header']['msg_type']} message ")
...
else:
_LOGGER.error("unknown msg_type: %s", msg["header"]["msg_type"])
content = {
"execution_state": "idle",
}
await self.send(self.iopub_socket, "status", content, parent_header=msg["header"])
async def control_listen(self, reader, writer):
    """Task that listens to control messages.

    Serves one connection on the Jupyter control channel (ROUTER socket).
    Only shutdown_request is handled here: a shutdown_reply is sent back and
    the whole session is then shut down via the housekeeping queue.
    """
    try:
        _LOGGER.debug("control_listen connected")
        # let the housekeeping task track this connection task
        await self.housekeep_q.put(["register", "control", asyncio.current_task()])
        control_socket = ZmqSocket(reader, writer, "ROUTER")
        await control_socket.handshake()
        while 1:
            wire_msg = await control_socket.recv_multipart()
            identities, msg = self.deserialize_wire_msg(wire_msg)
            # _LOGGER.debug("control received %s: %s", msg.get('header', {}).get('msg_type', 'UNKNOWN'), msg)
            if msg["header"]["msg_type"] == "shutdown_request":
                content = {
                    "restart": False,
                }
                await self.send(
                    control_socket,
                    "shutdown_reply",
                    content,
                    parent_header=msg["header"],
                    identities=identities,
                )
                await self.housekeep_q.put(["shutdown"])
    except asyncio.CancelledError:
        raise
    except (EOFError, ConnectionResetError):
        # peer disconnected; unregister so housekeeping can decide to shut down
        _LOGGER.debug("control_listen got eof")
        await self.housekeep_q.put(["unregister", "control", asyncio.current_task()])
        control_socket.close()
    except Exception as err:
        _LOGGER.error("control_listen exception %s", err)
        await self.housekeep_q.put(["shutdown"])
async def stdin_listen(self, reader, writer):
    """Serve the Jupyter stdin channel; inbound messages are drained and ignored."""
    try:
        _LOGGER.debug("stdin_listen connected")
        await self.housekeep_q.put(["register", "stdin", asyncio.current_task()])
        sock = ZmqSocket(reader, writer, "ROUTER")
        await sock.handshake()
        while True:
            # nothing on this channel is acted upon; just consume the traffic
            _ = await sock.recv_multipart()
    except asyncio.CancelledError:
        raise
    except (EOFError, ConnectionResetError):
        _LOGGER.debug("stdin_listen got eof")
        await self.housekeep_q.put(["unregister", "stdin", asyncio.current_task()])
        sock.close()
    except Exception:
        _LOGGER.error("stdin_listen exception %s", traceback.format_exc(-1))
        await self.housekeep_q.put(["shutdown"])
async def shell_listen(self, reader, writer):
    """Task that listens to shell messages.

    Each multipart message received on the shell channel (ROUTER socket)
    is dispatched to shell_handler, which implements the Jupyter shell
    protocol (execute_request, kernel_info_request, etc.).
    """
    try:
        _LOGGER.debug("shell_listen connected")
        await self.housekeep_q.put(["register", "shell", asyncio.current_task()])
        shell_socket = ZmqSocket(reader, writer, "ROUTER")
        await shell_socket.handshake()
        while 1:
            msg = await shell_socket.recv_multipart()
            await self.shell_handler(shell_socket, msg)
    except asyncio.CancelledError:
        # close the socket even on cancellation, since shell is the main
        # request channel and may be cancelled mid-request
        shell_socket.close()
        raise
    except (EOFError, ConnectionResetError):
        _LOGGER.debug("shell_listen got eof")
        await self.housekeep_q.put(["unregister", "shell", asyncio.current_task()])
        shell_socket.close()
    except Exception:
        _LOGGER.error("shell_listen exception %s", traceback.format_exc(-1))
        await self.housekeep_q.put(["shutdown"])
async def heartbeat_listen(self, reader, writer):
    """Serve the Jupyter heartbeat channel by echoing every message back unchanged."""
    try:
        _LOGGER.debug("heartbeat_listen connected")
        await self.housekeep_q.put(["register", "heartbeat", asyncio.current_task()])
        sock = ZmqSocket(reader, writer, "REP")
        await sock.handshake()
        while True:
            # heartbeats are a pure echo: whatever arrives goes straight back
            payload = await sock.recv()
            await sock.send(payload)
    except asyncio.CancelledError:
        raise
    except (EOFError, ConnectionResetError):
        _LOGGER.debug("heartbeat_listen got eof")
        await self.housekeep_q.put(["unregister", "heartbeat", asyncio.current_task()])
        sock.close()
    except Exception:
        _LOGGER.error("heartbeat_listen exception: %s", traceback.format_exc(-1))
        await self.housekeep_q.put(["shutdown"])
async def iopub_listen(self, reader, writer):
    """Task that listens to iopub messages.

    Each connected iopub peer gets its own socket, which is added to
    self.iopub_socket (a set) so broadcasts reach every subscriber.
    Inbound traffic on this channel is read and discarded.
    """
    try:
        _LOGGER.debug("iopub_listen connected")
        await self.housekeep_q.put(["register", "iopub", asyncio.current_task()])
        iopub_socket = ZmqSocket(reader, writer, "PUB")
        await iopub_socket.handshake()
        self.iopub_socket.add(iopub_socket)
        while 1:
            _ = await iopub_socket.recv_multipart()
            # _LOGGER.debug("iopub received %s", _)
    except asyncio.CancelledError:
        raise
    except (EOFError, ConnectionResetError):
        await self.housekeep_q.put(["unregister", "iopub", asyncio.current_task()])
        iopub_socket.close()
        # drop this peer from the broadcast set
        self.iopub_socket.discard(iopub_socket)
        _LOGGER.debug("iopub_listen got eof")
    except Exception:
        _LOGGER.error("iopub_listen exception %s", traceback.format_exc(-1))
        await self.housekeep_q.put(["shutdown"])
async def housekeep_run(self):
    """Housekeeping, including closing servers after startup, and doing orderly shutdown.

    Runs forever, processing messages from self.housekeep_q:
      - ["stdout", text]: broadcast text on the iopub "stream" channel
      - ["handshake", queue, value]: pass a handshake value to a waiter
      - ["register", name, task] / ["unregister", name, task]: track the
        per-channel connection tasks; the session is shut down once the
        last connection task unregisters
      - ["shutdown"]: start an orderly session shutdown and exit this task
    """
    while True:
        try:
            msg = await self.housekeep_q.get()
            if msg[0] == "stdout":
                content = {"name": "stdout", "text": msg[1] + "\n"}
                if self.iopub_socket:
                    await self.send(
                        self.iopub_socket,
                        "stream",
                        content,
                        parent_header=self.parent_header,
                        identities=[b"stream.stdout"],
                    )
            elif msg[0] == "handshake":
                await msg[1].put(msg[2])
            elif msg[0] == "register":
                if msg[1] not in self.tasks:
                    self.tasks[msg[1]] = set()
                self.tasks[msg[1]].add(msg[2])
                self.task_cnt += 1
                # task_cnt_max is the high-water mark, used to distinguish a
                # session that connected then disconnected from one that
                # never connected at all
                self.task_cnt_max = max(self.task_cnt_max, self.task_cnt)
                #
                # now a couple of things are connected, call the session_cleanup_callback
                #
                if self.task_cnt > 1 and self.session_cleanup_callback:
                    self.session_cleanup_callback()
                    self.session_cleanup_callback = None
            elif msg[0] == "unregister":
                if msg[1] in self.tasks:
                    self.tasks[msg[1]].discard(msg[2])
                self.task_cnt -= 1
                #
                # if there are no connection tasks left, then shutdown the kernel
                #
                if self.task_cnt == 0 and self.task_cnt_max >= 4:
                    asyncio.create_task(self.session_shutdown())
                    # sleep so the shutdown task can cancel us cleanly
                    await asyncio.sleep(10000)
            elif msg[0] == "shutdown":
                asyncio.create_task(self.session_shutdown())
                return
        except asyncio.CancelledError:
            raise
        except Exception:
            _LOGGER.error("housekeep task exception: %s", traceback.format_exc(-1))
async def startup_timeout(self):
    """Shut down the session if nothing connects within the startup window.

    NOTE(review): the actual delay is self.no_connect_timeout (the original
    docstring said 30 seconds) — confirm the configured value.
    """
    await self.housekeep_q.put(["register", "startup_timeout", asyncio.current_task()])
    await asyncio.sleep(self.no_connect_timeout)
    if self.task_cnt_max <= 1:
        #
        # nothing started other than us, so shut down the session
        #
        _LOGGER.error("No connections to session %s; shutting down", self.global_ctx_name)
        if self.session_cleanup_callback:
            self.session_cleanup_callback()
            self.session_cleanup_callback = None
        await self.housekeep_q.put(["shutdown"])
    await self.housekeep_q.put(["unregister", "startup_timeout", asyncio.current_task()])
async def start_one_server(self, callback):
    """Start a server by finding an available port, starting at self.avail_port."""
    lowest_port = self.avail_port
    attempts = 0
    while attempts < 2048:
        try:
            server = await asyncio.start_server(callback, "0.0.0.0", self.avail_port)
        except OSError:
            # port already taken; advance to the next candidate and retry
            self.avail_port += 1
            attempts += 1
            continue
        return server, self.avail_port
    _LOGGER.error(
        "unable to find an available port from %d to %d",
        lowest_port,
        self.avail_port - 1,
    )
    return None, None
def get_ports(self):
    """Return a dict mapping Jupyter channel names to the ports this session listens on."""
    port_by_channel = {
        "iopub_port": self.iopub_port,
        "hb_port": self.heartbeat_port,
        "control_port": self.control_port,
        "stdin_port": self.stdin_port,
        "shell_port": self.shell_port,
    }
    return port_by_channel
def set_session_cleanup_callback(self, callback):
    """Set a cleanup callback which is called right after the session has started.

    The callback is invoked (and cleared) by the housekeeping task once more
    than one connection task has registered.
    """
    self.session_cleanup_callback = callback
async def session_start(self):
    """Start the kernel session.

    Installs the console logger, starts the housekeeping and startup-timeout
    tasks, then brings up one TCP server per Jupyter channel (iopub,
    heartbeat, control, stdin, shell), recording the chosen port for each.
    """
    self.ast_ctx.add_logger_handler(self.console)
    _LOGGER.info("Starting session %s", self.global_ctx_name)

    self.tasks["housekeep"] = {asyncio.create_task(self.housekeep_run())}
    self.tasks["startup_timeout"] = {asyncio.create_task(self.startup_timeout())}

    self.iopub_server, self.iopub_port = await self.start_one_server(self.iopub_listen)
    self.heartbeat_server, self.heartbeat_port = await self.start_one_server(self.heartbeat_listen)
    self.control_server, self.control_port = await self.start_one_server(self.control_listen)
    self.stdin_server, self.stdin_port = await self.start_one_server(self.stdin_listen)
    self.shell_server, self.shell_port = await self.start_one_server(self.shell_listen)

    #
    # For debugging, can use the real ZMQ library instead on certain sockets; comment out
    # the corresponding asyncio.start_server() call above if you enable the ZMQ-based
    # functions here. You can then turn on verbosity level 4 (-vvvv) in hass_pyscript_kernel.py
    # to see all the byte data in case you need to debug the simple ZMQ implementation here.
    # The two most important zmq functions are shown below.
    #
    # import zmq
    # import zmq.asyncio
    #
    # def zmq_bind(socket, connection, port):
    #     """Bind a socket."""
    #     if port <= 0:
    #         return socket.bind_to_random_port(connection)
    #     # _LOGGER.debug(f"binding to %s:%s" % (connection, port))
    #     socket.bind("%s:%s" % (connection, port))
    #     return port
    #
    # zmq_ctx = zmq.asyncio.Context()
    #
    # ##########################################
    # # Shell using real ZMQ for debugging:
    # async def shell_listen_zmq():
    #     """Task that listens to shell messages using ZMQ."""
    #     try:
    #         _LOGGER.debug("shell_listen_zmq connected")
    #         connection = self.config["transport"] + "://" + self.config["ip"]
    #         shell_socket = zmq_ctx.socket(zmq.ROUTER)
    #         self.shell_port = zmq_bind(shell_socket, connection, -1)
    #         _LOGGER.debug("shell_listen_zmq connected")
    #         while 1:
    #             msg = await shell_socket.recv_multipart()
    #             await self.shell_handler(shell_socket, msg)
    #     except asyncio.CancelledError:
    #         raise
    #     except Exception:
    #         _LOGGER.error("shell_listen exception %s", traceback.format_exc(-1))
    #         await self.housekeep_q.put(["shutdown"])
    #
    # ##########################################
    # # IOPub using real ZMQ for debugging:
    # # IOPub/Sub:
    # async def iopub_listen_zmq():
    #     """Task that listens to iopub messages using ZMQ."""
    #     try:
    #         _LOGGER.debug("iopub_listen_zmq connected")
    #         connection = self.config["transport"] + "://" + self.config["ip"]
    #         iopub_socket = zmq_ctx.socket(zmq.PUB)
    #         self.iopub_port = zmq_bind(self.iopub_socket, connection, -1)
    #         self.iopub_socket.add(iopub_socket)
    #         while 1:
    #             wire_msg = await iopub_socket.recv_multipart()
    #             _LOGGER.debug("iopub received %s", wire_msg)
    #     except asyncio.CancelledError:
    #         raise
    #     except EOFError:
    #         await self.housekeep_q.put(["shutdown"])
    #         _LOGGER.debug("iopub_listen got eof")
    #     except Exception as err:
    #         _LOGGER.error("iopub_listen exception %s", err)
    #         await self.housekeep_q.put(["shutdown"])
    #
    # self.tasks["shell"] = {asyncio.create_task(shell_listen_zmq())}
    # self.tasks["iopub"] = {asyncio.create_task(iopub_listen_zmq())}
    #
async def session_shutdown(self):
    """Shutdown the kernel session.

    Closes all channel servers, cancels every registered connection task,
    and closes any remaining iopub sockets.  Safe to call more than once:
    subsequent calls return immediately because iopub_server is cleared.
    """
    if not self.iopub_server:
        # already shutdown, so quit
        return
    GlobalContextMgr.delete(self.global_ctx_name)
    self.ast_ctx.remove_logger_handler(self.console)
    _LOGGER.info("Shutting down session %s", self.global_ctx_name)
    for server in [
        self.heartbeat_server,
        self.control_server,
        self.stdin_server,
        self.shell_server,
        self.iopub_server,
    ]:
        if server:
            server.close()
    self.heartbeat_server = None
    self.iopub_server = None
    self.control_server = None
    self.stdin_server = None
    self.shell_server = None

    for task_set in self.tasks.values():
        for task in task_set:
            try:
                task.cancel()
                await task
            except asyncio.CancelledError:
                pass
    # bug fix: self.tasks is a dict keyed by channel name everywhere else in
    # this class (housekeep_run does self.tasks[name] = set()); resetting it
    # to a list would raise TypeError on any later "register" message
    self.tasks = {}

    for sock in self.iopub_socket:
        try:
            sock.close()
        except Exception as err:
            _LOGGER.error("iopub socket close exception: %s", err)
    self.iopub_socket = set()

View File

@@ -0,0 +1,45 @@
"""Describe logbook events."""
import logging
from homeassistant.core import callback
from .const import DOMAIN
_LOGGER = logging.getLogger(__name__)
@callback
def async_describe_events(hass, async_describe_event):  # type: ignore
    """Describe logbook events.

    Registers a describer for pyscript_running events so Home Assistant's
    logbook can render them as human-readable entries.
    """

    @callback
    def async_describe_logbook_event(event):  # type: ignore
        """Describe a logbook event.

        Builds a short description from the event data: the trigger type
        (event, state, time, or anything else) determines the source text.
        """
        data = event.data
        func_args = data.get("func_args", {})
        ev_name = data.get("name", "unknown")
        ev_entity_id = data.get("entity_id", "pyscript.unknown")
        ev_trigger_type = func_args.get("trigger_type", "unknown")
        if ev_trigger_type == "event":
            ev_source = f"event {func_args.get('event_type', 'unknown event')}"
        elif ev_trigger_type == "state":
            ev_source = f"state change {func_args.get('var_name', 'unknown entity')} == {func_args.get('value', 'unknown value')}"
        elif ev_trigger_type == "time":
            ev_trigger_time = func_args.get("trigger_time", "unknown")
            if ev_trigger_time is None:
                # a None trigger_time means the trigger fired at startup
                ev_trigger_time = "startup"
            ev_source = f"time {ev_trigger_time}"
        else:
            ev_source = ev_trigger_type

        message = f"has been triggered by {ev_source}"

        return {
            "name": ev_name,
            "message": message,
            "source": ev_source,
            "entity_id": ev_entity_id,
        }

    async_describe_event(DOMAIN, "pyscript_running", async_describe_logbook_event)

View File

@@ -0,0 +1,17 @@
{
"domain": "pyscript",
"name": "Pyscript Python scripting",
"codeowners": [
"@craigbarratt"
],
"config_flow": true,
"dependencies": [],
"documentation": "https://github.com/custom-components/pyscript",
"homekit": {},
"iot_class": "local_push",
"issue_tracker": "https://github.com/custom-components/pyscript/issues",
"requirements": ["croniter==2.0.2", "watchdog==2.3.1"],
"ssdp": [],
"version": "1.6.1",
"zeroconf": []
}

View File

@@ -0,0 +1,91 @@
"""Handles mqtt messages and notification."""
import json
import logging
from homeassistant.components import mqtt
from .const import LOGGER_PATH
_LOGGER = logging.getLogger(LOGGER_PATH + ".mqtt")
class Mqtt:
    """Define mqtt functions.

    A static class (never instantiated) that fans MQTT messages out to
    asyncio queues registered per topic.
    """

    #
    # Global hass instance
    #
    hass = None

    #
    # notify message queues by mqtt message topic
    #
    notify = {}
    # per-topic unsubscribe callbacks returned by mqtt.async_subscribe()
    notify_remove = {}

    def __init__(self):
        """Warn on Mqtt instantiation."""
        _LOGGER.error("Mqtt class is not meant to be instantiated")

    @classmethod
    def init(cls, hass):
        """Initialize Mqtt."""
        cls.hass = hass

    @classmethod
    def mqtt_message_handler_maker(cls, subscribed_topic):
        """Closure for mqtt_message_handler."""

        async def mqtt_message_handler(mqttmsg):
            """Listen for MQTT messages."""
            func_args = {
                "trigger_type": "mqtt",
                "topic": mqttmsg.topic,
                "payload": mqttmsg.payload,
                "qos": mqttmsg.qos,
            }

            # add a decoded object too when the payload parses as JSON
            try:
                func_args["payload_obj"] = json.loads(mqttmsg.payload)
            except ValueError:
                pass

            await cls.update(subscribed_topic, func_args)

        return mqtt_message_handler

    @classmethod
    async def notify_add(cls, topic, queue):
        """Register to notify for mqtt messages of given topic to be sent to queue."""
        if topic not in cls.notify:
            cls.notify[topic] = set()
            _LOGGER.debug("mqtt.notify_add(%s) -> adding mqtt subscription", topic)
            cls.notify_remove[topic] = await mqtt.async_subscribe(
                cls.hass, topic, cls.mqtt_message_handler_maker(topic), encoding="utf-8", qos=0
            )
        cls.notify[topic].add(queue)

    @classmethod
    def notify_del(cls, topic, queue):
        """Unregister to notify for mqtt messages of given topic for given queue."""
        if topic not in cls.notify or queue not in cls.notify[topic]:
            return
        cls.notify[topic].discard(queue)
        if len(cls.notify[topic]) == 0:
            # last subscriber is gone, so drop the underlying mqtt subscription
            cls.notify_remove[topic]()
            _LOGGER.debug("mqtt.notify_del(%s) -> removing mqtt subscription", topic)
            del cls.notify[topic]
            del cls.notify_remove[topic]

    @classmethod
    async def update(cls, topic, func_args):
        """Deliver all notifications for an mqtt message on the given topic."""
        # bug fix: the debug call used to pass the builtin ``vars`` as a
        # spurious third argument; the format now matches the two arguments
        _LOGGER.debug("mqtt.update(%s, %s)", topic, func_args)
        if topic in cls.notify:
            for queue in cls.notify[topic]:
                await queue.put(["mqtt", func_args.copy()])

View File

@@ -0,0 +1,323 @@
"""Requirements helpers for pyscript."""
import glob
import logging
import os
import sys
from homeassistant.loader import bind_hass
from homeassistant.requirements import async_process_requirements
from .const import (
ATTR_INSTALLED_VERSION,
ATTR_SOURCES,
ATTR_VERSION,
CONF_ALLOW_ALL_IMPORTS,
CONF_INSTALLED_PACKAGES,
DOMAIN,
LOGGER_PATH,
REQUIREMENTS_FILE,
REQUIREMENTS_PATHS,
UNPINNED_VERSION,
)
if sys.version_info[:2] >= (3, 8):
from importlib.metadata import ( # pylint: disable=no-name-in-module,import-error
PackageNotFoundError,
version as installed_version,
)
else:
from importlib_metadata import ( # pylint: disable=import-error
PackageNotFoundError,
version as installed_version,
)
_LOGGER = logging.getLogger(LOGGER_PATH)
def get_installed_version(pkg_name):
    """Get installed version of package. Returns None if not found."""
    try:
        found = installed_version(pkg_name)
    except PackageNotFoundError:
        found = None
    return found
def update_unpinned_versions(package_dict):
    """Check for current installed version of each unpinned package."""
    not_installed = []
    for name, pinned in package_dict.items():
        if pinned != UNPINNED_VERSION:
            continue
        resolved = get_installed_version(name)
        package_dict[name] = resolved
        if not resolved:
            _LOGGER.error("%s wasn't able to be installed", name)
            not_installed.append(name)
    # drop any package whose installed version could not be resolved
    for name in not_installed:
        package_dict.pop(name)
    return package_dict
@bind_hass
def process_all_requirements(pyscript_folder, requirements_paths, requirements_file):
    """
    Load all lines from requirements_file located in requirements_paths.

    Returns files and a list of packages, if any, that need to be installed.

    Each requirement must be a bare package name or 'pkg==version'; ranges
    (',', '<', '>') are rejected.  When the same package appears in several
    files, a pinned version wins over an unpinned one, and the highest
    pinned version wins overall.
    """
    # Re-import Version to avoid dealing with multiple flake and pylint errors
    from packaging.version import Version  # pylint: disable=import-outside-toplevel

    all_requirements_to_process = {}
    for root in requirements_paths:
        for requirements_path in glob.glob(os.path.join(pyscript_folder, root, requirements_file)):
            with open(requirements_path, "r", encoding="utf-8") as requirements_fp:
                all_requirements_to_process[requirements_path] = requirements_fp.readlines()

    all_requirements_to_install = {}
    for requirements_path, pkg_lines in all_requirements_to_process.items():
        for pkg in pkg_lines:
            # Remove inline comments which are accepted by pip but not by Home
            # Assistant's installation method.
            # https://rosettacode.org/wiki/Strip_comments_from_a_string#Python
            i = pkg.find("#")
            if i >= 0:
                pkg = pkg[:i]
            pkg = pkg.strip()
            if not pkg:
                continue
            try:
                # Attempt to get version of package. Do nothing if it's found since
                # we want to use the version that's already installed to be safe
                parts = pkg.split("==")
                if len(parts) > 2 or "," in pkg or ">" in pkg or "<" in pkg:
                    # bug fix: the requirement/path arguments were swapped, and
                    # the wrapped message was missing a space before 'is required'
                    _LOGGER.error(
                        (
                            "Ignoring invalid requirement '%s' specified in '%s'; if a specific version"
                            " is required, the requirement must use the format 'pkg==version'"
                        ),
                        pkg,
                        requirements_path,
                    )
                    continue
                if len(parts) == 1:
                    new_version = UNPINNED_VERSION
                else:
                    new_version = parts[1]
                pkg_name = parts[0]
                current_pinned_version = all_requirements_to_install.get(pkg_name, {}).get(ATTR_VERSION)
                current_sources = all_requirements_to_install.get(pkg_name, {}).get(ATTR_SOURCES, [])
                # If a version hasn't already been recorded, record this one
                if not current_pinned_version:
                    all_requirements_to_install[pkg_name] = {
                        ATTR_VERSION: new_version,
                        ATTR_SOURCES: [requirements_path],
                        ATTR_INSTALLED_VERSION: get_installed_version(pkg_name),
                    }
                # If the new version is unpinned and there is an existing pinned version, use existing
                # pinned version
                elif new_version == UNPINNED_VERSION and current_pinned_version != UNPINNED_VERSION:
                    _LOGGER.warning(
                        (
                            "Unpinned requirement for package '%s' detected in '%s' will be ignored in "
                            "favor of the pinned version '%s' detected in '%s'"
                        ),
                        pkg_name,
                        requirements_path,
                        current_pinned_version,
                        str(current_sources),
                    )
                # If the new version is pinned and the existing version is unpinned, use the new pinned
                # version
                elif new_version != UNPINNED_VERSION and current_pinned_version == UNPINNED_VERSION:
                    # bug fix: the closing quote after the second '%s' was missing
                    _LOGGER.warning(
                        (
                            "Unpinned requirement for package '%s' detected in '%s' will be ignored in "
                            "favor of the pinned version '%s' detected in '%s'"
                        ),
                        pkg_name,
                        str(current_sources),
                        new_version,
                        requirements_path,
                    )
                    all_requirements_to_install[pkg_name] = {
                        ATTR_VERSION: new_version,
                        ATTR_SOURCES: [requirements_path],
                        ATTR_INSTALLED_VERSION: get_installed_version(pkg_name),
                    }
                # If the already recorded version is the same as the new version, append the current
                # path so we can show sources
                elif (
                    new_version == UNPINNED_VERSION and current_pinned_version == UNPINNED_VERSION
                ) or Version(current_pinned_version) == Version(new_version):
                    all_requirements_to_install[pkg_name][ATTR_SOURCES].append(requirements_path)
                # If the already recorded version is lower than the new version, use the new one
                elif Version(current_pinned_version) < Version(new_version):
                    _LOGGER.warning(
                        (
                            "Version '%s' for package '%s' detected in '%s' will be ignored in "
                            "favor of the higher version '%s' detected in '%s'"
                        ),
                        current_pinned_version,
                        pkg_name,
                        str(current_sources),
                        new_version,
                        requirements_path,
                    )
                    all_requirements_to_install[pkg_name].update(
                        {ATTR_VERSION: new_version, ATTR_SOURCES: [requirements_path]}
                    )
                # If the already recorded version is higher than the new version, ignore the new one
                elif Version(current_pinned_version) > Version(new_version):
                    _LOGGER.warning(
                        (
                            "Version '%s' for package '%s' detected in '%s' will be ignored in "
                            "favor of the higher version '%s' detected in '%s'"
                        ),
                        new_version,
                        pkg_name,
                        requirements_path,
                        current_pinned_version,
                        str(current_sources),
                    )
            except ValueError:
                # Not valid requirements line so it can be skipped
                _LOGGER.debug("Ignoring '%s' because it is not a valid package", pkg)
    return all_requirements_to_install
@bind_hass
async def install_requirements(hass, config_entry, pyscript_folder):
    """Install missing requirements from requirements.txt.

    Gathers requirements from every requirements file under pyscript_folder,
    decides which packages pyscript itself is allowed to install or upgrade
    (never touching packages that were changed outside pyscript), installs
    them through Home Assistant, and records what was installed in the
    config entry so those decisions persist across restarts.
    """
    pyscript_installed_packages = config_entry.data.get(CONF_INSTALLED_PACKAGES, {}).copy()

    # Import packaging inside install_requirements so that we can use Home Assistant to install it
    # if it can't been found
    try:
        from packaging.version import Version  # pylint: disable=import-outside-toplevel
    except ModuleNotFoundError:
        await async_process_requirements(hass, DOMAIN, ["packaging"])
        from packaging.version import Version  # pylint: disable=import-outside-toplevel

    all_requirements = await hass.async_add_executor_job(
        process_all_requirements, pyscript_folder, REQUIREMENTS_PATHS, REQUIREMENTS_FILE
    )
    requirements_to_install = {}

    if all_requirements and not config_entry.data.get(CONF_ALLOW_ALL_IMPORTS, False):
        _LOGGER.error(
            (
                "Requirements detected but 'allow_all_imports' is set to False, set "
                "'allow_all_imports' to True if you want packages to be installed"
            )
        )
        return

    for package in all_requirements:
        pkg_installed_version = all_requirements[package].get(ATTR_INSTALLED_VERSION)
        version_to_install = all_requirements[package][ATTR_VERSION]
        sources = all_requirements[package][ATTR_SOURCES]
        # If package is already installed, we need to run some checks
        if pkg_installed_version:
            # If the version to install is unpinned and there is already something installed,
            # defer to what is installed
            if version_to_install == UNPINNED_VERSION:
                _LOGGER.debug(
                    (
                        "Skipping unpinned version of package '%s' because version '%s' is "
                        "already installed"
                    ),
                    package,
                    pkg_installed_version,
                )
                # If installed package is not the same version as the one we last installed,
                # that means that the package is externally managed now so we shouldn't touch it
                # and should remove it from our internal tracker
                if (
                    package in pyscript_installed_packages
                    and pyscript_installed_packages[package] != pkg_installed_version
                ):
                    pyscript_installed_packages.pop(package)
                continue
            # If the installed version differs from the one we last installed, the package is
            # externally managed now: warn, leave it alone, and drop it from our tracker
            if package in pyscript_installed_packages and Version(
                pyscript_installed_packages[package]
            ) != Version(pkg_installed_version):
                _LOGGER.warning(
                    (
                        "Version '%s' for package '%s' detected in '%s' will be ignored in favor of"
                        " the version '%s' which was installed outside of pyscript"
                    ),
                    version_to_install,
                    package,
                    str(sources),
                    pkg_installed_version,
                )
                pyscript_installed_packages.pop(package)
            # If there is a version mismatch between what we want and what is installed, we
            # can overwrite it since we know it was last installed by us
            elif package in pyscript_installed_packages and Version(version_to_install) != Version(
                pkg_installed_version
            ):
                requirements_to_install[package] = all_requirements[package]
            # If there is an installed version that we have not previously installed, we
            # should not install it
            else:
                _LOGGER.debug(
                    (
                        "Version '%s' for package '%s' detected in '%s' will be ignored because it"
                        " is already installed"
                    ),
                    version_to_install,
                    package,
                    str(sources),
                )
        # Anything not already installed in the environment can be installed
        else:
            requirements_to_install[package] = all_requirements[package]

    if requirements_to_install:
        _LOGGER.info(
            "Installing the following packages: %s",
            str(requirements_to_install),
        )
        await async_process_requirements(
            hass,
            DOMAIN,
            [
                f"{package}=={pkg_info[ATTR_VERSION]}"
                if pkg_info[ATTR_VERSION] != UNPINNED_VERSION
                else package
                for package, pkg_info in requirements_to_install.items()
            ],
        )
    else:
        _LOGGER.debug("No new packages to install")

    # Update package tracker in config entry for next time
    pyscript_installed_packages.update(
        {package: pkg_info[ATTR_VERSION] for package, pkg_info in requirements_to_install.items()}
    )

    # If any requirements were unpinned, get their version now so they can be pinned later
    if any(version == UNPINNED_VERSION for version in pyscript_installed_packages.values()):
        pyscript_installed_packages = await hass.async_add_executor_job(
            update_unpinned_versions, pyscript_installed_packages
        )

    if pyscript_installed_packages != config_entry.data.get(CONF_INSTALLED_PACKAGES, {}):
        new_data = config_entry.data.copy()
        new_data[CONF_INSTALLED_PACKAGES] = pyscript_installed_packages
        hass.config_entries.async_update_entry(entry=config_entry, data=new_data)

View File

@@ -0,0 +1,107 @@
# Describes the format for available pyscript services
reload:
name: Reload pyscript
description: Reloads all available pyscripts and restart triggers
fields:
global_ctx:
name: Global Context
description: Only reload this specific global context (file or app)
example: file.example
required: false
selector:
text:
jupyter_kernel_start:
name: Start Jupyter kernel
description: Starts a jupyter kernel for interactive use; Called by Jupyter front end and should generally not be used by users
fields:
shell_port:
name: Shell Port Number
description: Shell port number
example: 63599
required: false
selector:
number:
min: 10240
max: 65535
iopub_port:
name: IOPub Port Number
description: IOPub port number
example: 63598
required: false
selector:
number:
min: 10240
max: 65535
stdin_port:
name: Stdin Port Number
description: Stdin port number
example: 63597
required: false
selector:
number:
min: 10240
max: 65535
control_port:
name: Control Port Number
description: Control port number
example: 63596
required: false
selector:
number:
min: 10240
max: 65535
hb_port:
name: Heartbeat Port Number
description: Heartbeat port number
example: 63595
required: false
selector:
number:
min: 10240
max: 65535
ip:
name: IP Address
description: IP address to connect to Jupyter front end
example: 127.0.0.1
default: 127.0.0.1
required: false
selector:
text:
key:
name: Security Key
description: Used for signing
example: 012345678-9abcdef023456789abcdef
required: true
selector:
text:
transport:
name: Transport Type
description: Transport type
example: tcp
default: tcp
required: false
selector:
select:
options:
- tcp
- udp
signature_scheme:
name: Signing Algorithm
description: Signing algorithm
example: hmac-sha256
required: false
default: hmac-sha256
selector:
select:
options:
- hmac-sha256
kernel_name:
name: Name of Kernel
description: Kernel name
example: pyscript
required: true
default: pyscript
selector:
text:

View File

@@ -0,0 +1,438 @@
"""Handles state variable access and change notification."""
import asyncio
import logging
from homeassistant.core import Context
from homeassistant.helpers.restore_state import DATA_RESTORE_STATE
from homeassistant.helpers.service import async_get_all_descriptions
from .const import LOGGER_PATH
from .entity import PyscriptEntity
from .function import Function
_LOGGER = logging.getLogger(LOGGER_PATH + ".state")
STATE_VIRTUAL_ATTRS = {"entity_id", "last_changed", "last_updated"}
class StateVal(str):
    """A str subclass carrying the value and attributes of a state variable."""

    def __new__(cls, state):
        """Build an instance from a hass State: value becomes the string, attributes become instance attrs."""
        inst = str.__new__(cls, state.state)
        inst.__dict__ = state.attributes.copy()
        inst.entity_id = state.entity_id
        inst.last_changed = state.last_changed
        inst.last_updated = state.last_updated
        return inst
class State:
    """Class for state functions.

    A static class (never instantiated) that mediates all access to hass
    state variables and delivers change notifications to listener queues.
    """

    #
    # Global hass instance
    #
    hass = None
    #
    # notify message queues by variable
    #
    notify = {}
    #
    # Last value of state variable notifications. We maintain this
    # so that trigger evaluation can use the last notified value,
    # rather than fetching the current value, which is subject to
    # race conditions when multiple state variables are set quickly.
    #
    notify_var_last = {}
    #
    # pyscript yaml configuration
    #
    pyscript_config = {}
    #
    # pyscript vars which have already been registered as persisted
    #
    persisted_vars = {}
    #
    # other parameters of all services that have "entity_id" as a parameter
    #
    service2args = {}

    def __init__(self):
        """Warn on State instantiation."""
        _LOGGER.error("State class is not meant to be instantiated")

    @classmethod
    def init(cls, hass):
        """Initialize State with the global hass instance."""
        cls.hass = hass
@classmethod
async def get_service_params(cls):
    """Get parameters for all services.

    Builds cls.service2args: domain -> service -> set of field names
    (excluding entity_id), restricted to services that accept an
    entity_id field or declare a target.
    """
    cls.service2args = {}
    all_services = await async_get_all_descriptions(cls.hass)
    for domain in all_services:
        cls.service2args[domain] = {}
        for service, desc in all_services[domain].items():
            if "entity_id" not in desc["fields"] and "target" not in desc:
                continue
            cls.service2args[domain][service] = set(desc["fields"].keys())
            cls.service2args[domain][service].discard("entity_id")
@classmethod
async def notify_add(cls, var_names, queue):
    """Register to notify state variables changes to be sent to queue."""
    added = False
    names = var_names if isinstance(var_names, set) else {var_names}
    for var_name in names:
        pieces = var_name.split(".")
        if len(pieces) not in (2, 3):
            # only 'domain.entity' or 'domain.entity.attr' forms are tracked
            continue
        entity_name = f"{pieces[0]}.{pieces[1]}"
        cls.notify.setdefault(entity_name, {})[queue] = var_names
        added = True
    return added
@classmethod
def notify_del(cls, var_names, queue):
    """Unregister notify of state variables changes for given queue.

    Each name in var_names is resolved to its 'domain.entity' form and the
    queue is removed from that entity's listener map.
    """
    for var_name in var_names if isinstance(var_names, set) else {var_names}:
        parts = var_name.split(".")
        if len(parts) != 2 and len(parts) != 3:
            continue
        state_var_name = f"{parts[0]}.{parts[1]}"
        if state_var_name not in cls.notify or queue not in cls.notify[state_var_name]:
            # bug fix: this used to `return`, which silently skipped
            # unregistering the remaining names in var_names
            continue
        del cls.notify[state_var_name][queue]
@classmethod
async def update(cls, new_vars, func_args):
    """Deliver all notifications for state variable changes.

    new_vars maps entity names to their new values; func_args is the
    trigger information forwarded to each listener queue.
    """
    notify = {}
    for var_name, var_val in new_vars.items():
        if var_name in cls.notify:
            # cache the last notified value so trigger evaluation uses it
            # instead of re-fetching (avoids races on rapid updates)
            cls.notify_var_last[var_name] = var_val
            notify.update(cls.notify[var_name])

    if notify:
        _LOGGER.debug("state.update(%s, %s)", new_vars, func_args)
        for queue, var_names in notify.items():
            await queue.put(["state", [cls.notify_var_get(var_names, new_vars), func_args.copy()]])
@classmethod
def notify_var_get(cls, var_names, new_vars):
    """Add values of var_names to new_vars, or default to None.

    Returns a copy of new_vars augmented with the value of every watched
    name in var_names, resolved from the last-notified cache where
    possible.  Supported name forms are 'domain.entity',
    'domain.entity.attr' and 'domain.entity.old.attr'.
    """
    notify_vars = new_vars.copy()
    for var_name in var_names if var_names is not None else []:
        if var_name in notify_vars:
            continue
        parts = var_name.split(".")
        if var_name in cls.notify_var_last:
            notify_vars[var_name] = cls.notify_var_last[var_name]
        elif len(parts) == 3 and f"{parts[0]}.{parts[1]}" in cls.notify_var_last:
            # attribute of a watched entity: pull it off the cached value
            notify_vars[var_name] = getattr(
                cls.notify_var_last[f"{parts[0]}.{parts[1]}"], parts[2], None
            )
        elif len(parts) == 4 and parts[2] == "old" and f"{parts[0]}.{parts[1]}.old" in notify_vars:
            # attribute of the previous value, e.g. 'domain.entity.old.attr'
            notify_vars[var_name] = getattr(notify_vars[f"{parts[0]}.{parts[1]}.old"], parts[3], None)
        elif 1 <= var_name.count(".") <= 3 and not cls.exist(var_name):
            notify_vars[var_name] = None
    return notify_vars
@classmethod
def set(cls, var_name, value=None, new_attributes=None, **kwargs):
    """Set a state variable and optional attributes in hass.

    If value or new_attributes is omitted, the current value or attributes
    are preserved.  Extra keyword arguments become additional attributes;
    a 'context' kwarg of type Context is used for the state write instead
    of being stored as an attribute.
    """
    if var_name.count(".") != 1:
        raise NameError(f"invalid name {var_name} (should be 'domain.entity')")

    if isinstance(value, StateVal):
        if new_attributes is None:
            #
            # value is a StateVal, so extract the attributes and value
            #
            new_attributes = value.__dict__.copy()
            for discard in STATE_VIRTUAL_ATTRS:
                new_attributes.pop(discard, None)
        value = str(value)

    state_value = None
    if value is None or new_attributes is None:
        # need the existing state to fill in whichever part wasn't given
        state_value = cls.hass.states.get(var_name)

    if value is None and state_value:
        value = state_value.state

    if new_attributes is None:
        if state_value:
            new_attributes = state_value.attributes.copy()
        else:
            new_attributes = {}

    curr_task = asyncio.current_task()
    if "context" in kwargs and isinstance(kwargs["context"], Context):
        context = kwargs["context"]
        del kwargs["context"]
    else:
        # fall back to the context associated with the current task, if any
        context = Function.task2context.get(curr_task, None)

    if kwargs:
        new_attributes = new_attributes.copy()
        new_attributes.update(kwargs)

    _LOGGER.debug("setting %s = %s, attr = %s", var_name, value, new_attributes)
    cls.hass.states.async_set(var_name, value, new_attributes, context=context)
    if var_name in cls.notify_var_last or var_name in cls.notify:
        #
        # immediately update a variable we are monitoring since it could take a while
        # for the state changed event to propagate
        #
        cls.notify_var_last[var_name] = StateVal(cls.hass.states.get(var_name))

    if var_name in cls.persisted_vars:
        # keep the persisted entity in sync so the value survives restarts
        cls.persisted_vars[var_name].set_state(value)
        cls.persisted_vars[var_name].set_attributes(new_attributes)
@classmethod
def setattr(cls, var_attr_name, value):
    """Set a single attribute of a state variable in hass."""
    pieces = var_attr_name.split(".")
    if len(pieces) != 3:
        raise NameError(f"invalid name {var_attr_name} (should be 'domain.entity.attr')")
    domain, entity, attr = pieces
    entity_name = f"{domain}.{entity}"
    if not cls.exist(entity_name):
        raise NameError(f"state {entity_name} doesn't exist")
    # delegate to set(), which merges keyword attributes onto the entity
    cls.set(entity_name, **{attr: value})
@classmethod
async def register_persist(cls, var_name):
    """Register pyscript state variable to be persisted with RestoreState.

    Only entities in the "pyscript." domain are persisted; any other name,
    or a name already registered, is silently ignored.
    """
    if var_name.startswith("pyscript.") and var_name not in cls.persisted_vars:
        # this is a hack accessing hass internals; should re-implement using RestoreEntity
        restore_data = cls.hass.data[DATA_RESTORE_STATE]
        this_entity = PyscriptEntity()
        this_entity.entity_id = var_name
        cls.persisted_vars[var_name] = this_entity
        try:
            restore_data.async_restore_entity_added(this_entity)
        except TypeError:
            # NOTE(review): fallback presumably for HA versions whose
            # async_restore_entity_added took the entity_id string rather
            # than the entity object -- confirm against the HA release notes
            restore_data.async_restore_entity_added(var_name)
@classmethod
async def persist(cls, var_name, default_value=None, default_attributes=None):
    """Persist a pyscript domain state variable, and update with optional defaults."""
    if not var_name.startswith("pyscript.") or var_name.count(".") != 1:
        raise NameError(f"invalid name {var_name} (should be 'pyscript.entity')")
    await cls.register_persist(var_name)
    if not cls.exist(var_name):
        # brand-new variable: seed it from the defaults, if given
        if default_value is not None:
            cls.set(var_name, default_value, default_attributes)
    elif default_attributes is not None:
        # existing variable: only fill in attributes not already present
        current = cls.hass.states.get(var_name)
        missing = {
            key: val for key, val in default_attributes.items() if key not in current.attributes
        }
        cls.set(var_name, current.state, **missing)
@classmethod
def exist(cls, var_name):
    """Check if a state variable value or attribute exists in hass."""
    pieces = var_name.split(".")
    if len(pieces) not in (2, 3):
        return False
    state = cls.hass.states.get(f"{pieces[0]}.{pieces[1]}")
    if state is None:
        return False
    if len(pieces) == 2:
        return True
    # three parts: the third must be a service parameter, a real
    # attribute, or one of the virtual attributes
    attr = pieces[2]
    if pieces[0] in cls.service2args and attr in cls.service2args[pieces[0]]:
        return True
    return attr in state.attributes or attr in STATE_VIRTUAL_ATTRS
@classmethod
def get(cls, var_name):
    """Get a state variable value or attribute from hass.

    Returns a StateVal for "domain.entity"; for "domain.entity.attr" it
    returns either a callable service wrapper (when attr is a service in
    that domain taking entity_id) or the named attribute.

    Raises:
        NameError: if the name is malformed or the entity does not exist.
        AttributeError: if the requested attribute does not exist.
    """
    parts = var_name.split(".")
    if len(parts) != 2 and len(parts) != 3:
        raise NameError(f"invalid name '{var_name}' (should be 'domain.entity' or 'domain.entity.attr')")
    state = cls.hass.states.get(f"{parts[0]}.{parts[1]}")
    if not state:
        raise NameError(f"name '{parts[0]}.{parts[1]}' is not defined")
    #
    # simplest case is just the state value
    #
    state = StateVal(state)
    if len(parts) == 2:
        return state
    #
    # see if this is a service that has an entity_id parameter
    #
    if parts[0] in cls.service2args and parts[2] in cls.service2args[parts[0]]:
        params = cls.service2args[parts[0]][parts[2]]

        def service_call_factory(domain, service, entity_id, params):
            # closure binds domain/service/entity so the returned coroutine
            # can be called later with just the service's own arguments
            async def service_call(*args, **kwargs):
                curr_task = asyncio.current_task()
                hass_args = {}
                # peel off hass-level call options from the kwargs; anything
                # left is passed through as service data
                for keyword, typ, default in [
                    ("context", [Context], Function.task2context.get(curr_task, None)),
                    ("blocking", [bool], None),
                    ("return_response", [bool], None),
                    ("limit", [float, int], None),
                ]:
                    if keyword in kwargs and type(kwargs[keyword]) in typ:
                        hass_args[keyword] = kwargs.pop(keyword)
                    elif default:
                        hass_args[keyword] = default
                kwargs["entity_id"] = entity_id
                if len(args) == 1 and len(params) == 1:
                    #
                    # with just a single parameter and positional argument, create the keyword setting
                    #
                    [param_name] = params
                    kwargs[param_name] = args[0]
                elif len(args) != 0:
                    raise TypeError(f"service {domain}.{service} takes no positional arguments")
                # return await Function.hass_services_async_call(domain, service, kwargs, **hass_args)
                return await cls.hass.services.async_call(domain, service, kwargs, **hass_args)

            return service_call

        return service_call_factory(parts[0], parts[2], f"{parts[0]}.{parts[1]}", params)
    #
    # finally see if it is an attribute
    #
    try:
        return getattr(state, parts[2])
    except AttributeError:
        raise AttributeError(  # pylint: disable=raise-missing-from
            f"state '{parts[0]}.{parts[1]}' has no attribute '{parts[2]}'"
        )
@classmethod
def delete(cls, var_name, context=None):
    """Delete a state variable or attribute from hass."""
    pieces = var_name.split(".")
    if not context:
        context = Function.task2context.get(asyncio.current_task(), None)
    extra = {"context": context} if context else {}
    if len(pieces) == 2:
        # Clear our cached copy right away; the state-changed event that
        # follows the removal can take a while to propagate.
        if var_name in cls.notify_var_last or var_name in cls.notify:
            cls.notify_var_last[var_name] = None
        if not cls.hass.states.async_remove(var_name, **extra):
            raise NameError(f"name '{var_name}' not defined")
        return
    if len(pieces) == 3:
        # remove one attribute by re-setting the entity without it
        entity_name = f"{pieces[0]}.{pieces[1]}"
        state = cls.hass.states.get(entity_name)
        if state is None:
            raise NameError(f"state {entity_name} doesn't exist")
        attrs = state.attributes.copy()
        if pieces[2] not in attrs:
            raise AttributeError(f"state '{entity_name}' has no attribute '{pieces[2]}'")
        del attrs[pieces[2]]
        cls.set(entity_name, state.state, new_attributes=attrs, **extra)
        return
    raise NameError(f"invalid name '{var_name}' (should be 'domain.entity' or 'domain.entity.attr')")
@classmethod
def getattr(cls, var_name):
    """Return a dict of attributes for a state variable."""
    if isinstance(var_name, StateVal):
        # already a StateVal: its __dict__ holds the attributes, minus
        # the virtual ones we add ourselves
        attrs = var_name.__dict__.copy()
        for virtual in STATE_VIRTUAL_ATTRS:
            attrs.pop(virtual, None)
        return attrs
    if var_name.count(".") != 1:
        raise NameError(f"invalid name {var_name} (should be 'domain.entity')")
    state = cls.hass.states.get(var_name)
    return state.attributes.copy() if state else None
@classmethod
def get_attr(cls, var_name):
    """Return a dict of attributes for a state variable - deprecated."""
    # kept only for backward compatibility; logs a warning and forwards
    _LOGGER.warning("state.get_attr() is deprecated: use state.getattr() instead")
    return cls.getattr(var_name)
@classmethod
def completions(cls, root):
    """Return possible completions of state variables."""
    dots = root.count(".")
    if dots == 2:
        #
        # complete attribute names of a fully-specified entity
        #
        entity_name, _, attr_prefix = root.rpartition(".")
        state = cls.hass.states.get(entity_name)
        if not state:
            return set()
        candidates = set(state.attributes.keys()) | set(STATE_VIRTUAL_ATTRS)
        domain = root.split(".")[0]
        if domain in cls.service2args:
            candidates |= set(cls.service2args[domain].keys())
        return {
            f"{entity_name}.{attr}"
            for attr in candidates
            if attr.lower().startswith(attr_prefix)
        }
    if dots < 2:
        #
        # complete among all entity ids
        #
        return {
            state.entity_id
            for state in cls.hass.states.async_all()
            if state.entity_id.lower().startswith(root)
        }
    return set()
@classmethod
async def names(cls, domain=None):
    """Implement names, which returns all entity_ids.

    When domain is given, only entity_ids in that domain are returned.
    """
    return cls.hass.states.async_entity_ids(domain)
@classmethod
def register_functions(cls):
    """Register state functions and config variable."""
    # these names become callable/readable from user pyscript code
    functions = {
        "state.get": cls.get,
        "state.set": cls.set,
        "state.setattr": cls.setattr,
        "state.names": cls.names,
        "state.getattr": cls.getattr,
        "state.get_attr": cls.get_attr,  # deprecated form; to be removed
        "state.persist": cls.persist,
        "state.delete": cls.delete,
        "pyscript.config": cls.pyscript_config,
    }
    Function.register(functions)
@classmethod
def set_pyscript_config(cls, config):
    """Set pyscript yaml config."""
    # Mutate the existing dict in place: it was handed out by reference
    # (registered as pyscript.config), so rebinding would orphan readers.
    cls.pyscript_config.clear()
    cls.pyscript_config.update(config)

View File

@@ -0,0 +1,38 @@
{
"config": {
"step": {
"user": {
"title": "pyscript",
"description": "Once you have created an entry, refer to the [docs](https://hacs-pyscript.readthedocs.io/en/latest/) to learn how to create scripts and functions.",
"data": {
"allow_all_imports": "Allow All Imports?",
"hass_is_global": "Access hass as a global variable?"
}
}
},
"abort": {
"already_configured": "Already configured.",
"single_instance_allowed": "Already configured. Only a single configuration possible.",
"updated_entry": "This entry has already been setup but the configuration has been updated."
}
},
"options": {
"step": {
"init": {
"title": "Update pyscript configuration",
"data": {
"allow_all_imports": "Allow All Imports?",
"hass_is_global": "Access hass as a global variable?"
}
},
"no_ui_configuration_allowed": {
"title": "No UI configuration allowed",
"description": "This entry was created via `configuration.yaml`, so all configuration parameters must be updated there. The [`pyscript.reload`](developer-tools/service) service will allow you to apply the changes you make to `configuration.yaml` without restarting your Home Assistant instance."
},
"no_update": {
"title": "No update needed",
"description": "There is nothing to update."
}
}
}
}

View File

@@ -0,0 +1,38 @@
{
"config": {
"step": {
"user": {
"title": "pyscript",
"description": "Wenn Sie einen Eintrag angelegt haben, können Sie sich die [Doku (Englisch)](https://hacs-pyscript.readthedocs.io/en/latest/) ansehen, um zu lernen wie Sie Scripts und Funktionen erstellen können.",
"data": {
"allow_all_imports": "Alle Importe erlauben?",
"hass_is_global": "Home Assistant als globale Variable verwenden?"
}
}
},
"abort": {
"already_configured": "Bereits konfiguriert.",
"single_instance_allowed": "Bereits konfiguriert. Es ist nur eine Konfiguration gleichzeitig möglich",
"updated_entry": "Der Eintrag wurde bereits erstellt, aber die Konfiguration wurde aktualisiert."
}
},
"options": {
"step": {
"init": {
"title": "Pyscript configuration aktualisieren",
"data": {
"allow_all_imports": "Alle Importe erlauben??",
"hass_is_global": "Home Assistant als globale Variable verwenden?"
}
},
"no_ui_configuration_allowed": {
"title": "Die Konfiguartion der graphischen Nutzeroberfläche ist deaktiviert",
"description": "Der Eintrag wurde über die Datei `configuration.yaml` erstellt. Alle Konfigurationsparameter müssen desshalb dort eingestellt werden. Der [`pyscript.reload`](developer-tools/service) Service übernimmt alle Änderungen aus `configuration.yaml`, ohne dass Home Assistant neu gestartet werden muss."
},
"no_update": {
"title": "Keine Aktualisierung notwendig",
"description": "Es gibt nichts zu aktualisieren."
}
}
}
}

View File

@@ -0,0 +1,38 @@
{
"config": {
"step": {
"user": {
"title": "pyscript",
"description": "Once you have created an entry, refer to the [docs](https://hacs-pyscript.readthedocs.io/en/latest/) to learn how to create scripts and functions.",
"data": {
"allow_all_imports": "Allow All Imports?",
"hass_is_global": "Access hass as a global variable?"
}
}
},
"abort": {
"already_configured": "Already configured.",
"single_instance_allowed": "Already configured. Only a single configuration possible.",
"updated_entry": "This entry has already been setup but the configuration has been updated."
}
},
"options": {
"step": {
"init": {
"title": "Update pyscript configuration",
"data": {
"allow_all_imports": "Allow All Imports?",
"hass_is_global": "Access hass as a global variable?"
}
},
"no_ui_configuration_allowed": {
"title": "No UI configuration allowed",
"description": "This entry was created via `configuration.yaml`, so all configuration parameters must be updated there. The [`pyscript.reload`](developer-tools/service) service will allow you to apply the changes you make to `configuration.yaml` without restarting your Home Assistant instance."
},
"no_update": {
"title": "No update needed",
"description": "There is nothing to update."
}
}
}
}

View File

@@ -0,0 +1,38 @@
{
"config": {
"step": {
"user": {
"title": "pyscript",
"description": "Akonáhle ste vytvorili položku, pozrite si [docs](https://hacs-pyscript.readthedocs.io/en/latest/) naučiť sa, ako vytvárať skripty a funkcie.",
"data": {
"allow_all_imports": "Povoliť všetky importy?",
"hass_is_global": "Prístup k globálnej premennej?"
}
}
},
"abort": {
"already_configured": "Už konfigurované.",
"single_instance_allowed": "Už nakonfigurované. Iba jedna možná konfigurácia.",
"updated_entry": "Táto položka už bola nastavená, ale konfigurácia bola aktualizovaná."
}
},
"options": {
"step": {
"init": {
"title": "Aktualizovať pyscript konfiguráciu",
"data": {
"allow_all_imports": "povoliť všetky importy?",
"hass_is_global": "Prístup k globálnej premennej?"
}
},
"no_ui_configuration_allowed": {
"title": "Nie je povolená konfigurácia používateľského rozhrania",
"description": "Tento záznam bol vytvorený cez `configuration.yaml`, Takže všetky konfiguračné parametre sa musia aktualizovať. [`pyscript.reload`](developer-tools/service) Služba vám umožní uplatniť zmeny, ktoré vykonáte `configuration.yaml` bez reštartovania inštancie Home Assistant."
},
"no_update": {
"title": "Nie je potrebná aktualizácia",
"description": "Nie je nič na aktualizáciu."
}
}
}
}

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,95 @@
"""Handles webhooks and notification."""
import logging
from aiohttp import hdrs
from homeassistant.components import webhook
from .const import LOGGER_PATH
_LOGGER = logging.getLogger(LOGGER_PATH + ".webhook")
class Webhook:
    """Define webhook functions."""

    # Global hass instance
    hass = None

    # Listener queues, keyed by webhook id
    notify = {}
    # Per-webhook-id callables that unregister the HA webhook
    notify_remove = {}

    def __init__(self):
        """Warn on Webhook instantiation."""
        _LOGGER.error("Webhook class is not meant to be instantiated")

    @classmethod
    def init(cls, hass):
        """Initialize Webhook."""
        cls.hass = hass

    @classmethod
    async def webhook_handler(cls, hass, webhook_id, request):
        """Listen callback for given webhook which updates any notifications."""
        func_args = {
            "trigger_type": "webhook",
            "webhook_id": webhook_id,
        }
        content_type = request.headers.get(hdrs.CONTENT_TYPE, "")
        if "json" in content_type:
            func_args["payload"] = await request.json()
        else:
            # A form may repeat a key; keep only the first value of each.
            form = await request.post()
            func_args["payload"] = {key: form.getone(key) for key in form.keys()}
        await cls.update(webhook_id, func_args)

    @classmethod
    def notify_add(cls, webhook_id, local_only, methods, queue):
        """Register to notify for webhooks of given type to be sent to queue."""
        if webhook_id not in cls.notify:
            # first listener for this id: register the webhook with hass
            cls.notify[webhook_id] = set()
            _LOGGER.debug("webhook.notify_add(%s) -> adding webhook listener", webhook_id)
            webhook.async_register(
                cls.hass,
                "pyscript",  # DOMAIN
                "pyscript",  # NAME
                webhook_id,
                cls.webhook_handler,
                local_only=local_only,
                allowed_methods=methods,
            )
            cls.notify_remove[webhook_id] = lambda: webhook.async_unregister(cls.hass, webhook_id)
        cls.notify[webhook_id].add(queue)

    @classmethod
    def notify_del(cls, webhook_id, queue):
        """Unregister to notify for webhooks of given type for given queue."""
        queues = cls.notify.get(webhook_id)
        if queues is None or queue not in queues:
            return
        queues.discard(queue)
        if not queues:
            # last listener gone: tear down the hass webhook registration
            cls.notify_remove[webhook_id]()
            _LOGGER.debug("webhook.notify_del(%s) -> removing webhook listener", webhook_id)
            del cls.notify[webhook_id]
            del cls.notify_remove[webhook_id]

    @classmethod
    async def update(cls, webhook_id, func_args):
        """Deliver all notifications for an webhook of the given type."""
        _LOGGER.debug("webhook.update(%s, %s)", webhook_id, func_args)
        for queue in cls.notify.get(webhook_id, ()):
            await queue.put(["webhook", func_args.copy()])

View File

@@ -0,0 +1,143 @@
"""Initialisation du package de l'intégration HACS Tuto"""
import logging
import voluptuous as vol
from homeassistant.const import EVENT_HOMEASSISTANT_START
from homeassistant.core import HomeAssistant
from homeassistant.config_entries import ConfigEntry
from homeassistant.helpers.typing import ConfigType
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers import selector
from homeassistant.components.input_boolean import DOMAIN as INPUT_BOOLEAN_DOMAIN
from homeassistant.components.input_number import DOMAIN as INPUT_NUMBER_DOMAIN
from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN
from homeassistant.components.humidifier import DOMAIN as HUMIDIFIER_DOMAIN
from homeassistant.components.climate import DOMAIN as CLIMATE_DOMAIN
from homeassistant.components.number import DOMAIN as NUMBER_DOMAIN
from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN
# from homeassistant.helpers.entity_component import EntityComponent
from .const import DOMAIN, PLATFORMS
from .coordinator import SolarOptimizerCoordinator
# from .input_boolean import async_setup_entry as async_setup_entry_input_boolean
_LOGGER = logging.getLogger(__name__)
# Yaml schema for the solar_optimizer: section of configuration.yaml.
CONFIG_SCHEMA = vol.Schema(
    {
        DOMAIN: vol.Schema(
            {
                # Simulated-annealing tuning parameters
                "algorithm": vol.Schema(
                    {
                        vol.Required("initial_temp", default=1000): vol.Coerce(float),
                        vol.Required("min_temp", default=0.1): vol.Coerce(float),
                        vol.Required("cooling_factor", default=0.95): vol.Coerce(float),
                        vol.Required(
                            "max_iteration_number", default=1000
                        ): cv.positive_int,
                    }
                ),
                # Devices the optimizer may switch on/off or re-power
                "devices": vol.All(
                    [
                        {
                            vol.Required("name"): str,
                            vol.Required("entity_id"): selector.EntitySelector(
                                selector.EntitySelectorConfig(
                                    domain=[INPUT_BOOLEAN_DOMAIN, SWITCH_DOMAIN, HUMIDIFIER_DOMAIN, CLIMATE_DOMAIN, BUTTON_DOMAIN]
                                )
                            ),
                            # optional entity used to command a variable power level
                            vol.Optional("power_entity_id"): selector.EntitySelector(
                                selector.EntitySelectorConfig(
                                    domain=[INPUT_NUMBER_DOMAIN, NUMBER_DOMAIN]
                                )
                            ),
                            vol.Required("power_max"): vol.Coerce(float),
                            vol.Optional("power_min"): vol.Coerce(float),
                            vol.Optional("power_step"): vol.Coerce(float),
                            # templates deciding whether the device can be used / is active
                            vol.Optional("check_usable_template"): str,
                            vol.Optional("check_active_template"): str,
                            # minimum durations (minutes) between state/power changes
                            vol.Optional("duration_min"): vol.Coerce(float),
                            vol.Optional("duration_stop_min"): vol.Coerce(float),
                            vol.Optional("duration_power_min"): vol.Coerce(float),
                            vol.Optional("action_mode"): str,
                            vol.Required("activation_service"): str,
                            vol.Required("deactivation_service"): str,
                            vol.Optional("change_power_service"): str,
                            vol.Optional("convert_power_divide_factor"): vol.Coerce(
                                float
                            ),
                            vol.Optional("battery_soc_threshold", default=0): vol.Coerce(float),
                        }
                    ]
                ),
            }
        ),
    },
    extra=vol.ALLOW_EXTRA,
)
async def async_setup(
    hass: HomeAssistant, config: ConfigType
):  # pylint: disable=unused-argument
    """Initialize the integration from the yaml configuration.

    Builds the SolarOptimizerCoordinator from the `solar_optimizer:` section
    of configuration.yaml and defers the first computation until Home
    Assistant has started.
    """
    _LOGGER.info(
        # fixed typo in the log format string ("plaforms" -> "platforms")
        "Initializing %s integration with platforms: %s with config: %s",
        DOMAIN,
        PLATFORMS,
        config.get(DOMAIN),
    )
    hass.data.setdefault(DOMAIN, {})
    # The config argument holds the parsed configuration.yaml content.
    solar_optimizer_config = config.get(DOMAIN)
    hass.data[DOMAIN]["coordinator"] = coordinator = SolarOptimizerCoordinator(
        hass, solar_optimizer_config
    )
    # Wait for startup so restored entity states are available before the
    # first optimization pass (see coordinator.on_ha_started).
    hass.bus.async_listen_once("homeassistant_started", coordinator.on_ha_started)
    return True
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Create the entities from a ConfigEntry (UI-configured part)."""
    _LOGGER.debug(
        "Appel de async_setup_entry entry: entry_id='%s', data='%s'",
        entry.entry_id,
        entry.data,
    )
    hass.data.setdefault(DOMAIN, {})
    # Register the 'update_listener' so option changes reload the entry
    entry.async_on_unload(entry.add_update_listener(update_listener))
    await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
    return True
async def update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
    """Force a reload of the entities associated with a ConfigEntry."""
    await hass.config_entries.async_reload(entry.entry_id)
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Handle removal of an entry."""
    # Unload all platforms and report success; per-entry data is
    # intentionally kept in hass.data[DOMAIN].
    return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
async def async_reload_entry(hass: HomeAssistant, entry: ConfigEntry) -> None:
    """Reload config entry.

    NOTE(review): only unloads; the setup half is commented out, so hass
    itself presumably re-runs async_setup_entry -- confirm with callers.
    """
    await async_unload_entry(hass, entry)
    # await async_setup_entry(hass, entry)

View File

@@ -0,0 +1,145 @@
""" Le Config Flow """
import logging
from typing import Any
import copy
from collections.abc import Mapping
import voluptuous as vol
from homeassistant.core import callback
from homeassistant.config_entries import (
ConfigFlow,
FlowResult,
OptionsFlow,
ConfigEntry,
)
from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN
from homeassistant.components.input_number import DOMAIN as INPUT_NUMBER_DOMAIN
from homeassistant.helpers import selector
import homeassistant.helpers.config_validation as cv
from .const import DOMAIN
_LOGGER = logging.getLogger(__name__)
# Schema shared by the initial config flow and the options flow.
solar_optimizer_schema = {
    # How often (seconds) the optimization is recomputed
    vol.Required("refresh_period_sec", default=300): int,
    vol.Required("power_consumption_entity_id"): selector.EntitySelector(
        selector.EntitySelectorConfig(domain=[SENSOR_DOMAIN, INPUT_NUMBER_DOMAIN])
    ),
    vol.Required("power_production_entity_id"): selector.EntitySelector(
        selector.EntitySelectorConfig(domain=[SENSOR_DOMAIN, INPUT_NUMBER_DOMAIN])
    ),
    vol.Required("sell_cost_entity_id"): selector.EntitySelector(
        selector.EntitySelectorConfig(domain=[SENSOR_DOMAIN, INPUT_NUMBER_DOMAIN])
    ),
    vol.Required("buy_cost_entity_id"): selector.EntitySelector(
        selector.EntitySelectorConfig(domain=[SENSOR_DOMAIN, INPUT_NUMBER_DOMAIN])
    ),
    vol.Required("sell_tax_percent_entity_id"): selector.EntitySelector(
        selector.EntitySelectorConfig(domain=[INPUT_NUMBER_DOMAIN])
    ),
    # When true, production readings are smoothed by the coordinator
    vol.Optional("smooth_production", default=True): cv.boolean,
    vol.Optional("battery_soc_entity_id"): selector.EntitySelector(
        selector.EntitySelectorConfig(domain=[SENSOR_DOMAIN, INPUT_NUMBER_DOMAIN])
    ),
}
class SolarOptimizerConfigFlow(ConfigFlow, domain=DOMAIN):
    """The class implementing the config flow for our DOMAIN.

    It must derive from FlowHandler.
    """

    # The version of our configFlow; allows migrating entities
    # to a newer version if the schema changes
    VERSION = 1
    _user_inputs: dict = {}

    async def async_step_user(self, user_input: dict | None = None) -> FlowResult:
        """Handle the 'user' step, the entry point of the config flow.

        Called twice: first without user_input (show the form), then with
        the user's input (save it and create the entry).
        """
        user_form = vol.Schema(solar_optimizer_schema)
        if user_input is None:
            _LOGGER.debug(
                "config_flow step user (1). 1er appel : pas de user_input -> on affiche le form user_form"
            )
            return self.async_show_form(step_id="user", data_schema=user_form)
        # Second call: user_input is present -> store the result
        self._user_inputs.update(user_input)
        _LOGGER.debug(
            "config_flow step2 (2). L'ensemble de la configuration est: %s",
            self._user_inputs,
        )
        return self.async_create_entry(title="SolarOptimizer", data=self._user_inputs)

    @staticmethod
    @callback
    def async_get_options_flow(config_entry: ConfigEntry):
        """Get options flow for this handler"""
        return SolarOptimizerOptionsFlow(config_entry)
class SolarOptimizerOptionsFlow(OptionsFlow):
    """The class which enable to modified the configuration"""

    # Accumulated user input across the flow
    _user_inputs: dict = {}
    # The existing entry being edited
    config_entry: ConfigEntry = None

    def __init__(self, config_entry: ConfigEntry) -> None:
        """Initialize the options flow with the existing ConfigEntry."""
        self.config_entry = config_entry
        # Seed user_inputs from the existing configEntry data
        self._user_inputs = config_entry.data.copy()

    async def async_step_init(self, user_input: dict | None = None) -> FlowResult:
        """Handle the 'init' step, the entry point of the options flow.

        Called twice: first without user_input (show the form pre-filled
        with the current values), then with the user's input (save it).
        """
        user_form = vol.Schema(solar_optimizer_schema)
        if user_input is None:
            _LOGGER.debug(
                "config_flow step user (1). 1er appel : pas de user_input -> on affiche le form user_form"
            )
            return self.async_show_form(
                step_id="init",
                data_schema=self.add_suggested_values_to_schema(
                    data_schema=user_form,
                    suggested_values=self._user_inputs,
                ),
            )
        # Second call: user_input is present -> store the result
        self._user_inputs.update(user_input)
        _LOGGER.debug(
            "config_flow step_user (2). L'ensemble de la configuration est: %s",
            self._user_inputs,
        )
        # Call the final step to record the modifications
        return await self.async_end()

    async def async_end(self):
        """Finalization of the ConfigEntry creation"""
        _LOGGER.info(
            "Recreation de l'entry %s. La nouvelle config est maintenant : %s",
            self.config_entry.entry_id,
            self._user_inputs,
        )
        # Update the configEntry's data directly
        # (rather than adding an options object to the configEntry)
        self.hass.config_entries.async_update_entry(
            self.config_entry, data=self._user_inputs
        )
        # Suppress the options object in the configEntry
        return self.async_create_entry(title=None, data=None)

View File

@@ -0,0 +1,41 @@
""" Les constantes pour l'intégration Solar Optimizer """
from slugify import slugify
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from homeassistant.util import dt as dt_util
# Integration domain and the entity platforms it provides
DOMAIN = "solar_optimizer"
PLATFORMS: list[Platform] = [Platform.SENSOR, Platform.SWITCH]

# Default recomputation period (seconds) when not configured
DEFAULT_REFRESH_PERIOD_SEC = 300

# How a device is commanded: by calling a service or by firing an event
CONF_ACTION_MODE_SERVICE = "service_call"
CONF_ACTION_MODE_EVENT = "event"
CONF_ACTION_MODES = [CONF_ACTION_MODE_SERVICE, CONF_ACTION_MODE_EVENT]

# Event types fired by the integration
EVENT_TYPE_SOLAR_OPTIMIZER_CHANGE_POWER = "solar_optimizer_change_power_event"
EVENT_TYPE_SOLAR_OPTIMIZER_STATE_CHANGE = "solar_optimizer_state_change_event"
EVENT_TYPE_SOLAR_OPTIMIZER_ENABLE_STATE_CHANGE = (
    "solar_optimizer_enable_state_change_event"
)
def get_tz(hass: HomeAssistant):
    """Return the tzinfo for the timezone configured in Home Assistant."""
    return dt_util.get_time_zone(hass.config.time_zone)
def name_to_unique_id(name: str) -> str:
    """Convert a name to a unique id.

    The name is slugified, then any '-' the slugifier produced is
    replaced with '_' so the result is a valid entity id fragment.
    """
    return slugify(name).replace("-", "_")
class ConfigurationError(Exception):
    """An error in configuration"""

    def __init__(self, message):
        """Initialize with a human-readable error message."""
        super().__init__(message)

View File

@@ -0,0 +1,228 @@
""" The data coordinator class """
import logging
import math
from datetime import timedelta
from homeassistant.core import HomeAssistant # callback
from homeassistant.helpers.update_coordinator import (
DataUpdateCoordinator,
)
from homeassistant.config_entries import ConfigEntry
from .const import DEFAULT_REFRESH_PERIOD_SEC, name_to_unique_id
from .managed_device import ManagedDevice
from .simulated_annealing_algo import SimulatedAnnealingAlgorithm
_LOGGER = logging.getLogger(__name__)
def get_safe_float(hass, entity_id: str):
    """Return the float value of an entity's state, or None when unavailable.

    Args:
        hass: the Home Assistant instance (anything exposing ``states.get``).
        entity_id: the entity to read; may be None.

    Returns:
        The state parsed as a finite float, or None when the entity_id is
        None, the entity does not exist, its state is "unknown" or
        "unavailable", is not a number, or is inf/nan.
    """
    if entity_id is None:
        return None
    state = hass.states.get(entity_id)
    if state is None or state.state in ("unknown", "unavailable"):
        return None
    try:
        value = float(state.state)
    except (TypeError, ValueError):
        # robustness fix: a non-numeric state (e.g. a text sensor) used to
        # raise; treat it as unavailable instead, per the "safe" contract
        return None
    # math.isfinite() rejects both inf and nan in a single check
    return value if math.isfinite(value) else None
class SolarOptimizerCoordinator(DataUpdateCoordinator):
"""The coordinator class which is used to coordinate all update"""
# Devices the optimizer manages, built from the yaml "devices" list
_devices: list[ManagedDevice]
# Entity ids read each refresh cycle (set in configure())
_power_consumption_entity_id: str
_power_production_entity_id: str
_sell_cost_entity_id: str
_buy_cost_entity_id: str
_sell_tax_percent_entity_id: str
_battery_soc_entity_id: str
# When True, production is smoothed with the previous value
_smooth_production: bool
# Last (possibly smoothed) production value
_last_production: float
# The simulated-annealing optimizer
_algo: SimulatedAnnealingAlgorithm
def __init__(self, hass: HomeAssistant, config):
    """Initialize the coordinator.

    Builds a ManagedDevice per entry of the "devices" configuration list
    and the simulated-annealing algorithm from the "algorithm" section.

    Raises:
        Exception: re-raised unchanged when the "devices" config is invalid.
    """
    super().__init__(
        hass,
        _LOGGER,
        name="Solar Optimizer",
        # update_interval=timedelta(seconds=refresh_period_sec),
    )  # pylint : disable=line-too-long
    self._devices = []
    try:
        for _, device in enumerate(config.get("devices")):
            _LOGGER.debug("Configuration of manageable device: %s", device)
            self._devices.append(ManagedDevice(hass, device))
    except Exception as err:
        # log, then re-raise so setup fails visibly instead of half-working
        _LOGGER.error(err)
        _LOGGER.error(
            "Your 'devices' configuration is wrong. SolarOptimizer will not be operational until you fix it"
        )
        raise err
    algo_config = config.get("algorithm")
    self._algo = SimulatedAnnealingAlgorithm(
        float(algo_config.get("initial_temp")),
        float(algo_config.get("min_temp")),
        float(algo_config.get("cooling_factor")),
        int(algo_config.get("max_iteration_number")),
    )
    self.config = config
async def configure(self, config: ConfigEntry) -> None:
    """Configure the coordinator from configEntry of the integration.

    Reads the refresh period and all the entity ids used by each
    refresh cycle; does NOT trigger the first refresh (see below).
    """
    refresh_period_sec = (
        config.data.get("refresh_period_sec") or DEFAULT_REFRESH_PERIOD_SEC
    )
    self.update_interval = timedelta(seconds=refresh_period_sec)
    self._schedule_refresh()
    self._power_consumption_entity_id = config.data.get(
        "power_consumption_entity_id"
    )
    self._power_production_entity_id = config.data.get("power_production_entity_id")
    self._sell_cost_entity_id = config.data.get("sell_cost_entity_id")
    self._buy_cost_entity_id = config.data.get("buy_cost_entity_id")
    self._sell_tax_percent_entity_id = config.data.get("sell_tax_percent_entity_id")
    self._battery_soc_entity_id = config.data.get("battery_soc_entity_id")
    # treat anything other than an explicit True as False
    self._smooth_production = config.data.get("smooth_production") is True
    self._last_production = 0.0
    # Do not calculate immediatly because switch state are not restored yet. Wait for homeassistant_started event
    # which is captured in onHAStarted method
    # await self.async_config_entry_first_refresh()
async def on_ha_started(self, _) -> None:
    """Listen the homeassistant_started event to initialize the first calculation."""
    _LOGGER.info("First initialization of Solar Optimizer")
    await self.async_config_entry_first_refresh()
async def _async_update_data(self):
    """Run one optimization cycle.

    Reads the configured input sensors, runs the simulated annealing
    algorithm and applies the resulting on/off/power commands to the
    managed devices.

    Returns the dict of calculated data used by the sensor entities, or
    None when the power production sensor cannot be read.
    """
    _LOGGER.info("Refreshing Solar Optimizer calculation")

    calculated_data = {}

    # Align each device's current power with the real state of the device
    for device in self._devices:
        device.set_current_power_with_device_state()

    # get_safe_float returns None when the sensor is unknown/unavailable.
    # 0 W is a perfectly valid production value (e.g. at night), so only
    # bail out when no value at all can be read. (The previous
    # `not power_production` test wrongly disabled the optimizer when the
    # production was exactly 0.)
    power_production = get_safe_float(self.hass, self._power_production_entity_id)
    if power_production is None:
        _LOGGER.warning(
            "Power production is not valued. Solar Optimizer will be disabled"
        )
        return None

    if not self._smooth_production:
        calculated_data["power_production"] = power_production
    else:
        # 50 % exponential smoothing to damp sudden production variations
        self._last_production = round(
            0.5 * self._last_production + 0.5 * power_production
        )
        calculated_data["power_production"] = self._last_production
    # Also expose the raw (unsmoothed) production value
    calculated_data["power_production_brut"] = power_production

    # The following inputs may be None; the algorithm then skips calculation
    calculated_data["power_consumption"] = get_safe_float(
        self.hass, self._power_consumption_entity_id
    )
    calculated_data["sell_cost"] = get_safe_float(
        self.hass, self._sell_cost_entity_id
    )
    calculated_data["buy_cost"] = get_safe_float(self.hass, self._buy_cost_entity_id)
    calculated_data["sell_tax_percent"] = get_safe_float(
        self.hass, self._sell_tax_percent_entity_id
    )
    soc = get_safe_float(self.hass, self._battery_soc_entity_id)
    # No battery configured -> treat the battery as empty (0 %)
    calculated_data["battery_soc"] = soc if soc is not None else 0

    #
    # Run the simulated annealing ("recuit simulé") algorithm
    #
    best_solution, best_objective, total_power = self._algo.recuit_simule(
        self._devices,
        calculated_data["power_consumption"],
        calculated_data["power_production"],
        calculated_data["sell_cost"],
        calculated_data["buy_cost"],
        calculated_data["sell_tax_percent"],
        calculated_data["battery_soc"],
    )
    calculated_data["best_solution"] = best_solution
    calculated_data["best_objective"] = best_objective
    calculated_data["total_power"] = total_power

    # Apply the best solution: switch devices on/off and adjust their power
    should_log = False
    for equipement in best_solution:
        _LOGGER.debug("Dealing with best_solution for %s", equipement)
        name = equipement["name"]
        requested_power = equipement.get("requested_power")
        state = equipement["state"]
        device = self.get_device_by_name(name)
        if not device:
            continue
        is_active = device.is_active
        if is_active and not state:
            _LOGGER.debug("Extinction de %s", name)
            should_log = True
            await device.deactivate()
        elif not is_active and state:
            _LOGGER.debug("Allumage de %s", name)
            should_log = True
            await device.activate(requested_power)

        # Send the power change if the device should be on, supports power
        # change, and the power changed or the device was just activated
        if (
            state
            and device.can_change_power
            and (device.current_power != requested_power or not is_active)
        ):
            _LOGGER.debug(
                "Change power of %s to %s",
                equipement["name"],
                requested_power,
            )
            should_log = True
            await device.change_requested_power(requested_power)

        # Expose the updated device in the result
        calculated_data[name_to_unique_id(name)] = device

    if should_log:
        _LOGGER.info("Calculated data are: %s", calculated_data)
    else:
        _LOGGER.debug("Calculated data are: %s", calculated_data)

    return calculated_data
@property
def devices(self) -> list[ManagedDevice]:
    """Get all the managed devices built from the 'devices' yaml configuration"""
    return self._devices
def get_device_by_name(self, name: str) -> ManagedDevice | None:
    """Return the managed device whose name matches, or None if not found"""
    return next(
        (device for device in self._devices if device.name == name),
        None,
    )
def get_device_by_unique_id(self, uid: str) -> ManagedDevice | None:
    """Return the managed device whose unique_id matches, or None if not found"""
    return next(
        (device for device in self._devices if device.unique_id == uid),
        None,
    )

View File

@@ -0,0 +1,7 @@
{
"name": "Solar Optimizer",
"content_in_root": false,
"render_readme": true,
"hide_default_branch": false,
"homeassistant": "2023.6.1"
}

View File

@@ -0,0 +1,479 @@
""" A ManagedDevice represent a device than can be managed by the optimisatiion algorithm"""
import logging
from datetime import datetime, timedelta
from homeassistant.core import HomeAssistant
from homeassistant.helpers.template import Template
from homeassistant.const import STATE_ON, STATE_UNAVAILABLE, STATE_UNKNOWN
from .const import (
get_tz,
name_to_unique_id,
CONF_ACTION_MODE_SERVICE,
CONF_ACTION_MODE_EVENT,
CONF_ACTION_MODES,
ConfigurationError,
EVENT_TYPE_SOLAR_OPTIMIZER_CHANGE_POWER,
EVENT_TYPE_SOLAR_OPTIMIZER_STATE_CHANGE,
EVENT_TYPE_SOLAR_OPTIMIZER_ENABLE_STATE_CHANGE,
)
# Action types passed to do_service_action / do_event_action and
# ManagedDevice._apply_action
ACTION_ACTIVATE = "Activate"
ACTION_DEACTIVATE = "Deactivate"
ACTION_CHANGE_POWER = "ChangePower"

_LOGGER = logging.getLogger(__name__)
async def do_service_action(
    hass: HomeAssistant,
    entity_id,
    action_type,
    service_name,
    current_power,
    requested_power,
    convert_power_divide_factor,
):
    """Apply an action to an entity through a Home Assistant service call.

    The service_name must be formatted as 'domain/service'. A state-change
    event is fired afterwards to mirror the action.
    """
    _LOGGER.info("Calling service %s for entity %s", service_name, entity_id)

    parts = service_name.split("/")
    if len(parts) != 2:
        raise ConfigurationError(
            f"Incorrect service declaration for entity {entity_id}. Service {service_name} should be formatted with: 'domain/service'"
        )
    domain, service = parts

    # Only a power change carries a payload: the requested power converted
    # to the unit expected by the target entity
    service_data = (
        {"value": round(requested_power / convert_power_divide_factor)}
        if action_type == ACTION_CHANGE_POWER
        else {}
    )

    await hass.services.async_call(
        domain,
        service,
        service_data=service_data,
        target={"entity_id": entity_id},
    )

    # Mirror the action as an event so that listeners can react to the change
    do_event_action(
        hass,
        entity_id,
        action_type,
        current_power,
        requested_power,
        EVENT_TYPE_SOLAR_OPTIMIZER_STATE_CHANGE,
    )
def do_event_action(
    hass: HomeAssistant,
    entity_id,
    action_type,
    current_power,
    requested_power,
    event_type: str,
):
    """Apply an action to an entity by firing an event on the HA bus."""
    _LOGGER.info(
        "Sending event %s with action %s for entity %s with requested_power %s and current_power %s",
        event_type,
        action_type,
        entity_id,
        requested_power,
        current_power,
    )
    # Build the payload first, then fire it on the bus
    payload = {
        "action_type": action_type,
        "requested_power": requested_power,
        "current_power": current_power,
        "entity_id": entity_id,
    }
    hass.bus.fire(event_type=event_type, event_data=payload)
class ManagedDevice:
    """A device that the Solar Optimizer algorithm can manage.

    A managed device can be switched on and off and, when a power_min >= 0
    is configured, its power can also be adjusted between power_min and
    power_max by steps of power_step. Actions are applied either through a
    service call or through an event, depending on the configured
    action_mode.
    """

    _name: str
    _unique_id: str
    _entity_id: str
    _power_entity_id: str
    _power_max: int
    _power_min: int
    _power_step: int
    _can_change_power: bool
    _current_power: int
    _requested_power: int
    _duration_sec: int
    _duration_stop_sec: int
    _duration_power_sec: int
    _check_usable_template: Template
    _check_active_template: Template
    _next_date_available: datetime
    _next_date_available_power: datetime
    _action_mode: str
    _activation_service: str
    _deactivation_service: str
    _change_power_service: str
    _convert_power_divide_factor: int
    _battery_soc: float
    _battery_soc_threshold: float

    def __init__(self, hass: HomeAssistant, device_config):
        """Initialize a manageable device from its configuration dict"""
        self._hass = hass
        self._name = device_config.get("name")
        self._unique_id = name_to_unique_id(self._name)
        self._entity_id = device_config.get("entity_id")
        self._power_entity_id = device_config.get("power_entity_id")
        self._power_max = int(device_config.get("power_max"))
        # power_min >= 0 means the device supports variable power
        self._power_min = int(device_config.get("power_min") or -1)
        self._power_step = int(device_config.get("power_step") or 0)
        self._can_change_power = self._power_min >= 0
        # Factor used to convert Watts into the unit expected by the device
        self._convert_power_divide_factor = int(
            device_config.get("convert_power_divide_factor") or 1
        )
        self._current_power = self._requested_power = 0
        duration_min = float(device_config.get("duration_min"))
        self._duration_sec = round(duration_min * 60)
        self._duration_power_sec = round(
            float(device_config.get("duration_power_min") or duration_min) * 60
        )
        self._duration_stop_sec = round(
            float(device_config.get("duration_stop_min") or duration_min) * 60
        )
        if device_config.get("check_usable_template"):
            self._check_usable_template = Template(
                device_config.get("check_usable_template"), hass
            )
        else:
            # If no template for usability, the device is supposed to be always usable
            self._check_usable_template = Template("{{ True }}", hass)
        if device_config.get("check_active_template"):
            self._check_active_template = Template(
                device_config.get("check_active_template"), hass
            )
        else:
            # Default: the device is considered active when its entity is 'on'
            template_string = (
                "{{ is_state('" + self._entity_id + "', '" + STATE_ON + "') }}"
            )
            self._check_active_template = Template(template_string, hass)
        self._next_date_available_power = self._next_date_available = datetime.now(
            get_tz(hass)
        )
        self._action_mode = device_config.get("action_mode")
        self._activation_service = device_config.get("activation_service")
        self._deactivation_service = device_config.get("deactivation_service")
        self._change_power_service = device_config.get("change_power_service")
        # Battery soc is unknown until the coordinator pushes it via set_battery_soc
        self._battery_soc = None
        self._battery_soc_threshold = float(
            device_config.get("battery_soc_threshold") or 0
        )
        if self.is_active:
            # Bug fix: the two branches were inverted. A fixed-power device
            # runs at power_max (its power_min defaults to -1), while a
            # variable-power device conservatively starts at power_min.
            # This is consistent with set_current_power_with_device_state().
            self._requested_power = self._current_power = (
                self._power_min if self._can_change_power else self._power_max
            )
        self._enable = True

    async def _apply_action(self, action_type: str, requested_power=None):
        """Apply an action to a managed device.

        This method is the generic implementation behind activate,
        deactivate and change_requested_power: it dispatches to a service
        call or an event depending on the configured action_mode, and
        manages the availability (anti short-cycle) dates.
        """
        _LOGGER.debug(
            "Applying action %s for entity %s. requested_power=%s",
            action_type,
            self._entity_id,
            requested_power,
        )
        if requested_power is not None:
            self._requested_power = requested_power

        if self._action_mode == CONF_ACTION_MODE_SERVICE:
            method = None
            entity_id = self._entity_id
            if action_type == ACTION_ACTIVATE:
                method = self._activation_service
                self.reset_next_date_available(action_type)
                if self._can_change_power:
                    self.reset_next_date_available_power()
            elif action_type == ACTION_DEACTIVATE:
                method = self._deactivation_service
                self.reset_next_date_available(action_type)
            elif action_type == ACTION_CHANGE_POWER:
                assert (
                    self._can_change_power
                ), f"Equipment {self._name} cannot change its power. We should not be there."
                method = self._change_power_service
                # Power changes target the power entity, not the on/off entity
                entity_id = self._power_entity_id
                self.reset_next_date_available_power()

            await do_service_action(
                self._hass,
                entity_id,
                action_type,
                method,
                self._current_power,
                self._requested_power,
                self._convert_power_divide_factor,
            )
        elif self._action_mode == CONF_ACTION_MODE_EVENT:
            # NOTE(review): event mode always fires
            # EVENT_TYPE_SOLAR_OPTIMIZER_CHANGE_POWER, even for activate /
            # deactivate, while service mode fires ..._STATE_CHANGE —
            # confirm this asymmetry is intended
            do_event_action(
                self._hass,
                self._entity_id,
                action_type,
                self._current_power,
                self._requested_power,
                EVENT_TYPE_SOLAR_OPTIMIZER_CHANGE_POWER,
            )
        else:
            raise ConfigurationError(
                f"Incorrect action_mode declaration for entity '{self._entity_id}'. Action_mode '{self._action_mode}' is not supported. Use one of {CONF_ACTION_MODES}"
            )
        self._current_power = self._requested_power

    async def activate(self, requested_power=None):
        """Use this method to activate this ManagedDevice"""
        return await self._apply_action(ACTION_ACTIVATE, requested_power)

    async def deactivate(self):
        """Use this method to deactivate this ManagedDevice"""
        return await self._apply_action(ACTION_DEACTIVATE, 0)

    async def change_requested_power(self, requested_power):
        """Use this method to change the requested power of this ManagedDevice"""
        return await self._apply_action(ACTION_CHANGE_POWER, requested_power)

    def reset_next_date_available(self, action_type):
        """Increment the next availability date to now + the configured duration.

        Uses _duration_sec after an activation and _duration_stop_sec after
        a deactivation.
        """
        if action_type == ACTION_ACTIVATE:
            self._next_date_available = datetime.now(get_tz(self._hass)) + timedelta(
                seconds=self._duration_sec
            )
        else:
            self._next_date_available = datetime.now(get_tz(self._hass)) + timedelta(
                seconds=self._duration_stop_sec
            )
        _LOGGER.debug(
            "Next availability date for %s is %s", self._name, self._next_date_available
        )

    def reset_next_date_available_power(self):
        """Increment the next availability date for power change to now + _duration_power_sec"""
        self._next_date_available_power = datetime.now(get_tz(self._hass)) + timedelta(
            seconds=self._duration_power_sec
        )
        _LOGGER.debug(
            "Next availability date for power change for %s is %s",
            self._name,
            self._next_date_available_power,
        )

    def set_current_power_with_device_state(self):
        """Set the current power according to the real device state"""
        if not self.is_active:
            self._current_power = 0
            _LOGGER.debug(
                "Set current_power to 0 for device %s cause not active", self._name
            )
            return

        if not self._can_change_power:
            # Fixed-power device: when on, it consumes power_max
            self._current_power = self._power_max
            _LOGGER.debug(
                "Set current_power to %s for device %s cause active and not can_change_power",
                self._current_power,
                self._name,
            )
            return

        # Variable-power device: read the power entity (unit depends on
        # convert_power_divide_factor, e.g. Amps)
        amps = self._hass.states.get(self._power_entity_id)
        if not amps or amps.state in [None, STATE_UNKNOWN, STATE_UNAVAILABLE]:
            # Power entity unreadable: fall back to the minimal power
            self._current_power = self._power_min
            _LOGGER.debug(
                "Set current_power to %s for device %s cause can_change_power but amps is %s",
                self._current_power,
                self._name,
                amps,
            )
            return

        self._current_power = round(
            float(amps.state) * self._convert_power_divide_factor
        )
        _LOGGER.debug(
            "Set current_power to %s for device %s cause can_change_power and amps is %s",
            self._current_power,
            self._name,
            amps.state,
        )

    def set_enable(self, enable: bool):
        """Enable or disable the ManagedDevice for Solar Optimizer"""
        _LOGGER.info("%s - Set enable=%s", self.name, enable)
        self._enable = enable
        self.publish_enable_state_change()

    @property
    def is_enabled(self) -> bool:
        """return true if the managed device is enabled for solar optimisation"""
        return self._enable

    @property
    def is_active(self) -> bool:
        """Check if device is active by getting the underlying state of the device"""
        result = self._check_active_template.async_render(context={})
        if result:
            _LOGGER.debug("%s is active", self._name)
        return result

    @property
    def is_usable(self) -> bool:
        """A device is usable for optimisation if the check_usable_template
        returns true, if the device is not waiting for the end of its cycle
        and if the battery soc is >= battery_soc_threshold"""
        context = {}
        now = datetime.now(get_tz(self._hass))
        result = self._check_usable_template.async_render(context) and (
            now >= self._next_date_available
            or (self._can_change_power and now >= self._next_date_available_power)
        )
        if not result:
            _LOGGER.debug("%s is not usable", self._name)

        if result and self._battery_soc is not None and self._battery_soc_threshold is not None:
            if self._battery_soc < self._battery_soc_threshold:
                result = False
                _LOGGER.debug("%s is not usable due to battery soc threshold (%s < %s)", self._name, self._battery_soc, self._battery_soc_threshold)

        return result

    @property
    def is_waiting(self):
        """A device is waiting if the device is waiting for the end of its cycle"""
        now = datetime.now(get_tz(self._hass))
        result = now < self._next_date_available
        if result:
            _LOGGER.debug("%s is waiting", self._name)
        return result

    @property
    def name(self):
        """The name of the ManagedDevice"""
        return self._name

    @property
    def unique_id(self):
        """The unique id of the ManagedDevice (derived from its name)"""
        return self._unique_id

    @property
    def power_max(self):
        """The power max of the managed device"""
        return self._power_max

    @property
    def power_min(self):
        """The power min of the managed device (-1 when power is not manageable)"""
        return self._power_min

    @property
    def power_step(self):
        """The power step of the managed device"""
        return self._power_step

    @property
    def duration_sec(self) -> int:
        """The duration a device is not available after a state change of the managed device"""
        return self._duration_sec

    @property
    def duration_stop_sec(self) -> int:
        """The duration a device is not available after a stop of the managed device"""
        return self._duration_stop_sec

    @property
    def duration_power_sec(self) -> int:
        """The duration a device is not available after a power change of the managed device"""
        return self._duration_power_sec

    @property
    def entity_id(self) -> str:
        """The entity_id of the device"""
        return self._entity_id

    @property
    def power_entity_id(self) -> str:
        """The entity_id of the device which gives the current power"""
        return self._power_entity_id

    @property
    def current_power(self) -> int:
        """The current_power of the device"""
        return self._current_power

    @property
    def requested_power(self) -> int:
        """The requested_power of the device"""
        return self._requested_power

    @property
    def can_change_power(self) -> bool:
        """true if the device can change its power"""
        return self._can_change_power

    @property
    def next_date_available(self) -> datetime:
        """returns the next available date for state change"""
        return self._next_date_available

    @property
    def next_date_available_power(self) -> datetime:
        """return the next available date for power change"""
        return self._next_date_available_power

    @property
    def convert_power_divide_factor(self) -> int:
        """return the factor used to convert Watts into the device's unit"""
        return self._convert_power_divide_factor

    def set_battery_soc(self, battery_soc):
        """Define the battery soc. This is used with is_usable
        to determine if the device is usable"""
        self._battery_soc = battery_soc

    def publish_enable_state_change(self) -> None:
        """Publish an event when the enable state is changed"""
        self._hass.bus.fire(
            event_type=EVENT_TYPE_SOLAR_OPTIMIZER_ENABLE_STATE_CHANGE,
            event_data={
                "device_unique_id": self._unique_id,
                "is_enabled": self.is_enabled,
                "is_active": self.is_active,
                "is_usable": self.is_usable,
                "is_waiting": self.is_waiting,
            },
        )

View File

@@ -0,0 +1,14 @@
{
"domain": "solar_optimizer",
"name": "Solar Optimizer",
"codeowners": [
"@jmcollin78"
],
"config_flow": true,
"documentation": "https://github.com/jmcollin78/solar_optimizer",
"integration_type": "device",
"iot_class": "local_polling",
"issue_tracker": "https://github.com/jmcollin78/solar_optimizer/issues",
"quality_scale": "silver",
"version": "1.7.0"
}

View File

@@ -0,0 +1,112 @@
""" A sensor entity that holds the result of the recuit simule algorithm """
import logging
from homeassistant.const import UnitOfPower
from homeassistant.core import callback, HomeAssistant
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from homeassistant.components.sensor import (
SensorEntity,
SensorDeviceClass,
SensorStateClass,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.helpers.entity_platform import (
AddEntitiesCallback,
)
from .const import DOMAIN
from .coordinator import SolarOptimizerCoordinator
_LOGGER = logging.getLogger(__name__)
async def async_setup_entry(
    hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback
) -> None:
    """Set up the Solar Optimizer sensor entities for a config entry."""
    coordinator: SolarOptimizerCoordinator = hass.data[DOMAIN]["coordinator"]

    # One sensor per calculated-data key exposed by the coordinator
    sensor_ids = (
        "best_objective",
        "total_power",
        "power_production",
        "power_production_brut",
        "battery_soc",
    )
    entities = [
        SolarOptimizerSensorEntity(coordinator, hass, sensor_id)
        for sensor_id in sensor_ids
    ]
    async_add_entities(entities, False)

    # Push the config entry values into the SolarOptimizer coordinator
    await coordinator.configure(entry)
class SolarOptimizerSensorEntity(CoordinatorEntity, SensorEntity):
    """A sensor entity exposing one value of the coordinator's calculated data.

    `idx` is the key of the coordinator data dict this sensor tracks
    (best_objective, total_power, power_production, power_production_brut
    or battery_soc).
    """

    def __init__(self, coordinator, hass, idx):
        """Initialize the sensor tracking the `idx` key of the coordinator data."""
        super().__init__(coordinator, context=idx)
        self._hass = hass
        self.idx = idx
        self._attr_name = idx
        self._attr_unique_id = "solar_optimizer_" + idx
        self._attr_native_value = None

    @callback
    def _handle_coordinator_update(self) -> None:
        """Handle updated data from the coordinator."""
        # `is None` (not `== None`): identity is the idiomatic and safe way
        # to test for a missing value (PEP 8)
        if (
            not self.coordinator
            or not self.coordinator.data
            or (value := self.coordinator.data.get(self.idx)) is None
        ):
            _LOGGER.debug("No coordinator found or no data...")
            return
        self._attr_native_value = value
        self.async_write_ha_state()

    @property
    def device_info(self):
        """Return information about the device this entity belongs to."""
        return {
            "identifiers": {(DOMAIN, "solar_optimizer_device")},
            "name": "Solar Optimizer",
            # Other device attributes could be added here
        }

    @property
    def icon(self) -> str | None:
        """Return an icon matching the tracked value."""
        if self.idx == "best_objective":
            return "mdi:bullseye-arrow"
        elif self.idx == "total_power":
            return "mdi:flash"
        elif self.idx == "battery_soc":
            return "mdi:battery"
        else:
            return "mdi:solar-power-variant"

    @property
    def device_class(self) -> SensorDeviceClass | None:
        """Return the device class matching the tracked value."""
        if self.idx == "best_objective":
            return SensorDeviceClass.MONETARY
        elif self.idx == "battery_soc":
            return SensorDeviceClass.BATTERY
        else:
            return SensorDeviceClass.POWER

    @property
    def state_class(self) -> SensorStateClass | None:
        """Return the state class matching the tracked value."""
        if self.idx == "best_objective":
            return SensorStateClass.TOTAL
        else:
            return SensorStateClass.MEASUREMENT

    @property
    def native_unit_of_measurement(self) -> str | None:
        """Return the unit matching the tracked value."""
        if self.idx == "best_objective":
            # NOTE(review): MONETARY device class with an empty unit —
            # confirm whether a currency should be configured here
            return ""
        elif self.idx == "battery_soc":
            return "%"
        else:
            return UnitOfPower.WATT

View File

@@ -0,0 +1,3 @@
reload:
name: Reload
description: Reload Solar Optimizer configuration

View File

@@ -0,0 +1,384 @@
""" The Simulated Annealing (recuit simulé) algorithm"""
import logging
import random
import math
import copy
from .managed_device import ManagedDevice
_LOGGER = logging.getLogger(__name__)

# When True, log each iteration of the annealing loop (very verbose)
DEBUG = False
class SimulatedAnnealingAlgorithm:
    """The class which implements the Simulated Annealing (recuit simulé) algorithm"""

    # Tuning parameters of the simulated annealing algorithm. The class-level
    # defaults are overridden by __init__ with the configured values.
    _temperature_initiale: float = 1000
    _temperature_minimale: float = 0.1
    _facteur_refroidissement: float = 0.95
    # Annotation fixed: this is an iteration count used in range()
    _nombre_iterations: int = 1000
    # Working list built by recuit_simule: one dict per enabled device with
    # keys power_max, power_min, power_step, current_power, requested_power,
    # name, state, is_usable, is_waiting, can_change_power
    _equipements: list[dict]
    # Total requested power of the initial solution (W)
    _puissance_totale_eqt_initiale: float
    _cout_achat: float = 15  # buy cost (cents)
    _cout_revente: float = 10  # sell cost (cents)
    _taxe_revente: float = 0.13  # sell tax (percentage)
    _consommation_net: float  # net consumption (W); negative when exporting
    _production_solaire: float  # solar production (W)

    def __init__(
        self,
        initial_temp: float,
        min_temp: float,
        cooling_factor: float,
        max_iteration_number: int,
    ):
        """Initialize the algorithm with values"""
        self._temperature_initiale = initial_temp
        self._temperature_minimale = min_temp
        self._facteur_refroidissement = cooling_factor
        self._nombre_iterations = max_iteration_number
        _LOGGER.info(
            "Initializing the SimulatedAnnealingAlgorithm with initial_temp=%.2f min_temp=%.2f cooling_factor=%.2f max_iterations_number=%d",
            self._temperature_initiale,
            self._temperature_minimale,
            self._facteur_refroidissement,
            self._nombre_iterations,
        )

    def recuit_simule(
        self,
        devices: list[ManagedDevice],
        power_consumption: float,
        solar_power_production: float,
        sell_cost: float,
        buy_cost: float,
        sell_tax_percent: float,
        battery_soc: float
    ):
        """The entrypoint of the algorithm.

        Arguments:
        - devices: a list of ManagedDevice. Devices whose is_usable is False
          are not taken into account
        - power_consumption: the current power consumption. Can be negative
          if power is given back to the grid
        - solar_power_production: the solar production power
        - sell_cost: the sell cost of energy
        - buy_cost: the buy cost of energy
        - sell_tax_percent: a sell tax applied to sold energy (a percentage)
        - battery_soc: the battery state of charge, pushed to each device

        Returns a tuple:
        - best_solution: a list of dicts in which name, requested_power and
          state are set
        - best_objective: the value of the objective for that solution
        - total_power_consumption: the total power of all equipments which
          should be activated (state=True)
        """
        if (
            len(devices) <= 0  # pylint: disable=too-many-boolean-expressions
            or power_consumption is None
            or solar_power_production is None
            or sell_cost is None
            or buy_cost is None
            or sell_tax_percent is None
        ):
            _LOGGER.info(
                "Not all informations are available for Simulated Annealign algorithm to work. Calculation is abandoned"
            )
            return [], -1, -1

        _LOGGER.debug(
            "Calling recuit_simule with power_consumption=%.2f, solar_power_production=%.2f sell_cost=%.2f, buy_cost=%.2f, tax=%.2f%% devices=%s",
            power_consumption,
            solar_power_production,
            sell_cost,
            buy_cost,
            sell_tax_percent,
            devices,
        )
        self._cout_achat = buy_cost
        self._cout_revente = sell_cost
        self._taxe_revente = sell_tax_percent
        self._consommation_net = power_consumption
        self._production_solaire = solar_power_production

        # Build the working list of equipment dicts from the enabled devices
        self._equipements = []
        for _, device in enumerate(devices):
            if not device.is_enabled:
                _LOGGER.debug("%s is disabled. Forget it", device.name)
                continue
            device.set_battery_soc(battery_soc)
            usable = device.is_usable
            waiting = device.is_waiting
            # Force deactivation if active, not usable and not waiting
            force_state = (
                False
                if device.is_active and not usable and not waiting
                else device.is_active
            )
            self._equipements.append(
                {
                    "power_max": device.power_max,
                    "power_min": device.power_min,
                    "power_step": device.power_step,
                    "current_power": device.current_power,  # if force_state else 0,
                    # Initial Requested power is the current power if usable
                    "requested_power": device.current_power,  # if force_state else 0,
                    "name": device.name,
                    "state": force_state,
                    "is_usable": device.is_usable,
                    "is_waiting": waiting,
                    "can_change_power": device.can_change_power,
                }
            )
        if DEBUG:
            _LOGGER.debug("enabled _equipements are: %s", self._equipements)

        # Generate an initial solution
        solution_actuelle = self.generer_solution_initiale(self._equipements)
        meilleure_solution = solution_actuelle
        meilleure_objectif = self.calculer_objectif(solution_actuelle)
        temperature = self._temperature_initiale

        for _ in range(self._nombre_iterations):
            # Generate a neighbour solution
            objectif_actuel = self.calculer_objectif(solution_actuelle)
            if DEBUG:
                _LOGGER.debug("Objectif actuel : %.2f", objectif_actuel)

            voisin = self.permuter_equipement(solution_actuelle)

            # Compute the objective of the neighbour
            objectif_voisin = self.calculer_objectif(voisin)
            if DEBUG:
                _LOGGER.debug("Objectif voisin : %2.f", objectif_voisin)

            # Keep the neighbour if its objective is better
            if objectif_voisin < objectif_actuel:
                _LOGGER.debug("---> On garde l'objectif voisin")
                solution_actuelle = voisin
                if objectif_voisin < self.calculer_objectif(meilleure_solution):
                    _LOGGER.debug("---> C'est la meilleure jusque là")
                    meilleure_solution = voisin
                    meilleure_objectif = objectif_voisin
            else:
                # Otherwise accept the neighbour with a probability that
                # decreases with the temperature (Metropolis criterion)
                probabilite = math.exp(
                    (objectif_actuel - objectif_voisin) / temperature
                )
                if (seuil := random.random()) < probabilite:
                    solution_actuelle = voisin
                    if DEBUG:
                        _LOGGER.debug(
                            "---> On garde l'objectif voisin car seuil (%.2f) inférieur à proba (%.2f)",
                            seuil,
                            probabilite,
                        )
                else:
                    if DEBUG:
                        _LOGGER.debug("--> On ne prend pas")

            # Cool down the temperature
            temperature *= self._facteur_refroidissement
            if DEBUG:
                _LOGGER.debug(" !! Temperature %.2f", temperature)
            if temperature < self._temperature_minimale:
                break

        return (
            meilleure_solution,
            meilleure_objectif,
            self.consommation_equipements(meilleure_solution),
        )

    def calculer_objectif(self, solution) -> float:
        """Compute the objective to minimize.

        The objective is a weighted sum of imported power and discarded
        (exported) power, where the weights are derived from the buy price
        and the taxed sell price. The underlying model is:
            rejets = 0 if consommation_net >= 0 else -consommation_net
            consommation_solaire = min(production_solaire, production_solaire - rejets)
            consommation_totale = consommation_net + consommation_solaire
        """
        puissance_totale_eqt = self.consommation_equipements(solution)
        # Power this solution adds to (or removes from) the initial state
        diff_puissance_totale_eqt = (
            puissance_totale_eqt - self._puissance_totale_eqt_initiale
        )

        new_consommation_net = self._consommation_net + diff_puissance_totale_eqt
        new_rejets = 0 if new_consommation_net >= 0 else -new_consommation_net
        new_import = 0 if new_consommation_net < 0 else new_consommation_net
        new_consommation_solaire = min(
            self._production_solaire, self._production_solaire - new_rejets
        )
        # Only used for the debug log below
        new_consommation_totale = (
            new_consommation_net + new_rejets
        ) + new_consommation_solaire
        if DEBUG:
            _LOGGER.debug(
                "Objectif : cette solution ajoute %.3fW a la consommation initial. Nouvelle consommation nette=%.3fW. Nouveaux rejets=%.3fW. Nouvelle conso totale=%.3fW",
                diff_puissance_totale_eqt,
                new_consommation_net,
                new_rejets,
                new_consommation_totale,
            )

        # NOTE(review): assumes buy cost + taxed sell cost != 0; the
        # divisions below would fail otherwise — confirm inputs
        cout_revente_impose = self._cout_revente * (1.0 - self._taxe_revente / 100.0)
        coef_import = (self._cout_achat) / (self._cout_achat + cout_revente_impose)
        coef_rejets = (cout_revente_impose) / (self._cout_achat + cout_revente_impose)

        return coef_import * new_import + coef_rejets * new_rejets

    def generer_solution_initiale(self, solution):
        """Generate the initial solution (which is the solution given in argument) and calculate the total initial power"""
        self._puissance_totale_eqt_initiale = self.consommation_equipements(solution)
        return copy.deepcopy(solution)

    def consommation_equipements(self, solution):
        """The total requested power of all active equipments in the solution"""
        return sum(
            equipement["requested_power"]
            for _, equipement in enumerate(solution)
            if equipement["state"]
        )

    def calculer_new_power(
        self, current_power, power_step, power_min, power_max, can_switch_off
    ):
        """Compute a new power, one power_step above or below the current one.

        When can_switch_off is False, a decrease is only allowed while the
        power stays strictly above power_min.
        """
        choices = []
        if current_power > power_min or can_switch_off:
            choices.append(-1)
        if current_power < power_max:
            choices.append(1)

        if len(choices) <= 0:
            # No changes
            return current_power
        power_add = random.choice(choices) * power_step
        _LOGGER.debug("Adding %d power to current_power (%d)", power_add, current_power)
        requested_power = current_power + power_add
        _LOGGER.debug("New requested_power is %s", requested_power)
        return requested_power

        # if requested_power < power_min:
        #    deactivate the equipment
        #    requested_power = 0
        # elif requested_power > power_max:
        #    Do nothing
        #    requested_power = current_power

    def permuter_equipement(self, solution):
        """Build a neighbour solution by mutating one randomly chosen usable equipment"""
        voisin = copy.deepcopy(solution)

        usable = [eqt for eqt in voisin if eqt["is_usable"]]

        if len(usable) <= 0:
            return voisin

        eqt = random.choice(usable)

        # name = eqt["name"]
        state = eqt["state"]
        can_change_power = eqt["can_change_power"]
        is_waiting = eqt["is_waiting"]
        # Current power is the last requested_power
        current_power = eqt.get("requested_power")
        power_max = eqt.get("power_max")
        power_step = eqt.get("power_step")
        if can_change_power:
            power_min = eqt.get("power_min")
        else:
            # If power is not manageable, min = max
            power_min = power_max

        # is_waiting forbids switching a usable equipment on or off, but we
        # still want to change the power of an equipment that is already on.
        # "usable" means: can be switched on/off OR its power can be changed.
        # Decision table:
        # - not can_change_power and is_waiting:
        #     -> do nothing (should not happen: it would not be usable)
        # - state and can_change_power and is_waiting:
        #     -> change power but without switching off (requested_power >= power_min)
        # - state and can_change_power and not is_waiting:
        #     -> change power, switching off is allowed
        # - not state and not is_waiting:
        #     -> switch on
        # - state and not is_waiting:
        #     -> switch off
        #
        if (not can_change_power and is_waiting) or (
            not state and can_change_power and is_waiting
        ):
            _LOGGER.debug("not can_change_power and is_waiting -> do nothing")
            return voisin

        if state and can_change_power and is_waiting:
            # calculated a new power but do not switch off (because waiting)
            requested_power = self.calculer_new_power(
                current_power, power_step, power_min, power_max, False
            )
            assert (
                requested_power > 0
            ), "Requested_power should be > 0 because is_waiting is True"

        elif state and can_change_power and not is_waiting:
            # change power and accept switching off
            requested_power = self.calculer_new_power(
                current_power, power_step, power_min, power_max, True
            )
            if requested_power < power_min:
                # deactivate the equipment
                eqt["state"] = False
                requested_power = 0

        elif not state and not is_waiting:
            # Switch on
            eqt["state"] = not state
            requested_power = power_min

        elif state and not is_waiting:
            # Switch off
            eqt["state"] = not state
            requested_power = 0

        elif "requested_power" not in locals():
            _LOGGER.error("We should not be there. eqt=%s", eqt)
            assert False, "Requested power n'a pas été calculé. Ce n'est pas normal"

        eqt["requested_power"] = requested_power

        # old code that was working
        # if not state or not can_change_power:
        #     eqt["state"] = not state
        #     # We always start at the min power
        #     eqt["requested_power"] = power_min
        # else:
        #     _LOGGER.debug("Managing a can_change_power eqt which is already Activated")
        #     # Deactivate eqt or change power
        #     power_add = random.choice([-1, 1]) * power_step
        #     _LOGGER.debug(
        #         "Adding %d power to current_power (%d)", power_add, current_power
        #     )
        #     requested_power = current_power + power_add
        #     if requested_power < power_min:
        #         # deactivate the equipment
        #         eqt["state"] = False
        #         requested_power = 0
        #     elif requested_power > power_max:
        #         # Do nothing
        #         requested_power = current_power
        #     _LOGGER.debug("New requested_power is %s for eqt %s", requested_power, name)
        #     # Update the solution with current_power and
        #     eqt["requested_power"] = requested_power

        if DEBUG:
            _LOGGER.debug(
                " -- On permute %s puissance max de %.2f. Il passe à %s",
                eqt["name"],
                eqt["requested_power"],
                eqt["state"],
            )
        return voisin

View File

@@ -0,0 +1,61 @@
{
"title": "solar_optimizer",
"config": {
"flow_title": "Solar Optimizer configuration",
"step": {
"user": {
"title": "General parameters",
"description": "Give the general parameters",
"data": {
"refresh_period_sec": "Refresh period",
"power_consumption_entity_id": "Net power consumption",
"power_production_entity_id": "Solar power production",
"sell_cost_entity_id": "Energy sell price",
"buy_cost_entity_id": "Energy buy price",
"sell_tax_percent_entity_id": "Sell taxe percent",
"smooth_production": "Smooth the solar production",
"battery_soc_entity_id": "Battery soc"
},
"data_description": {
"refresh_period_sec": "Refresh period in seconds. Warning heavy calculs are done at each period. Don't refresh to often",
"power_consumption_entity_id": "the entity_id of the net power consumption sensor. Net power should be negative if power is exported to grid.",
"power_production_entity_id": "the entity_id of the solar power production sensor.",
"sell_cost_entity_id": "The entity_id which holds the current energy sell price.",
"buy_cost_entity_id": "The entity_id which holds the current energy buy price.",
"sell_tax_percent_entity_id": "The energy resell tax percent (0 to 100)",
"smooth_production": "If checked, the solar production will be smoothed to avoid hard variation",
"battery_soc_entity_id": "Battery state of charge in %. If you don't have battery, keep it empty"
}
}
}
},
"options": {
"flow_title": "Solar Optimizer options configuration",
"step": {
"init": {
"title": "General parameters",
"description": "Give the general parameters",
"data": {
"refresh_period_sec": "Refresh period",
"power_consumption_entity_id": "Net power consumption",
"power_production_entity_id": "Solar power production",
"sell_cost_entity_id": "Energy sell price",
"buy_cost_entity_id": "Energy buy price",
"sell_tax_percent_entity_id": "Sell taxe percent",
"smooth_production": "Smooth the solar production",
"battery_soc_entity_id": "Battery soc"
},
"data_description": {
"refresh_period_sec": "Refresh period in seconds. Warning heavy calculs are done at each period. Don't refresh to often",
"power_consumption_entity_id": "the entity_id of the net power consumption sensor. Net power should be negative if power is exported to grid.",
"power_production_entity_id": "the entity_id of the solar power production sensor.",
"sell_cost_entity_id": "The entity_id which holds the current energy sell price.",
"buy_cost_entity_id": "The entity_id which holds the current energy buy price.",
"sell_tax_percent_entity_id": "The energy resell tax percent (0 to 100)",
"smooth_production": "If checked, the solar production will be smoothed to avoid hard variation",
"battery_soc_entity_id": "Battery state of charge in %. If you don't have battery, keep it empty"
}
}
}
}
}

View File

@@ -0,0 +1,337 @@
""" A bonary sensor entity that holds the state of each managed_device """
import logging
from datetime import datetime
from typing import Any
from homeassistant.const import STATE_UNAVAILABLE, STATE_UNKNOWN, STATE_ON
from homeassistant.core import callback, HomeAssistant, State, Event
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from homeassistant.helpers.restore_state import RestoreEntity
from homeassistant.components.switch import (
SwitchEntity,
)
from homeassistant.helpers.entity_platform import (
AddEntitiesCallback,
)
from homeassistant.helpers.event import (
async_track_state_change_event,
)
from .const import (
DOMAIN,
name_to_unique_id,
get_tz,
EVENT_TYPE_SOLAR_OPTIMIZER_ENABLE_STATE_CHANGE,
)
from .coordinator import SolarOptimizerCoordinator
from .managed_device import ManagedDevice
_LOGGER = logging.getLogger(__name__)
async def async_setup_entry(
    hass: HomeAssistant, _, async_add_entities: AddEntitiesCallback
) -> None:
    """Set up the switch entities: one ManagedDeviceSwitch (mirrors the device
    activation state) and one ManagedDeviceEnable (enables/disables the
    optimization) per ManagedDevice known by the coordinator."""
    _LOGGER.debug("Calling switch.async_setup_entry")
    coordinator: SolarOptimizerCoordinator = hass.data[DOMAIN]["coordinator"]
    entities = []
    for device in coordinator.devices:
        entities.append(
            ManagedDeviceSwitch(
                coordinator,
                hass,
                device.name,
                name_to_unique_id(device.name),
                device.entity_id,
            )
        )
        entities.append(ManagedDeviceEnable(hass, device))
    async_add_entities(entities)
class ManagedDeviceSwitch(CoordinatorEntity, SwitchEntity):
    """A switch mirroring the on/off state of one ManagedDevice.

    The switch follows both the coordinator calculation results and the real
    state of the underlying entity, and exposes the device internals as extra
    state attributes. Turning it on/off activates/deactivates the device.
    """

    # Keep the per-device internals out of the recorder history
    _entity_component_unrecorded_attributes = (
        SwitchEntity._entity_component_unrecorded_attributes.union(
            frozenset(
                {
                    "is_enabled",
                    "is_active",
                    "is_waiting",
                    "is_usable",
                    "can_change_power",
                    "duration_sec",
                    "duration_power_sec",
                    "power_min",
                    "power_max",
                    "next_date_available",
                    "next_date_available_power",
                    "battery_soc_threshold",
                    "battery_soc",
                }
            )
        )
    )

    def __init__(self, coordinator, hass, name, idx, entity_id):
        """Initialize the switch.

        Args:
            coordinator: the SolarOptimizerCoordinator.
            hass: the HomeAssistant instance.
            name: display name of the managed device.
            idx: unique id of the device (also used as the coordinator context).
            entity_id: entity_id of the underlying entity to follow.
        """
        _LOGGER.debug("Adding ManagedDeviceSwitch for %s", name)
        super().__init__(coordinator, context=idx)
        self._hass: HomeAssistant = hass
        self.idx = idx
        self._attr_name = "Solar Optimizer " + name
        self._attr_unique_id = "solar_optimizer_" + idx
        self._entity_id = entity_id
        # Initialize from the device state when the coordinator already knows it
        device: ManagedDevice | None = coordinator.get_device_by_unique_id(self.idx)
        self._attr_is_on = device.is_active if device is not None else None

    async def async_added_to_hass(self) -> None:
        """Subscribe to the underlying entity state changes and to the
        Solar Optimizer enable-state events when the entity is added to hass."""
        await super().async_added_to_hass()
        # Follow the state of the underlying entity
        listener_cancel = async_track_state_change_event(
            self.hass,
            [self._entity_id],
            self._on_state_change,
        )
        # Cancel the state listener when the entity is removed
        self.async_on_remove(listener_cancel)
        # Also cancel the event-bus listener when the entity is removed
        self.async_on_remove(
            self._hass.bus.async_listen(
                event_type=EVENT_TYPE_SOLAR_OPTIMIZER_ENABLE_STATE_CHANGE,
                listener=self._on_enable_state_change,
            )
        )

    # NOTE(review): @callback on an async def is contradictory in Home
    # Assistant (a coroutine is never invoked as a sync callback) — confirm
    # whether the decorator should be dropped.
    @callback
    async def _on_enable_state_change(self, event: Event) -> None:
        """Triggered when the ManagedDevice enable state has changed."""
        # Ignore events that target another device
        if (
            not event.data
            or (device_id := event.data.get("device_unique_id")) != self.idx
        ):
            return
        # Look up the device in the coordinator
        if not self.coordinator or not (
            device := self.coordinator.get_device_by_unique_id(device_id)
        ):
            return
        _LOGGER.info(
            "Changing enabled state for %s to %s", device_id, device.is_enabled
        )
        self.update_custom_attributes(device)
        self.async_write_ha_state()

    @callback
    async def _on_state_change(self, event: Event) -> None:
        """The underlying entity changed state: mirror it on this switch."""
        _LOGGER.info(
            "Appel de on_state_change à %s avec l'event %s", datetime.now(), event
        )
        if not event.data:
            return
        # Look up the device in the coordinator
        if not self.coordinator or not (
            device := self.coordinator.get_device_by_unique_id(self.idx)
        ):
            return
        new_state: State = event.data.get("new_state")
        if new_state is None or new_state.state in (STATE_UNAVAILABLE, STATE_UNKNOWN):
            _LOGGER.debug("Pas d'état disponible. Evenement ignoré")
            return
        # Mirror the underlying on/off state, ignoring no-op transitions
        is_on = new_state.state == STATE_ON
        if is_on == self._attr_is_on:
            return
        self._attr_is_on = is_on
        self.update_custom_attributes(device)
        self.async_write_ha_state()

    def update_custom_attributes(self, device):
        """Refresh the extra state attributes from the device internals."""
        current_tz = get_tz(self._hass)
        self._attr_extra_state_attributes: dict[str, Any] = {
            "is_enabled": device.is_enabled,
            "is_active": device.is_active,
            "is_waiting": device.is_waiting,
            "is_usable": device.is_usable,
            "can_change_power": device.can_change_power,
            "current_power": device.current_power,
            "requested_power": device.requested_power,
            "duration_sec": device.duration_sec,
            "duration_power_sec": device.duration_power_sec,
            "power_min": device.power_min,
            "power_max": device.power_max,
            "next_date_available": device.next_date_available.astimezone(
                current_tz
            ).isoformat(),
            "next_date_available_power": device.next_date_available_power.astimezone(
                current_tz
            ).isoformat(),
            # NOTE(review): private members of ManagedDevice — consider
            # exposing public accessors instead.
            "battery_soc_threshold": device._battery_soc_threshold,
            "battery_soc": device._battery_soc,
        }

    @callback
    def _handle_coordinator_update(self) -> None:
        """Handle updated data from the coordinator."""
        _LOGGER.debug("Calling _handle_coordinator_update for %s", self._attr_name)
        if not self.coordinator or not self.coordinator.data:
            _LOGGER.debug("No coordinator found or no data...")
            return
        device: ManagedDevice = self.coordinator.data.get(self.idx)
        if not device:
            # a disabled device may be absent from the coordinator update
            _LOGGER.debug("No device %s found ...", self.idx)
            return
        self._attr_is_on = device.is_active
        self.update_custom_attributes(device)
        self.async_write_ha_state()

    async def async_turn_on(self, **kwargs: Any) -> None:
        """Turn the entity on (activate the underlying device)."""
        if not self.coordinator or not self.coordinator.data:
            return
        _LOGGER.info("Turn_on Solar Optimizer switch %s", self._attr_name)
        device = self.coordinator.get_device_by_unique_id(self.idx)
        if not device:
            return
        if not self._attr_is_on:
            await device.activate()
        self._attr_is_on = True
        self.update_custom_attributes(device)
        self.async_write_ha_state()

    async def async_turn_off(self, **kwargs: Any) -> None:
        """Turn the entity off (deactivate the underlying device)."""
        if not self.coordinator or not self.coordinator.data:
            return
        # Fixed: this log message previously said "Turn_on"
        _LOGGER.info("Turn_off Solar Optimizer switch %s", self._attr_name)
        device = self.coordinator.get_device_by_unique_id(self.idx)
        if not device:
            return
        if self._attr_is_on:
            await device.deactivate()
        self._attr_is_on = False
        self.update_custom_attributes(device)
        self.async_write_ha_state()

    @property
    def device_info(self):
        """Return information about the device this entity belongs to."""
        return {
            "identifiers": {(DOMAIN, "solar_optimizer_device")},
            "name": "Solar Optimizer",
        }

    @property
    def get_attr_extra_state_attributes(self):
        """Get the extra state attributes for the entity"""
        return self._attr_extra_state_attributes
class ManagedDeviceEnable(SwitchEntity, RestoreEntity):
    """A switch that enables or disables the Solar Optimizer management of one
    ManagedDevice. Its state is restored across Home Assistant restarts."""
    # The managed device whose enable flag this switch controls
    _device: ManagedDevice
    def __init__(self, hass: HomeAssistant, device: ManagedDevice):
        """Initialize the enable switch for the given managed device."""
        self._hass: HomeAssistant = hass
        self._device = device
        self._attr_name = "Enable Solar Optimizer " + device.name
        self._attr_unique_id = "solar_optimizer_enable_" + name_to_unique_id(
            device.name
        )
        self._attr_is_on = True
    @property
    def device_info(self):
        """Return information about the device this entity belongs to."""
        return {
            "identifiers": {(DOMAIN, "solar_optimizer_device")},
            "name": "Solar Optimizer",
            # Other device attributes could be added here
        }
    @property
    def icon(self) -> str | None:
        """Return the icon of the entity."""
        return "mdi:check"
    async def async_added_to_hass(self):
        """Restore the last saved state when the entity is added to hass."""
        await super().async_added_to_hass()
        # Retrieve the last saved state of the entity
        last_state = await self.async_get_last_state()
        # If a previous state exists, reuse it
        if last_state is not None:
            self._attr_is_on = last_state.state == "on"
        else:
            # No previous state: default to enabled
            self._attr_is_on = True
        # Propagate the restored flag to the ManagedDevice.
        # NOTE(review): an earlier comment said "this breaks the start of
        # integration" — confirm this call is safe at startup.
        self.update_device_enabled()
    # NOTE(review): @callback on an async def is contradictory in Home
    # Assistant (a coroutine is never invoked as a sync callback) — confirm
    # whether the decorators below should be dropped.
    @callback
    async def async_turn_on(self, **kwargs: Any) -> None:
        """Turn the entity on."""
        self._attr_is_on = True
        self.async_write_ha_state()
        self.update_device_enabled()
    @callback
    async def async_turn_off(self, **kwargs: Any) -> None:
        """Turn the entity off."""
        self._attr_is_on = False
        self.async_write_ha_state()
        self.update_device_enabled()
    def update_device_enabled(self) -> None:
        """Push the current is_on flag to the ManagedDevice enable flag."""
        if not self._device:
            return
        self._device.set_enable(self._attr_is_on)

View File

@@ -0,0 +1,61 @@
{
"title": "solar_optimizer",
"config": {
"flow_title": "Solar Optimizer configuration",
"step": {
"user": {
"title": "General parameters",
"description": "Give the general parameters",
"data": {
"refresh_period_sec": "Refresh period",
"power_consumption_entity_id": "Net power consumption",
"power_production_entity_id": "Solar power production",
"sell_cost_entity_id": "Energy sell price",
"buy_cost_entity_id": "Energy buy price",
"sell_tax_percent_entity_id": "Sell taxe percent",
"smooth_production": "Smooth the solar production",
"battery_soc_entity_id": "Battery soc"
},
"data_description": {
"refresh_period_sec": "Refresh period in seconds. Warning heavy calculs are done at each period. Don't refresh to often",
"power_consumption_entity_id": "the entity_id of the net power consumption sensor. Net power should be negative if power is exported to grid.",
"power_production_entity_id": "the entity_id of the solar power production sensor.",
"sell_cost_entity_id": "The entity_id which holds the current energy sell price.",
"buy_cost_entity_id": "The entity_id which holds the current energy buy price.",
"sell_tax_percent_entity_id": "The energy resell tax percent (0 to 100)",
"smooth_production": "If checked, the solar production will be smoothed to avoid hard variation",
"battery_soc_entity_id": "Battery state of charge in %. If you don't have battery, keep it empty"
}
}
}
},
"options": {
"flow_title": "Solar Optimizer options configuration",
"step": {
"init": {
"title": "General parameters",
"description": "Give the general parameters",
"data": {
"refresh_period_sec": "Refresh period",
"power_consumption_entity_id": "Net power consumption",
"power_production_entity_id": "Solar power production",
"sell_cost_entity_id": "Energy sell price",
"buy_cost_entity_id": "Energy buy price",
"sell_tax_percent_entity_id": "Sell taxe percent",
"smooth_production": "Smooth the solar production",
"battery_soc_entity_id": "Battery soc"
},
"data_description": {
"refresh_period_sec": "Refresh period in seconds. Warning heavy calculs are done at each period. Don't refresh to often",
"power_consumption_entity_id": "the entity_id of the net power consumption sensor. Net power should be negative if power is exported to grid.",
"power_production_entity_id": "the entity_id of the solar power production sensor.",
"sell_cost_entity_id": "The entity_id which holds the current energy sell price.",
"buy_cost_entity_id": "The entity_id which holds the current energy buy price.",
"sell_tax_percent_entity_id": "The energy resell tax percent (0 to 100)",
"smooth_production": "If checked, the solar production will be smoothed to avoid hard variation",
"battery_soc_entity_id": "Battery state of charge in %. If you don't have battery, keep it empty"
}
}
}
}
}

View File

@@ -0,0 +1,61 @@
{
"title": "solar_optimizer",
"config": {
"flow_title": "Solar Optimizer configuration",
"step": {
"user": {
"title": "General parameters",
"description": "Give the general parameters",
"data": {
"refresh_period_sec": "Refresh period",
"power_consumption_entity_id": "Net power consumption",
"power_production_entity_id": "Solar power production",
"sell_cost_entity_id": "Energy sell price",
"buy_cost_entity_id": "Energy buy price",
"sell_tax_percent_entity_id": "Sell taxe percent",
"smooth_production": "Smooth the solar production",
"battery_soc_entity_id": "Battery soc"
},
"data_description": {
"refresh_period_sec": "Refresh period in seconds. Warning heavy calculs are done at each period. Don't refresh to often",
"power_consumption_entity_id": "the entity_id of the net power consumption sensor. Net power should be negative if power is exported to grid.",
"power_production_entity_id": "the entity_id of the solar power production sensor.",
"sell_cost_entity_id": "The entity_id which holds the current energy sell price.",
"buy_cost_entity_id": "The entity_id which holds the current energy buy price.",
"sell_tax_percent_entity_id": "The energy resell tax percent (0 to 100)",
"smooth_production": "If checked, the solar production will be smoothed to avoid hard variation",
"battery_soc_entity_id": "Battery state of charge in %. If you don't have battery, keep it empty"
}
}
}
},
"options": {
"flow_title": "Solar Optimizer options configuration",
"step": {
"init": {
"title": "General parameters",
"description": "Give the general parameters",
"data": {
"refresh_period_sec": "Refresh period",
"power_consumption_entity_id": "Net power consumption",
"power_production_entity_id": "Solar power production",
"sell_cost_entity_id": "Energy sell price",
"buy_cost_entity_id": "Energy buy price",
"sell_tax_percent_entity_id": "Sell taxe percent",
"smooth_production": "Smooth the solar production",
"battery_soc_entity_id": "Battery soc"
},
"data_description": {
"refresh_period_sec": "Refresh period in seconds. Warning heavy calculs are done at each period. Don't refresh to often",
"power_consumption_entity_id": "the entity_id of the net power consumption sensor. Net power should be negative if power is exported to grid.",
"power_production_entity_id": "the entity_id of the solar power production sensor.",
"sell_cost_entity_id": "The entity_id which holds the current energy sell price.",
"buy_cost_entity_id": "The entity_id which holds the current energy buy price.",
"sell_tax_percent_entity_id": "The energy resell tax percent (0 to 100)",
"smooth_production": "If checked, the solar production will be smoothed to avoid hard variation",
"battery_soc_entity_id": "Battery state of charge in %. If you don't have battery, keep it empty"
}
}
}
}
}

View File

@@ -13,6 +13,7 @@ from homeassistant.const import SERVICE_RELOAD, EVENT_HOMEASSISTANT_STARTED
from homeassistant.config_entries import ConfigEntry, ConfigType
from homeassistant.core import HomeAssistant, CoreState, callback
from homeassistant.helpers.service import async_register_admin_service
from .base_thermostat import BaseThermostat
@@ -115,7 +116,8 @@ async def async_setup(
else:
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STARTED, _async_startup_internal)
hass.helpers.service.async_register_admin_service(
async_register_admin_service(
hass,
DOMAIN,
SERVICE_RELOAD,
_handle_reload,

View File

@@ -22,7 +22,6 @@ from homeassistant.components.climate import ClimateEntity
from homeassistant.helpers.restore_state import RestoreEntity
from homeassistant.config_entries import ConfigEntry
from homeassistant.helpers.device_registry import DeviceInfo, DeviceEntryType
from homeassistant.helpers.typing import EventType as HASSEventType
from homeassistant.helpers.event import (
async_track_state_change_event,
@@ -737,37 +736,37 @@ class BaseThermostat(ClimateEntity, RestoreEntity, Generic[T]):
)
need_write_state = True
# try to acquire window entity state
if self._window_sensor_entity_id:
window_state = self.hass.states.get(self._window_sensor_entity_id)
if window_state and window_state.state not in (
STATE_UNAVAILABLE,
STATE_UNKNOWN,
):
self._window_state = window_state.state == STATE_ON
_LOGGER.debug(
"%s - Window state have been retrieved: %s",
self,
self._window_state,
)
need_write_state = True
# try to acquire window entity state
if self._window_sensor_entity_id:
window_state = self.hass.states.get(self._window_sensor_entity_id)
if window_state and window_state.state not in (
STATE_UNAVAILABLE,
STATE_UNKNOWN,
):
self._window_state = window_state.state == STATE_ON
_LOGGER.debug(
"%s - Window state have been retrieved: %s",
self,
self._window_state,
)
need_write_state = True
# try to acquire motion entity state
if self._motion_sensor_entity_id:
motion_state = self.hass.states.get(self._motion_sensor_entity_id)
if motion_state and motion_state.state not in (
STATE_UNAVAILABLE,
STATE_UNKNOWN,
):
self._motion_state = motion_state.state
_LOGGER.debug(
"%s - Motion state have been retrieved: %s",
self,
self._motion_state,
)
# recalculate the right target_temp in activity mode
await self._async_update_motion_temp()
need_write_state = True
# try to acquire motion entity state
if self._motion_sensor_entity_id:
motion_state = self.hass.states.get(self._motion_sensor_entity_id)
if motion_state and motion_state.state not in (
STATE_UNAVAILABLE,
STATE_UNKNOWN,
):
self._motion_state = motion_state.state
_LOGGER.debug(
"%s - Motion state have been retrieved: %s",
self,
self._motion_state,
)
# recalculate the right target_temp in activity mode
await self._async_update_motion_temp()
need_write_state = True
if self._presence_on:
# try to acquire presence entity state
@@ -1377,11 +1376,19 @@ class BaseThermostat(ClimateEntity, RestoreEntity, Generic[T]):
if preset_mode == PRESET_POWER:
return self._power_temp
if preset_mode == PRESET_ACTIVITY:
motion_preset = (
self._motion_preset
if self._motion_state == STATE_ON
else self._no_motion_preset
)
if self._ac_mode and self._hvac_mode == HVACMode.COOL:
motion_preset = (
self._motion_preset + PRESET_AC_SUFFIX
if self._motion_state == STATE_ON
else self._no_motion_preset + PRESET_AC_SUFFIX
)
else:
motion_preset = (
self._motion_preset
if self._motion_state == STATE_ON
else self._no_motion_preset
)
if motion_preset in self._presets:
return self._presets[motion_preset]
else:
@@ -1646,6 +1653,7 @@ class BaseThermostat(ClimateEntity, RestoreEntity, Generic[T]):
_LOGGER.debug("%s - Motion delay condition is satisfied", self)
self._motion_state = new_state.state
if self._attr_preset_mode == PRESET_ACTIVITY:
new_preset = (
self._motion_preset
if self._motion_state == STATE_ON
@@ -1658,6 +1666,7 @@ class BaseThermostat(ClimateEntity, RestoreEntity, Generic[T]):
)
# We do not change the preset which is kept to ACTIVITY but only the target_temperature
# We take the presence into account
await self._async_internal_set_temperature(
self.find_preset_temp(new_preset)
)
@@ -1780,7 +1789,7 @@ class BaseThermostat(ClimateEntity, RestoreEntity, Generic[T]):
_LOGGER.error("Unable to update external temperature from sensor: %s", ex)
@callback
async def _async_power_changed(self, event: HASSEventType[EventStateChangedData]):
async def _async_power_changed(self, event: Event[EventStateChangedData]):
"""Handle power changes."""
_LOGGER.debug("Thermostat %s - Receive new Power event", self.name)
_LOGGER.debug(event)
@@ -1806,9 +1815,7 @@ class BaseThermostat(ClimateEntity, RestoreEntity, Generic[T]):
_LOGGER.error("Unable to update current_power from sensor: %s", ex)
@callback
async def _async_max_power_changed(
self, event: HASSEventType[EventStateChangedData]
):
async def _async_max_power_changed(self, event: Event[EventStateChangedData]):
"""Handle power max changes."""
_LOGGER.debug("Thermostat %s - Receive new Power Max event", self.name)
_LOGGER.debug(event)
@@ -1833,9 +1840,7 @@ class BaseThermostat(ClimateEntity, RestoreEntity, Generic[T]):
_LOGGER.error("Unable to update current_power from sensor: %s", ex)
@callback
async def _async_presence_changed(
self, event: HASSEventType[EventStateChangedData]
):
async def _async_presence_changed(self, event: Event[EventStateChangedData]):
"""Handle presence changes."""
new_state = event.data.get("new_state")
_LOGGER.info(
@@ -1896,16 +1901,23 @@ class BaseThermostat(ClimateEntity, RestoreEntity, Generic[T]):
):
return
new_preset = (
self._motion_preset
if self._motion_state == STATE_ON
else self._no_motion_preset
)
_LOGGER.info(
"%s - Motion condition have changes. New preset temp will be %s",
self,
new_preset,
)
# We do not change the preset which is kept to ACTIVITY but only the target_temperature
# We take the presence into account
await self._async_internal_set_temperature(
self._presets.get(
(
self._motion_preset
if self._motion_state == STATE_ON
else self._no_motion_preset
),
None,
)
self.find_preset_temp(new_preset)
)
_LOGGER.debug(
"%s - regarding motion, target_temp have been set to %.2f",
self,

View File

@@ -10,6 +10,7 @@ the keep_alive setting of Home Assistant's Generic Thermostat integration:
import logging
from collections.abc import Awaitable, Callable
from datetime import timedelta, datetime
from time import monotonic
from homeassistant.core import HomeAssistant, CALLBACK_TYPE
from homeassistant.helpers.event import async_track_time_interval
@@ -18,6 +19,79 @@ from homeassistant.helpers.event import async_track_time_interval
_LOGGER = logging.getLogger(__name__)
class BackoffTimer:
    """Non-blocking exponential backoff timer, used in a polling style.

    Example:
        timer = BackoffTimer(multiplier=1.5, upper_limit_sec=600)
        while some_condition:
            if timer.is_ready():
                do_something()
    """

    def __init__(
        self,
        *,
        multiplier=2.0,
        lower_limit_sec=30,
        upper_limit_sec=86400,
        initially_ready=True,
    ):
        """Create a backoff timer.

        Args:
            multiplier: factor applied to the period each time is_ready()
                returns True.
            lower_limit_sec: initial backoff period, in seconds.
            upper_limit_sec: maximum backoff period, in seconds.
            initially_ready: whether the very first is_ready() call (also
                the first call after reset()) reports True.
        """
        self._multiplier = multiplier
        self._lower_limit_sec = lower_limit_sec
        self._upper_limit_sec = upper_limit_sec
        self._initially_ready = initially_ready
        self._last_ts = 0
        self._period_sec = lower_limit_sec

    @property
    def in_progress(self) -> bool:
        """True once is_ready() has been called since creation or reset()."""
        return self._last_ts != 0

    def reset(self):
        """Return the timer to its initial, untriggered state."""
        self._last_ts = 0
        self._period_sec = self._lower_limit_sec

    def is_ready(self) -> bool:
        """Poll the timer; reports True at exponentially growing intervals.

        Returns:
            bool: True when enough time has elapsed since the last True
            result (or since the first call after construction/reset, where
            the result is governed by initially_ready). Each True result
            multiplies the waiting period, clamped between the lower and
            upper limits. False otherwise.
        """
        now = monotonic()
        if not self._last_ts:
            # First poll since construction or reset: start the clock.
            self._last_ts = now
            return self._initially_ready
        if now - self._last_ts < self._period_sec:
            return False
        # Period elapsed: fire, and grow the period within the limits.
        self._last_ts = now
        self._period_sec = max(
            self._lower_limit_sec,
            min(self._upper_limit_sec, self._period_sec * self._multiplier),
        )
        return True
class IntervalCaller:
"""Repeatedly call a given async action function at a given regular interval.
@@ -28,6 +102,7 @@ class IntervalCaller:
self._hass = hass
self._interval_sec = interval_sec
self._remove_handle: CALLBACK_TYPE | None = None
self.backoff_timer = BackoffTimer()
@property
def interval_sec(self) -> float:

View File

@@ -14,6 +14,6 @@
"quality_scale": "silver",
"requirements": [],
"ssdp": [],
"version": "6.2.3",
"version": "6.2.9",
"zeroconf": []
}
}

View File

@@ -283,7 +283,7 @@ class CentralConfigTemperatureNumber(
self.entity_id = f"{NUMBER_DOMAIN}.{slugify(name)}_preset_{preset_name}"
self._attr_unique_id = f"central_configuration_preset_{preset_name}"
self._attr_device_class = NumberDeviceClass.TEMPERATURE
self._attr_native_unit_of_measurement = UnitOfTemperature.CELSIUS
self._attr_native_unit_of_measurement = hass.config.units.temperature_unit
self._attr_native_step = entry_infos.get(CONF_STEP_TEMPERATURE, 0.5)
self._attr_native_min_value = entry_infos.get(CONF_TEMP_MIN)
@@ -371,7 +371,7 @@ class CentralConfigTemperatureNumber(
# TODO Kelvin ? It seems not because all internal values are stored in
# ° Celsius but only the render in front can be in °K depending on the
# user configuration.
return UnitOfTemperature.CELSIUS
return self.hass.config.units.temperature_unit
class TemperatureNumber( # pylint: disable=abstract-method
@@ -400,7 +400,7 @@ class TemperatureNumber( # pylint: disable=abstract-method
self._attr_unique_id = f"{self._device_name}_preset_{preset_name}"
self._attr_device_class = NumberDeviceClass.TEMPERATURE
self._attr_native_unit_of_measurement = UnitOfTemperature.CELSIUS
self._attr_native_unit_of_measurement = hass.config.units.temperature_unit
self._has_central_main_attributes = entry_infos.get(
CONF_USE_MAIN_CENTRAL_CONFIG, False
@@ -498,7 +498,7 @@ class TemperatureNumber( # pylint: disable=abstract-method
def native_unit_of_measurement(self) -> str | None:
"""The unit of measurement"""
if not self.my_climate:
return UnitOfTemperature.CELSIUS
return self.hass.config.units.temperature_unit
return self.my_climate.temperature_unit
def init_min_max_step(self, entry_infos=None):

View File

@@ -70,9 +70,9 @@ class PropAlgorithm:
if hvac_mode == HVACMode.COOL:
delta_temp = current_temp - target_temp
delta_ext_temp = (
ext_current_temp
ext_current_temp - target_temp
if ext_current_temp is not None
else 0 - target_temp
else 0
)
else:
delta_temp = target_temp - current_temp

View File

@@ -570,7 +570,7 @@ class RegulatedTemperatureSensor(VersatileThermostatBaseEntity, SensorEntity):
@property
def native_unit_of_measurement(self) -> str | None:
if not self.my_climate:
return UnitOfTemperature.CELSIUS
return self.hass.config.units.temperature_unit
return self.my_climate.temperature_unit
@property
@@ -621,7 +621,7 @@ class EMATemperatureSensor(VersatileThermostatBaseEntity, SensorEntity):
@property
def native_unit_of_measurement(self) -> str | None:
if not self.my_climate:
return UnitOfTemperature.CELSIUS
return self.hass.config.units.temperature_unit
return self.my_climate.temperature_unit
@property

View File

@@ -3,13 +3,12 @@
import logging
from datetime import timedelta, datetime
from homeassistant.core import HomeAssistant, State, callback
from homeassistant.core import Event, HomeAssistant, State, callback
from homeassistant.helpers.event import (
async_track_state_change_event,
async_track_time_interval,
EventStateChangedData,
)
from homeassistant.helpers.typing import EventType as HASSEventType
from homeassistant.components.climate import (
HVACAction,
HVACMode,
@@ -168,11 +167,17 @@ class ThermostatOverClimate(BaseThermostat[UnderlyingClimate]):
_LOGGER.info("%s - regulation calculation will be done", self)
# use _attr_target_temperature_step to round value if _auto_regulation_dtemp is equal to 0
regulation_step = self._auto_regulation_dtemp if self._auto_regulation_dtemp else self._attr_target_temperature_step
_LOGGER.debug("%s - usage of regulation_step: %.2f ",
self,
regulation_step)
new_regulated_temp = round_to_nearest(
self._regulation_algo.calculate_regulated_temperature(
self.current_temperature, self._cur_ext_temp
),
self._auto_regulation_dtemp,
regulation_step,
)
dtemp = new_regulated_temp - self._regulated_target_temp
@@ -216,7 +221,7 @@ class ThermostatOverClimate(BaseThermostat[UnderlyingClimate]):
):
offset_temp = device_temp - self.current_temperature
target_temp = round_to_nearest(self.regulated_target_temp + offset_temp, self._auto_regulation_dtemp)
target_temp = round_to_nearest(self.regulated_target_temp + offset_temp, regulation_step)
_LOGGER.debug(
"%s - The device offset temp for regulation is %.2f - internal temp is %.2f. New target is %.2f",
@@ -594,7 +599,7 @@ class ThermostatOverClimate(BaseThermostat[UnderlyingClimate]):
)
@callback
async def _async_climate_changed(self, event: HASSEventType[EventStateChangedData]):
async def _async_climate_changed(self, event: Event[EventStateChangedData]):
"""Handle unerdlying climate state changes.
This method takes the underlying values and update the VTherm with them.
To avoid loops (issues #121 #101 #95 #99), we discard the event if it is received
@@ -894,10 +899,7 @@ class ThermostatOverClimate(BaseThermostat[UnderlyingClimate]):
@property
def temperature_unit(self) -> str:
"""Return the unit of measurement."""
if self.underlying_entity(0):
return self.underlying_entity(0).temperature_unit
return self._unit
return self.hass.config.units.temperature_unit
@property
def supported_features(self):

View File

@@ -2,12 +2,11 @@
""" A climate over switch classe """
import logging
from homeassistant.core import callback
from homeassistant.core import Event, callback
from homeassistant.helpers.event import (
async_track_state_change_event,
EventStateChangedData,
)
from homeassistant.helpers.typing import EventType as HASSEventType
from homeassistant.components.climate import HVACMode
from .const import (
@@ -212,7 +211,7 @@ class ThermostatOverSwitch(BaseThermostat[UnderlyingSwitch]):
)
@callback
def _async_switch_changed(self, event: HASSEventType[EventStateChangedData]):
def _async_switch_changed(self, event: Event[EventStateChangedData]):
"""Handle heater switch state changes."""
new_state = event.data.get("new_state")
old_state = event.data.get("old_state")

View File

@@ -8,8 +8,7 @@ from homeassistant.helpers.event import (
async_track_time_interval,
EventStateChangedData,
)
from homeassistant.helpers.typing import EventType as HASSEventType
from homeassistant.core import HomeAssistant, callback
from homeassistant.core import Event, HomeAssistant, callback
from homeassistant.components.climate import HVACMode
from .base_thermostat import BaseThermostat, ConfigData
@@ -149,7 +148,7 @@ class ThermostatOverValve(BaseThermostat[UnderlyingValve]): # pylint: disable=a
)
@callback
async def _async_valve_changed(self, event: HASSEventType[EventStateChangedData]):
async def _async_valve_changed(self, event: Event[EventStateChangedData]):
"""Handle unerdlying valve state changes.
This method just log the change. It changes nothing to avoid loops.
"""

View File

@@ -12,6 +12,25 @@
"thermostat_type": "Len jeden centrálny typ konfigurácie je možný"
}
},
"menu": {
"title": "Menu",
"description": "Nakonfigurujte si termostat. Po zadaní všetkých požadovaných parametrov budete môcť dokončiť konfiguráciu.",
"menu_options": {
"main": "Hlavné atribúty",
"central_boiler": "Centrálny kotol",
"type": "Podklady",
"tpi": "TPI parametre",
"features": "Vlastnosti",
"presets": "Predvoľby",
"window": "Detekcia okien",
"motion": "Detekcia pohybu",
"power": "Správa napájania",
"presence": "Detekcia prítomnosti",
"advanced": "Pokročilé parametre",
"finalize": "Všetko hotové",
"configuration_not_complete": "Konfigurácia nie je dokončená"
}
},
"main": {
"title": "Pridajte nový všestranný termostat",
"description": "Hlavné povinné atribúty",
@@ -19,22 +38,32 @@
"name": "Názov",
"thermostat_type": "Termostat typ",
"temperature_sensor_entity_id": "ID entity snímača teploty",
"last_seen_temperature_sensor_entity_id": "Dátum posledného zobrazenia izbovej teploty",
"external_temperature_sensor_entity_id": "ID entity externého snímača teploty",
"cycle_min": "Trvanie cyklu (minúty)",
"temp_min": "Minimálna povolená teplota",
"temp_max": "Maximálna povolená teplota",
"step_temperature": "Krok teploty",
"device_power": "Napájanie zariadenia",
"use_central_mode": "Povoliť ovládanie centrálnou entitou (potrebná centrálna konfigurácia)",
"use_main_central_config": "Použite dodatočnú centrálnu hlavnú konfiguráciu. Začiarknite, ak chcete použiť centrálnu hlavnú konfiguráciu (vonkajšia teplota, min, max, krok, ...).",
"used_by_controls_central_boiler": "Používa sa centrálnym kotlom. Skontrolujte, či má mať tento VTherm ovládanie na centrálnom kotli"
},
"data_description": {
"temperature_sensor_entity_id": "ID entity snímača izbovej teploty",
"last_seen_temperature_sensor_entity_id": "Naposledy videný snímač izbovej teploty ID entity. Mal by to byť snímač dátumu a času",
"external_temperature_sensor_entity_id": "ID entity snímača vonkajšej teploty. Nepoužíva sa, ak je zvolená centrálna konfigurácia"
}
},
"features": {
"title": "Vlastnosti",
"description": "Vlastnosti termostatu",
"data": {
"use_window_feature": "Použite detekciu okien",
"use_motion_feature": "Použite detekciu pohybu",
"use_power_feature": "Použite správu napájania",
"use_presence_feature": "Použite detekciu prítomnosti",
"use_main_central_config": "Použite centrálnu hlavnú konfiguráciu"
},
"data_description": {
"use_central_mode": "Zaškrtnutím povolíte ovládanie VTherm pomocou vybraných entít central_mode",
"use_main_central_config": "Začiarknite, ak chcete použiť centrálnu hlavnú konfiguráciu. Zrušte začiarknutie, ak chcete použiť špecifickú hlavnú konfiguráciu pre tento VTherm",
"external_temperature_sensor_entity_id": "ID entity snímača vonkajšej teploty. Nepoužíva sa, ak je zvolená centrálna konfigurácia"
"use_central_boiler_feature": "Použite centrálny kotol. Začiarknutím tohto políčka pridáte ovládanie do centrálneho kotla. Po zaškrtnutí tohto políčka budete musieť nakonfigurovať VTherm, ktorý bude mať ovládanie centrálneho kotla, aby sa prejavilo. Ak jeden VTherm vyžaduje ohrev, kotol sa zapne. Ak žiadny VTherm nevyžaduje ohrev, kotol sa vypne. Príkazy na zapnutie/vypnutie centrálneho kotla sú uvedené na príslušnej konfiguračnej stránke"
}
},
"type": {
@@ -45,6 +74,7 @@
"heater_entity2_id": "2. spínač ohrievača",
"heater_entity3_id": "3. spínač ohrievača",
"heater_entity4_id": "4. spínač ohrievača",
"heater_keep_alive": "Prepnite interval udržiavania v sekundách",
"proportional_function": "Algoritmus",
"climate_entity_id": "1. základná klíma",
"climate_entity2_id": "2. základná klíma",
@@ -58,6 +88,7 @@
"auto_regulation_mode": "Samoregulácia",
"auto_regulation_dtemp": "Regulačný prah",
"auto_regulation_periode_min": "Regulačné minimálne obdobie",
"auto_regulation_use_device_temp": "Použite vnútornú teplotu podkladu",
"inverse_switch_command": "Inverzný prepínací príkaz",
"auto_fan_mode": "Režim automatického ventilátora"
},
@@ -66,6 +97,7 @@
"heater_entity2_id": "Voliteľné ID entity 2. ohrievača. Ak sa nepoužíva, nechajte prázdne",
"heater_entity3_id": "Voliteľné ID entity 3. ohrievača. Ak sa nepoužíva, nechajte prázdne",
"heater_entity4_id": "Voliteľné ID entity 4. ohrievača. Ak sa nepoužíva, nechajte prázdne",
"heater_keep_alive": "Voliteľný interval obnovy stavu spínača ohrievača. Ak to nie je potrebné, nechajte prázdne.",
"proportional_function": "Algoritmus, ktorý sa má použiť (TPI je zatiaľ jediný)",
"climate_entity_id": "ID základnej klimatickej entity",
"climate_entity2_id": "2. základné identifikačné číslo klimatickej entity",
@@ -79,6 +111,7 @@
"auto_regulation_mode": "Automatické nastavenie cieľovej teploty",
"auto_regulation_dtemp": "Hranica v °, pod ktorou sa zmena teploty neodošle",
"auto_regulation_periode_min": "Trvanie v minútach medzi dvoma aktualizáciami predpisov",
"auto_regulation_use_device_temp": "Na urýchlenie samoregulácie použite prípadný vnútorný snímač teploty podkladu",
"inverse_switch_command": "V prípade spínača s pilotným vodičom a diódou možno budete musieť príkaz invertovať",
"auto_fan_mode": "Automaticky aktivujte ventilátor, keď je potrebné veľké vykurovanie/chladenie"
}
@@ -101,24 +134,7 @@
"title": "Predvoľby",
"description": "Pre každú predvoľbu zadajte cieľovú teplotu (0, ak chcete predvoľbu ignorovať)",
"data": {
"eco_temp": "Teplota v predvoľbe Eco",
"comfort_temp": "Prednastavená teplota v komfortnom režime",
"boost_temp": "Teplota v prednastavení Boost",
"frost_temp": "Teplota v prednastavení Frost protection",
"eco_ac_temp": "Teplota v režime Eco prednastavená pre režim AC",
"comfort_ac_temp": "Teplota v režime Comfort je prednastavená pre režim AC",
"boost_ac_temp": "Prednastavená teplota v režime Boost pre režim AC",
"use_presets_central_config": "Použite konfiguráciu centrálnych predvolieb"
},
"data_description": {
"eco_temp": "Teplota v predvoľbe Eco",
"comfort_temp": "Prednastavená teplota v komfortnom režime",
"boost_temp": "Teplota v prednastavení Boost",
"frost_temp": "Teplota v prednastavenej ochrane proti mrazu",
"eco_ac_temp": "Teplota v režime Eco prednastavená pre režim AC",
"comfort_ac_temp": "Teplota v režime Comfort je prednastavená pre režim AC",
"boost_ac_temp": "Prednastavená teplota v režime Boost pre režim AC",
"use_presets_central_config": "Začiarknite, ak chcete použiť konfiguráciu centrálnych predvolieb. Zrušte začiarknutie, ak chcete použiť špecifickú konfiguráciu predvolieb pre tento VTherm"
}
},
"window": {
@@ -130,7 +146,8 @@
"window_auto_open_threshold": "Prah poklesu teploty pre automatickú detekciu otvoreného okna (v °/hodina)",
"window_auto_close_threshold": "Prahová hodnota zvýšenia teploty pre koniec automatickej detekcie (v °/hodina)",
"window_auto_max_duration": "Maximálne trvanie automatickej detekcie otvoreného okna (v min)",
"use_window_central_config": "Použite centrálnu konfiguráciu okna"
"use_window_central_config": "Použite centrálnu konfiguráciu okna",
"window_action": "Akcia"
},
"data_description": {
"window_sensor_entity_id": "Nechajte prázdne, ak nemáte použiť žiadny okenný senzor",
@@ -138,7 +155,8 @@
"window_auto_open_threshold": "Odporúčaná hodnota: medzi 3 a 10. Ak sa nepoužíva automatická detekcia otvoreného okna, nechajte prázdne",
"window_auto_close_threshold": "Odporúčaná hodnota: 0. Ak sa nepoužíva automatická detekcia otvoreného okna, nechajte prázdne",
"window_auto_max_duration": "Odporúčaná hodnota: 60 (jedna hodina). Ak sa nepoužíva automatická detekcia otvoreného okna, nechajte prázdne",
"use_window_central_config": "Začiarknite, ak chcete použiť konfiguráciu centrálneho okna. Zrušte začiarknutie, ak chcete použiť špecifickú konfiguráciu okna pre tento VTherm"
"use_window_central_config": "Začiarknite, ak chcete použiť konfiguráciu centrálneho okna. Zrušte začiarknutie, ak chcete použiť špecifickú konfiguráciu okna pre tento VTherm",
"window_action": "Akcia, ktorá sa má vykonať, ak sa okno zistí ako otvorené"
}
},
"motion": {
@@ -181,26 +199,11 @@
"title": "Riadenie prítomnosti",
"description": "Atribúty správy prítomnosti.\nPoskytuje senzor prítomnosti vášho domova (pravda, ak je niekto prítomný).\nPotom zadajte buď predvoľbu, ktorá sa má použiť, keď je senzor prítomnosti nepravdivý, alebo posun teploty, ktorý sa má použiť.\nAk je zadaná predvoľba, posun sa nepoužije.\nAk sa nepoužije, ponechajte zodpovedajúce entity_id prázdne.",
"data": {
"presence_sensor_entity_id": "ID entity senzora prítomnosti",
"eco_away_temp": "Teplota v prednastavenej Eco, keď nie je žiadna prítomnosť",
"comfort_away_temp": "Teplota v režime Comfort je prednastavená, keď nie je prítomný",
"boost_away_temp": "Prednastavená teplota v režime Boost, keď nie je prítomný",
"frost_away_temp": "Prednastavená teplota v režime Frost protection, keď nie je prítomný",
"eco_ac_away_temp": "Teplota v prednastavenej Eco, keď nie je prítomná v režime AC",
"comfort_ac_away_temp": "Teplota v režime Comfort je prednastavená, keď nie je prítomný v režime AC",
"boost_ac_away_temp": "Teplota v prednastavenom Boost, keď nie je prítomný v režime AC",
"use_presence_central_config": "Použite centrálnu konfiguráciu prítomnosti"
"presence_sensor_entity_id": "Senzora prítomnosti",
"use_presence_central_config": "Použite konfiguráciu centrálnej prítomnosti teploty. Ak chcete použiť špecifické teplotné entity, zrušte výber"
},
"data_description": {
"presence_sensor_entity_id": "ID entity senzora prítomnosti",
"eco_away_temp": "Teplota v prednastavenej Eco, keď nie je žiadna prítomnosť",
"comfort_away_temp": "Teplota v režime Comfort je prednastavená, keď nie je prítomný",
"boost_away_temp": "Prednastavená teplota v režime Boost, keď nie je prítomný",
"frost_away_temp": "Teplota v Prednastavená ochrana pred mrazom, keď nie je prítomný",
"eco_ac_away_temp": "Teplota v prednastavenej Eco, keď nie je prítomná v režime AC",
"comfort_ac_away_temp": "Teplota v režime Comfort je prednastavená, keď nie je prítomný v režime AC",
"boost_ac_away_temp": "Teplota v prednastavenom Boost, keď nie je prítomný v režime AC",
"use_presence_central_config": "Začiarknite, ak chcete použiť konfiguráciu centrálnej prítomnosti. Zrušte začiarknutie, ak chcete použiť špecifickú konfiguráciu prítomnosti pre tento VTherm"
"presence_sensor_entity_id": "ID entity senzora prítomnosti"
}
},
"advanced": {
@@ -244,6 +247,25 @@
"thermostat_type": "Je možný len jeden centrálny typ konfigurácie"
}
},
"menu": {
"title": "Menu",
"description": "Nakonfigurujte si termostat. Po zadaní všetkých požadovaných parametrov budete môcť dokončiť konfiguráciu.",
"menu_options": {
"main": "Hlavné atribúty",
"central_boiler": "Centrálny kotol",
"type": "Podklady",
"tpi": "TPI parametre",
"features": "Vlastnosti",
"presets": "Predvoľby",
"window": "Detekcia okien",
"motion": "Detekcia pohybu",
"power": "Správa napájania",
"presence": "Detekcia prítomnosti",
"advanced": "Pokročilé parametre",
"finalize": "Všetko hotové",
"configuration_not_complete": "Konfigurácia nie je dokončená"
}
},
"main": {
"title": "Hlavný - {name}",
"description": "Hlavné povinné atribúty",
@@ -251,22 +273,32 @@
"name": "Názov",
"thermostat_type": "Termostat typ",
"temperature_sensor_entity_id": "ID entity snímača teploty",
"last_seen_temperature_sensor_entity_id": "Dátum posledného zobrazenia izbovej teploty",
"external_temperature_sensor_entity_id": "ID entity externého snímača teploty",
"cycle_min": "Trvanie cyklu (minúty)",
"temp_min": "Minimálna povolená teplota",
"temp_max": "Maximálna povolená teplota",
"step_temperature": "Krok teploty",
"device_power": "Výkon zariadenia (kW)",
"use_central_mode": "Povoliť ovládanie centrálnou entitou (potrebná centrálna konfigurácia)",
"use_central_mode": "Povoliť ovládanie centrálnou entitou (vyžaduje centrálnu konfiguráciu). Zaškrtnutím povolíte ovládanie VTherm pomocou vybraných entít central_mode.",
"use_main_central_config": "Použite dodatočnú centrálnu hlavnú konfiguráciu. Začiarknite, ak chcete použiť centrálnu hlavnú konfiguráciu (vonkajšia teplota, min, max, krok, ...).",
"used_by_controls_central_boiler": "Používa sa centrálnym kotlom. Skontrolujte, či má mať tento VTherm ovládanie na centrálnom kotli"
},
"data_description": {
"temperature_sensor_entity_id": "ID entity snímača izbovej teploty",
"last_seen_temperature_sensor_entity_id": "Naposledy videný snímač izbovej teploty ID entity. Mal by to byť snímač dátumu a času",
"external_temperature_sensor_entity_id": "ID entity snímača vonkajšej teploty. Nepoužíva sa, ak je zvolená centrálna konfigurácia"
}
},
"features": {
"title": "Vlastnosti - {name}",
"description": "Vlastnosti termostatu",
"data": {
"use_window_feature": "Použite detekciu okien",
"use_motion_feature": "Použite detekciu pohybu",
"use_power_feature": "Použite správu napájania",
"use_presence_feature": "Použite detekciu prítomnosti",
"use_main_central_config": "Použite centrálnu hlavnú konfiguráciu"
},
"data_description": {
"use_central_mode": "Zaškrtnutím povolíte ovládanie VTherm pomocou vybraných entít central_mode",
"use_main_central_config": "Začiarknite, ak chcete použiť centrálnu hlavnú konfiguráciu. Ak chcete použiť špecifickú konfiguráciu pre tento VTherm, zrušte začiarknutie",
"external_temperature_sensor_entity_id": "ID entity snímača vonkajšej teploty. Nepoužíva sa, ak je zvolená centrálna konfigurácia"
"use_central_boiler_feature": "Použite centrálny kotol. Začiarknutím tohto políčka pridáte ovládanie do centrálneho kotla. Po zaškrtnutí tohto políčka budete musieť nakonfigurovať VTherm, ktorý bude mať ovládanie centrálneho kotla, aby sa prejavilo. Ak jeden VTherm vyžaduje ohrev, kotol sa zapne. Ak žiadny VTherm nevyžaduje ohrev, kotol sa vypne. Príkazy na zapnutie/vypnutie centrálneho kotla sú uvedené na príslušnej konfiguračnej stránke"
}
},
"type": {
@@ -277,6 +309,7 @@
"heater_entity2_id": "2. spínač ohrievača",
"heater_entity3_id": "3. spínač ohrievača",
"heater_entity4_id": "4. spínač ohrievača",
"heater_keep_alive": "Prepnite interval udržiavania v sekundách",
"proportional_function": "Algoritmus",
"climate_entity_id": "Základná klíma",
"climate_entity2_id": "2. základná klíma",
@@ -290,6 +323,7 @@
"auto_regulation_mode": "Samoregulácia",
"auto_regulation_dtemp": "Regulačný prah",
"auto_regulation_periode_min": "Regulačné minimálne obdobie",
"auto_regulation_use_device_temp": "Použite vnútornú teplotu podkladu",
"inverse_switch_command": "Inverzný prepínací príkaz",
"auto_fan_mode": "Režim automatického ventilátora"
},
@@ -298,6 +332,7 @@
"heater_entity2_id": "Voliteľné ID entity 2. ohrievača. Ak sa nepoužíva, nechajte prázdne",
"heater_entity3_id": "Voliteľné ID entity 3. ohrievača. Ak sa nepoužíva, nechajte prázdne",
"heater_entity4_id": "Voliteľné ID entity 4. ohrievača. Ak sa nepoužíva, nechajte prázdne",
"heater_keep_alive": "Voliteľný interval obnovy stavu spínača ohrievača. Ak to nie je potrebné, nechajte prázdne.",
"proportional_function": "Algoritmus, ktorý sa má použiť (TPI je zatiaľ jediný)",
"climate_entity_id": "ID základnej klimatickej entity",
"climate_entity2_id": "2. základný identifikátor klimatickej entity",
@@ -311,6 +346,7 @@
"auto_regulation_mode": "Automatické nastavenie cieľovej teploty",
"auto_regulation_dtemp": "Hranica v °, pod ktorou sa zmena teploty neodošle",
"auto_regulation_periode_min": "Trvanie v minútach medzi dvoma aktualizáciami predpisov",
"auto_regulation_use_device_temp": "Na urýchlenie samoregulácie použite prípadný vnútorný snímač teploty podkladu",
"inverse_switch_command": "V prípade spínača s pilotným vodičom a diódou možno budete musieť príkaz invertovať",
"auto_fan_mode": "Automaticky aktivujte ventilátor, keď je potrebné veľké vykurovanie/chladenie"
}
@@ -333,24 +369,7 @@
"title": "Predvoľby - {name}",
"description": "Pre každú predvoľbu zadajte cieľovú teplotu (0, ak chcete predvoľbu ignorovať)",
"data": {
"eco_temp": "Teplota v predvoľbe Eco",
"comfort_temp": "Prednastavená teplota v komfortnom režime",
"boost_temp": "Teplota v prednastavení Boost",
"frost_temp": "Teplota v prednastavení Frost protection",
"eco_ac_temp": "Teplota v režime Eco prednastavená pre režim AC",
"comfort_ac_temp": "Teplota v režime Comfort je prednastavená pre režim AC",
"boost_ac_temp": "Prednastavená teplota v režime Boost pre režim AC",
"use_presets_central_config": "Použite konfiguráciu centrálnych predvolieb"
},
"data_description": {
"eco_temp": "Teplota v predvoľbe Eco",
"comfort_temp": "Prednastavená teplota v komfortnom režime",
"boost_temp": "Teplota v prednastavení Boost",
"frost_temp": "Teplota v prednastavenej ochrane proti mrazu",
"eco_ac_temp": "Teplota v režime Eco prednastavená pre režim AC",
"comfort_ac_temp": "Teplota v režime Comfort je prednastavená pre režim AC",
"boost_ac_temp": "Prednastavená teplota v režime Boost pre režim AC",
"use_presets_central_config": "Začiarknite, ak chcete použiť konfiguráciu centrálnych predvolieb. Zrušte začiarknutie, ak chcete použiť špecifickú konfiguráciu predvolieb pre tento VTherm"
}
},
"window": {
@@ -362,7 +381,8 @@
"window_auto_open_threshold": "Prah poklesu teploty pre automatickú detekciu otvoreného okna (v °/hodina)",
"window_auto_close_threshold": "Prahová hodnota zvýšenia teploty pre koniec automatickej detekcie (v °/hodina)",
"window_auto_max_duration": "Maximálne trvanie automatickej detekcie otvoreného okna (v min)",
"use_window_central_config": "Použite centrálnu konfiguráciu okna"
"use_window_central_config": "Použite centrálnu konfiguráciu okna",
"window_action": "Akcia"
},
"data_description": {
"window_sensor_entity_id": "Nechajte prázdne, ak nemáte použiť žiadny okenný senzor",
@@ -370,7 +390,8 @@
"window_auto_open_threshold": "Odporúčaná hodnota: medzi 3 a 10. Ak sa nepoužíva automatická detekcia otvoreného okna, nechajte prázdne",
"window_auto_close_threshold": "Odporúčaná hodnota: 0. Ak sa nepoužíva automatická detekcia otvoreného okna, nechajte prázdne",
"window_auto_max_duration": "Odporúčaná hodnota: 60 (jedna hodina). Ak sa nepoužíva automatická detekcia otvoreného okna, nechajte prázdne",
"use_window_central_config": "Začiarknite, ak chcete použiť konfiguráciu centrálneho okna. Zrušte začiarknutie, ak chcete použiť špecifickú konfiguráciu okna pre tento VTherm"
"use_window_central_config": "Začiarknite, ak chcete použiť konfiguráciu centrálneho okna. Zrušte začiarknutie, ak chcete použiť špecifickú konfiguráciu okna pre tento VTherm",
"window_action": "Akcia, ktorá sa má vykonať, ak sa okno zistí ako otvorené"
}
},
"motion": {
@@ -410,29 +431,14 @@
}
},
"presence": {
"title": "Riadenie prítomnosti",
"description": "Atribúty správy prítomnosti.\nPoskytuje senzor prítomnosti vášho domova (pravda, ak je niekto prítomný).\nPotom zadajte buď predvoľbu, ktorá sa má použiť, keď je senzor prítomnosti nepravdivý, alebo posun teploty, ktorý sa má použiť.\nAk je zadaná predvoľba, posun sa nepoužije.\nAk sa nepoužije, ponechajte zodpovedajúce entity_id prázdne.",
"title": "Prítommnosť - {name}",
"description": "Atribúty riadenia prítomnosti.\nPoskytuje senzor prítomnosti vášho domova (pravda, je niekto prítomný) a poskytuje zodpovedajúce prednastavené nastavenie teploty.",
"data": {
"presence_sensor_entity_id": "ID entity senzora prítomnosti (pravda je prítomná)",
"eco_away_temp": "Teplota v prednastavenej Eco, keď nie je žiadna prítomnosť",
"comfort_away_temp": "Teplota v režime Comfort je prednastavená, keď nie je prítomný",
"boost_away_temp": "Prednastavená teplota v režime Boost, keď nie je prítomný",
"frost_away_temp": "Prednastavená teplota v režime Frost protection, keď nie je prítomný",
"eco_ac_away_temp": "Teplota v prednastavenej Eco, keď nie je prítomná v režime AC",
"comfort_ac_away_temp": "Teplota v režime Comfort je prednastavená, keď nie je prítomný v režime AC",
"boost_ac_away_temp": "Teplota v prednastavenom Boost, keď nie je prítomný v režime AC",
"use_presence_central_config": "Použite centrálnu konfiguráciu prítomnosti"
"presence_sensor_entity_id": "Senzor prítomnosti",
"use_presence_central_config": "Použite konfiguráciu centrálnej prítomnosti teploty. Ak chcete použiť špecifické entity teploty, zrušte začiarknutie"
},
"data_description": {
"presence_sensor_entity_id": "ID entity senzora prítomnosti",
"eco_away_temp": "Teplota v prednastavenej Eco, keď nie je žiadna prítomnosť",
"comfort_away_temp": "Teplota v režime Comfort je prednastavená, keď nie je prítomný",
"boost_away_temp": "Prednastavená teplota v režime Boost, keď nie je prítomný",
"frost_away_temp": "Teplota v Prednastavená ochrana pred mrazom, keď nie je prítomný",
"eco_ac_away_temp": "Teplota v prednastavenej Eco, keď nie je prítomná v režime AC",
"comfort_ac_away_temp": "Teplota v režime Comfort je prednastavená, keď nie je prítomný v režime AC",
"boost_ac_away_temp": "Teplota v prednastavenom Boost, keď nie je prítomný v režime AC",
"use_presence_central_config": "Začiarknite, ak chcete použiť konfiguráciu centrálnej prítomnosti. Zrušte začiarknutie, ak chcete použiť špecifickú konfiguráciu prítomnosti pre tento VTherm"
"presence_sensor_entity_id": "ID entity senzora prítomnosti"
}
},
"advanced": {
@@ -458,7 +464,8 @@
"unknown": "Neočakávaná chyba",
"unknown_entity": "Neznáme ID entity",
"window_open_detection_method": "Mala by sa použiť iba jedna metóda detekcie otvoreného okna. Použite senzor alebo automatickú detekciu cez teplotný prah, ale nie oboje",
"no_central_config": "Nemôžete zaškrtnúť „použiť centrálnu konfiguráciu“, pretože sa nenašla žiadna centrálna konfigurácia. Aby ste ho mohli používať, musíte si vytvoriť všestranný termostat typu „Central Configuration“."
"no_central_config": "Nemôžete zaškrtnúť „použiť centrálnu konfiguráciu“, pretože sa nenašla žiadna centrálna konfigurácia. Aby ste ho mohli používať, musíte si vytvoriť všestranný termostat typu „Central Configuration“.",
"service_configuration_format": "Formát konfigurácie služby je nesprávny"
},
"abort": {
"already_configured": "Zariadenie je už nakonfigurované"
@@ -491,6 +498,22 @@
"auto_fan_high": "Vysoký",
"auto_fan_turbo": "Turbo"
}
},
"window_action": {
"options": {
"window_turn_off": "Vypnúť",
"window_fan_only": "Len ventilátor",
"window_frost_temp": "Ochrana pred mrazom",
"window_eco_temp": "Eco"
}
},
"presets": {
"options": {
"frost": "Ochrana proti mrazu",
"eco": "Eco",
"comfort": "Komfort",
"boost": "Boost"
}
}
},
"entity": {
@@ -506,6 +529,53 @@
}
}
}
},
"number": {
"frost_temp": {
"name": "Mráz"
},
"eco_temp": {
"name": "Eco"
},
"comfort_temp": {
"name": "Komfort"
},
"boost_temp": {
"name": "Boost"
},
"frost_ac_temp": {
"name": "Mráz ac"
},
"eco_ac_temp": {
"name": "Eco ac"
},
"comfort_ac_temp": {
"name": "Komfort ac"
},
"boost_ac_temp": {
"name": "Boost ac"
},
"frost_away_temp": {
"name": "Mráz mimo"
},
"eco_away_temp": {
"name": "Eko mimo"
},
"comfort_away_temp": {
"name": "Komfort mimo"
},
"boost_away_temp": {
"name": "Boost mimo"
},
"eco_ac_away_temp": {
"name": "Eco ac mimo"
},
"comfort_ac_away_temp": {
"name": "Komfort ac mimo"
},
"boost_ac_away_temp": {
"name": "Boost ac mimo"
}
}
}
}

View File

@@ -5,7 +5,7 @@ import logging
from typing import Any
from enum import StrEnum
from homeassistant.const import ATTR_ENTITY_ID, STATE_ON, UnitOfTemperature
from homeassistant.const import ATTR_ENTITY_ID, STATE_ON, STATE_UNAVAILABLE
from homeassistant.core import State
from homeassistant.exceptions import ServiceNotFound
@@ -30,6 +30,7 @@ from homeassistant.components.number import SERVICE_SET_VALUE
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.event import async_call_later
from homeassistant.util.unit_conversion import TemperatureConverter
from .const import UnknownEntity, overrides
from .keep_alive import IntervalCaller
@@ -252,7 +253,28 @@ class UnderlyingSwitch(UnderlyingEntity):
async def _keep_alive_callback(self):
"""Keep alive: Turn on if already turned on, turn off if already turned off."""
await (self.turn_on() if self.is_device_active else self.turn_off())
timer = self._keep_alive.backoff_timer
state: State | None = self._hass.states.get(self._entity_id)
# Normal, expected state.state values are "on" and "off". An absent
# underlying MQTT switch was observed to produce either state == None
# or state.state == STATE_UNAVAILABLE ("unavailable").
if state is None or state.state == STATE_UNAVAILABLE:
if timer.is_ready():
_LOGGER.warning(
"Entity %s is not available (state: %s). Will keep trying "
"keep alive calls, but won't log this condition every time.",
self._entity_id,
state.state if state else "None",
)
else:
if timer.in_progress:
timer.reset()
_LOGGER.warning(
"Entity %s has recovered (state: %s).",
self._entity_id,
state.state,
)
await (self.turn_on() if self.is_device_active else self.turn_off())
# @overrides this breaks some unit tests TypeError: object MagicMock can't be used in 'await' expression
async def turn_off(self):
@@ -663,7 +685,7 @@ class UnderlyingClimate(UnderlyingEntity):
def temperature_unit(self) -> str:
"""Get the temperature_unit"""
if not self.is_initialized:
return UnitOfTemperature.CELSIUS
return self._hass.config.units.temperature_unit
return self._underlying_climate.temperature_unit
@property
@@ -704,7 +726,7 @@ class UnderlyingClimate(UnderlyingEntity):
if not hasattr(self._underlying_climate, "current_temperature"):
return None
return self._underlying_climate.current_temperature
return self._hass.states.get(self._entity_id).attributes.get("current_temperature")
def turn_aux_heat_on(self) -> None:
"""Turn auxiliary heater on."""
@@ -731,8 +753,12 @@ class UnderlyingClimate(UnderlyingEntity):
self._underlying_climate.min_temp is not None
and self._underlying_climate is not None
):
min_val = self._underlying_climate.min_temp
max_val = self._underlying_climate.max_temp
min_val = TemperatureConverter.convert(
self._underlying_climate.min_temp, self._underlying_climate.temperature_unit, self._hass.config.units.temperature_unit
)
max_val = TemperatureConverter.convert(
self._underlying_climate.max_temp, self._underlying_climate.temperature_unit, self._hass.config.units.temperature_unit
)
new_value = max(min_val, min(value, max_val))
else:

View File

@@ -48,9 +48,10 @@ SENSOR_DEFINITIONS: tuple[VigieEauSensorEntityDescription, ...] = (
key="fountains",
matchers=[
"alimentation des fontaines.+",
"douches .+ plages.+",
"douches .+ plage.+",
"fontaines",
"jeux d'eau",
".*Alimentation de douches de plage.*",
],
),
VigieEauSensorEntityDescription(
@@ -93,7 +94,8 @@ SENSOR_DEFINITIONS: tuple[VigieEauSensorEntityDescription, ...] = (
"Arrosage, arbustes et arbres",
"Arrosage des jardinières et suspensions",
"Arrosage des espaces arborés",
"Arrosage des terrains de sport",
"Arrosage.+terrains de sport",
"Arrosage terrains sport.+",
],
),
VigieEauSensorEntityDescription(
@@ -157,6 +159,8 @@ SENSOR_DEFINITIONS: tuple[VigieEauSensorEntityDescription, ...] = (
"Remplissage des piscine privées",
"Remplissage des piscines individuelles",
"remise à niveau des piscines",
"Remplissage de piscines.+",
"Piscines ouvertes au public.*",
],
),
VigieEauSensorEntityDescription(
@@ -201,6 +205,10 @@ SENSOR_DEFINITIONS: tuple[VigieEauSensorEntityDescription, ...] = (
"Entretien de cours d'eau",
"Travaux et rejets",
"Travaux sur les systèmes dassainissement occasionnant des rejets",
".*installations hydrauliques.*",
".*électricité dorigine hydraulique.*",
"production.+origine.+hydraulique.*",
"Installations de production d'électricité d'orignie hydraulique",
],
),
VigieEauSensorEntityDescription(
@@ -219,7 +227,10 @@ SENSOR_DEFINITIONS: tuple[VigieEauSensorEntityDescription, ...] = (
icon="mdi:golf",
category="golfs",
key="golfs",
matchers=["arrosage des golfs"],
matchers=[
"arrosage des golfs",
"Arrosage des.+golfs",
],
),
VigieEauSensorEntityDescription(
name="Prélèvement en canaux",
@@ -243,6 +254,7 @@ SENSOR_DEFINITIONS: tuple[VigieEauSensorEntityDescription, ...] = (
"Nouvelles demandes de prélèvement d'eau et création de forages",
"Création de prélèvements",
"Prélèvement en cours deau",
"alimentation en eau potable des populations.+",
],
),
VigieEauSensorEntityDescription(
@@ -259,6 +271,8 @@ SENSOR_DEFINITIONS: tuple[VigieEauSensorEntityDescription, ...] = (
# ICPE means "Installation classée pour la protection de l'environment"
"ICPE soumises à un APC relatif à la sécheresse",
"Usages récréatifs collectifs à partir deau potable.+",
"Réalisation de seuils provisoires",
"Activités industrielles et commerciales",
],
),
)

View File

@@ -9,25 +9,105 @@
"thematique": "Travaux et activités en cours d'eau"
},
{
"usage": "Alimentation des fontaines publiques",
"usage": "Alimentation de douches de plage, jeux deau",
"thematique": "Prélever"
},
{
"usage": "Alimentation des canaux et des rigoles",
"thematique": "Remplir ou vidanger"
},
{
"usage": "Alimentation des fontaines d'ornement en circuit ouvert (publiques et privées) ",
"thematique": "Alimenter des fontaines et autres usages de loisirs"
},
{
"usage": "Alimentation des fontaines publiques Et privées d'ornement en circuit ouvert",
"thematique": "Alimenter des fontaines et autres usages de loisirs"
},
{
"usage": "Alimentation des fontaines publiques et privées d'ornement en circuit ouvert",
"thematique": "Alimenter des fontaines et autres usages de loisirs"
},
{
"usage": "Alimentation des fontaines publiques et privées d'ornement en circuit ouvert",
"thematique": "Alimenter des fontaines et autres usages de loisirs"
},
{
"usage": "Alimentation des fontaines publiques et privées dornement",
"thematique": "Alimenter des fontaines et autres usages de loisirs"
},
{
"usage": "Alimentation des fontaines, pièces d'eau d'agrément et jeux d'eau récréatifs en circuit ouvert",
"thematique": "Alimenter des fontaines et autres usages de loisirs"
},
{
"usage": "Alimentation en eau des fontaines publiques et privées dornement",
"thematique": "Alimenter des fontaines et autres usages de loisirs"
},
{
"usage": "Alimentation fontaines publiques et privées dornement",
"thematique": "Prélever"
},
{
"usage": "Alimentation gravitaire des ouvrages dirrigation et des canaux d'agrément dans la Lozère",
"thematique": "Remplir ou vidanger"
},
{
"usage": "Arrosage d'arbres et arbustes",
"thematique": "Arroser"
},
{
"usage": "Arrosage de surfaces de circulation générant de la poussière (chantiers, motocross, athlétisme...)",
"thematique": "Arroser"
},
{
"usage": "Arrosage des espaces arborés, pelouses, massifs fleuris et espaces verts publics et privés",
"thematique": "Arroser"
},
{
"usage": "Arrosage des espaces verts et pelouses",
"thematique": "Arroser"
},
{
"usage": "Arrosage des golfs",
"thematique": "Arroser"
},
{
"usage": "Arrosage des golfs (conformément à laccord cadre golf et environnement 2019-2024)",
"thematique": "Irriguer"
},
{
"usage": "Arrosage des golfs (conformément à laccord cadre golf et environnement de 2019-2024)",
"thematique": "Arroser"
},
{
"usage": "Arrosage des golfs(Conformément à l'accord cadre golf et environnement 2019-2024",
"thematique": "Arroser"
},
{
"usage": "Arrosage des greens et départs de golfs",
"thematique": "Arroser"
},
{
"usage": "Arrosage des jardins potagers",
"thematique": "Arroser"
},
{
"usage": "Arrosage des jardins potagers (y compris serres non agricoles)",
"thematique": "Arroser"
},
{
"usage": "Arrosage des jardins potagers (y compris serres non-agricoles)",
"thematique": "Arroser"
},
{
"usage": "Arrosage des jardins potagers (y compris serres, non agricoles)",
"thematique": "Arroser"
},
{
"usage": "Arrosage des jardins potagers (yc serres non-agricoles)",
"thematique": "Arroser"
},
{
"usage": "Arrosage des jardins potagers collectifs",
"thematique": "Arroser"
@@ -36,6 +116,14 @@
"usage": "Arrosage des jardins potagers individuels",
"thematique": "Arroser"
},
{
"usage": "Arrosage des jardins potagers y compris serres non agricoles",
"thematique": "Arroser"
},
{
"usage": "Arrosage des jardins potagers y/c serres non agricoles",
"thematique": "Arroser"
},
{
"usage": "Arrosage des pelouses, massifs fleuris",
"thematique": "Arroser"
@@ -44,18 +132,174 @@
"usage": "Arrosage des pelouses, massifs fleuris et espaces verts (y compris rond-points, voies de tramway).",
"thematique": "Arroser"
},
{
"usage": "Arrosage des pelouses, massifs fleuris, Jardins dagrément, des espaces verts, golfs particuliers",
"thematique": "Arroser"
},
{
"usage": "Arrosage des pelouses, massifs fleuris, arbres et arbustes",
"thematique": "Arroser"
},
{
"usage": "Arrosage des pelouses, massifs fleuris, espaces verts",
"thematique": "Arroser"
},
{
"usage": "Arrosage des pelouses, massifs fleuris, espaces verts, golfs particuliers",
"thematique": "Arroser"
},
{
"usage": "Arrosage des pelouses, massifs fleuris, jardins d'agrément",
"thematique": "Arroser"
},
{
"usage": "Arrosage des pelouses, massifs fleuris, jardins d'agrément, arrosage des espaces verts, golf particuliers",
"thematique": "Arroser"
},
{
"usage": "Arrosage des pelouses, massifs fleuris, jardins dagrément, espaces verts et golfs particuliers",
"thematique": "Arroser"
},
{
"usage": "Arrosage des pelouses, massifs fleuris, jardins dagrément, espaces verts, golfs particuliers",
"thematique": "Arroser"
},
{
"usage": "Arrosage des pelouses, massifs fleuris, jardins dagrément, golfs particuliers",
"thematique": "Arroser"
},
{
"usage": "Arrosage des pelouses, massifs fleuris. ",
"thematique": "Irriguer"
},
{
"usage": "Arrosage des plantations d'arbre de moins de 3 ans",
"thematique": "Arroser"
},
{
"usage": "Arrosage des plantations darbre de moins de 3 ans",
"thematique": "Arroser"
},
{
"usage": "Arrosage des terrains de sport",
"thematique": "Arroser"
},
{
"usage": "Arrosage des terrains de sport (dont aires et centres équestres, circuits motocross ou VTT)",
"thematique": "Arroser"
},
{
"usage": "Arrosage des terrains de sport (dont aires/centres équestres, hippodromes, circuits VTT/motocross)",
"thematique": "Arroser"
},
{
"usage": "Arrosage des terrains de sport (dont aires/centres équestres, hippodromes, circuits motocross/VTT)",
"thematique": "Arroser"
},
{
"usage": "Arrosage des terrains de sport (voir ACI)",
"thematique": "Arroser"
},
{
"usage": "Arrosage des terrains de sport (y compris aires dévolutions équestres, centres équestres, hippodromes, circuits motocross, circuits vtt)",
"thematique": "Arroser"
},
{
"usage": "Arrosage des terrains de sport (y compris aires et centres équestres, circuits VTT motocross)",
"thematique": "Arroser"
},
{
"usage": "Arrosage des terrains de sport (y compris aires équestres,circuits motocross, circuit vtt) ",
"thematique": "Arroser"
},
{
"usage": "Arrosage des terrains de sport dont aires et centres équestres, hippodromes, circuits motocross/VTT",
"thematique": "Arroser"
},
{
"usage": "Arrosage des terrains de sport y compris aires/centre équestres, hippodromes, circuits motocross/vtt",
"thematique": "Arroser"
},
{
"usage": "Arrosage des terrains de sport, sols équestres et terrains de sports motorisés",
"thematique": "Arroser"
},
{
"usage": "Arrosage massifs fleuris",
"thematique": "Arroser"
},
{
"usage": "Arrosage pelouses, massifs fleuris, Jardins dagrément, espaces verts, golfs particuliers",
"thematique": "Arroser"
},
{
"usage": "Arrosage pelouses, massifs fleuris, jardins agrément, espaces verts, golfs particuliers",
"thematique": "Arroser"
},
{
"usage": "Arrosage terrains de sport (+ centres équestres, hippodromes, circuits motocross/VTT",
"thematique": "Arroser"
},
{
"usage": "Arrosage terrains sport yc aires dévolutions et centres équestres, hippodromes, motocross, VTT",
"thematique": "Arroser"
},
{
"usage": "Arrosage, arbustes et arbres",
"thematique": "Arroser"
},
{
"usage": "Fonctionnement des douches de plages et tout autre dispositif analogue",
"thematique": "Alimenter des fontaines et autres usages de loisirs"
},
{
"usage": "Fonctionnement des fontaines publiques et privées",
"thematique": "Alimenter des fontaines et autres usages de loisirs"
},
{
"usage": "Gestion des ouvrages hydrauliques (hors plans d'eau et canaux)",
"thematique": "Ouvrages hydrauliques"
},
{
"usage": "ICPE soumises à un APC relatif à la sécheresse",
"thematique": "ICPE"
},
{
"usage": "Installations de production d'électricité d'origine hydraulique",
"thematique": "Installations de production d'électricité"
},
{
"usage": "Installations de production d'électricité d'origine nucléaire, hydraulique, et thermique à flamme",
"thematique": "Installations de production d'électricité"
},
{
"usage": "Installations de production d'électricité d'orignie hydraulique",
"thematique": "Installations de production d'électricité"
},
{
"usage": "Installations de production délectricité dorigine hydraulique",
"thematique": "Installations de production d'électricité"
},
{
"usage": "Irrigation par aspersion des cultures",
"thematique": "Irriguer"
},
{
"usage": "Jeux d'eau",
"thematique": "Alimenter des fontaines et autres usages de loisirs"
},
{
"usage": "Lavage de tous les véhicules et engins terrestres/nautiques dans des installations professionnelles",
"thematique": "Nettoyer"
},
{
"usage": "Lavage de véhicules automobiles et engins nautiques par des particuliers",
"thematique": "Nettoyer"
},
{
"usage": "Lavage de véhicules chez des professionnels",
"thematique": "Nettoyer"
},
{
"usage": "Lavage de véhicules chez les particuliers",
"thematique": "Nettoyer"
@@ -65,7 +309,27 @@
"thematique": "Nettoyer"
},
{
"usage": "Lavage de véhicules et bateaux chez les particuliers",
"usage": "Lavage de véhicules et engins nautiques chez les particuliers",
"thematique": "Nettoyer"
},
{
"usage": "Lavage de véhicules et engins nautiques chez les professionnels",
"thematique": "Nettoyer"
},
{
"usage": "Lavage de véhicules et engins nautiques dans des installations de professionnels",
"thematique": "Nettoyer"
},
{
"usage": "Lavage de véhicules et engins nautiques par des professionnels",
"thematique": "Nettoyer"
},
{
"usage": "Lavage de véhicules et engins nautiques par les professionnels",
"thematique": "Nettoyer"
},
{
"usage": "Lavage de véhicules et engins nautiques privés chez les particuliers",
"thematique": "Nettoyer"
},
{
@@ -80,18 +344,74 @@
"usage": "Lavage de véhicules publics ou privés en stations de lavage professionnelles.",
"thematique": "Nettoyer"
},
{
"usage": "Lavage de véhicules, bateaux et engins nautiques chez les particuliers",
"thematique": "Arroser"
},
{
"usage": "Lavage de véhicules, bateaux et engins nautiques chez les particuliers",
"thematique": "Nettoyer"
},
{
"usage": "Lavage de véhicules, engins terrestres ou nautiques dans des installations professionnelles",
"thematique": "Nettoyer"
},
{
"usage": "Lavage des bateaux",
"thematique": "Nettoyer"
},
{
"usage": "Lavage des espaces publics (trottoirs, terrasses...)",
"thematique": "Nettoyer"
},
{
"usage": "Lavage des navires, bateaux et engins nautiques",
"thematique": "Nettoyer"
},
{
"usage": "Lavage dengins nautiques",
"thematique": "Nettoyer"
},
{
"usage": "Lavage et entretien des embarcations (motorisées ou non) en aire de carénage.",
"thematique": "Nettoyer"
},
{
"usage": "Manoeuvre de vannes d'installations hydrauliques",
"thematique": "Ouvrages hydrauliques"
},
{
"usage": "Manoeuvres de vannes d'installations hydrauliques",
"thematique": "Ouvrages hydrauliques"
},
{
"usage": "Manœuvres des vannes d'installations hydrauliques",
"thematique": "Ouvrages hydrauliques"
},
{
"usage": "Manœuvres des vannes dinstallations hydrauliques",
"thematique": "Ouvrages hydrauliques"
},
{
"usage": "Navigation fluviale",
"thematique": "Travaux et activités en cours d'eau"
},
{
"usage": "Navigation fluviale.",
"thematique": "Travaux et activités en cours d'eau"
},
{
"usage": "Nettoyage / arrosage des sites de manifestations temporaires sportives et culturelles",
"thematique": "Arroser"
},
{
"usage": "Nettoyage de la voirie, des trottoirs et autres surfaces imperméabilisées",
"thematique": "Nettoyer"
},
{
"usage": "Nettoyage des façades et toitures",
"thematique": "Nettoyer"
},
{
"usage": "Nettoyage des façades, terrasses et murs de clôture",
"thematique": "Nettoyer"
@@ -105,17 +425,65 @@
"thematique": "Nettoyer"
},
{
"usage": "Nettoyage des façades, toitures, trottoirs, terrasses, façades imperméabilisées...",
"usage": "Nettoyage des façades, toitures, trottoirs, voiries et autres surfaces imperméabilisées",
"thematique": "Nettoyer"
},
{
"usage": "Nettoyage des voieries",
"thematique": "Nettoyer"
},
{
"usage": "Nettoyage des voiries, terrasses, façades, toitures, trottoirs et autres surfaces imperméabilisées",
"thematique": "Nettoyer"
},
{
"usage": "Nettoyage des voiries, trottoirs et autres surfaces imperméabilisées",
"thematique": "Nettoyer"
},
{
"usage": "Orpaillage (professionnel et amateur)",
"thematique": "Travaux et activités en cours d'eau"
},
{
"usage": "Orpaillage (professionnel et amateur) et pratiques ou activités dans le lit ou sur les berges",
"thematique": "Travaux et activités en cours d'eau"
},
{
"usage": "Orpaillage (professionnel et amateur) et pratiques pouvant impacter les milieux aquatiques",
"thematique": "Travaux et activités en cours d'eau"
},
{
"usage": "Orpaillage et activités dans le lit ou sur les berges cours d'eau ",
"thematique": "Travaux et activités en cours d'eau"
},
{
"usage": "Orpaillage et pêche à laimant.",
"thematique": "Travaux et activités en cours d'eau"
},
{
"usage": "Piscines ouvertes au public",
"thematique": "Remplir ou vidanger"
},
{
"usage": "Pratique de la navigation de loisir, y compris le canoë et le kayak*",
"thematique": "Travaux et activités en cours d'eau"
},
{
"usage": "Pratique du canyoning sur matériaux alluvionnaires",
"thematique": "Travaux et activités en cours d'eau"
},
{
"usage": "Pratiques ou activités dans le lit pouvant avoir un impact sur les milieux aquatiques",
"thematique": "Travaux et activités en cours d'eau"
},
{
"usage": "Prélèvement en canaux ",
"thematique": "Prélever"
},
{
"usage": "Prélèvement en canaux et en cours deau",
"thematique": "Prélever"
},
{
"usage": "Rejets et travaux en rivière",
"thematique": "Travaux et activités en cours d'eau"
@@ -128,6 +496,54 @@
"usage": "Remplissage / vidange des plans d'eau.",
"thematique": "Remplir ou vidanger"
},
{
"usage": "Remplissage / vidange des plans deau",
"thematique": "Prélever"
},
{
"usage": "Remplissage de piscines accueillant du public",
"thematique": "Remplir ou vidanger"
},
{
"usage": "Remplissage de piscines domestiques (de plus d'1m³)",
"thematique": "Remplir ou vidanger"
},
{
"usage": "Remplissage de piscines familiales",
"thematique": "Remplir ou vidanger"
},
{
"usage": "Remplissage de plans deau sauf destinés à lAEP et soutien détiage permis par arrêté",
"thematique": "Remplir ou vidanger"
},
{
"usage": "Remplissage des plans d'eau",
"thematique": "Remplir ou vidanger"
},
{
"usage": "Remplissage des plans d'eau sauf retenues destinées à l'AEP et retenues participant au soutien d'éti",
"thematique": "Remplir ou vidanger"
},
{
"usage": "Remplissage des plans d'eau, étangs, bassin d'agrément et manoeuvre de vannes",
"thematique": "Remplir ou vidanger"
},
{
"usage": "Remplissage des plans deau",
"thematique": "Remplir ou vidanger"
},
{
"usage": "Remplissage des plans deau (sauf retenues autorisées ou dédiées à l'eau potable",
"thematique": "Remplir ou vidanger"
},
{
"usage": "Remplissage des plans deau sauf diverses retenues (voir arrêté)",
"thematique": "Remplir ou vidanger"
},
{
"usage": "Remplissage des plans deau sauf retenues destinées à lAEP et retenues participant au soutien détiage dont larrêté dautorisation le permet",
"thematique": "Remplir ou vidanger"
},
{
"usage": "Remplissage et vidange de piscines privées",
"thematique": "Remplir ou vidanger"
@@ -140,6 +556,14 @@
"usage": "Remplissage et vidange de piscines privées (de plus d'1 m3).",
"thematique": "Remplir ou vidanger"
},
{
"usage": "Remplissage et vidange de piscines privées (de plus d1m3) y compris les spas de loisirs",
"thematique": "Remplir ou vidanger"
},
{
"usage": "Remplissage et vidange de piscines privées > 1 m3",
"thematique": "Prélever"
},
{
"usage": "Travaux en cours deau",
"thematique": "Travaux et activités en cours d'eau"
@@ -148,9 +572,45 @@
"usage": "Travaux en cours deau.",
"thematique": "Travaux et activités en cours d'eau"
},
{
"usage": "Travaux en cours deau. ",
"thematique": "Travaux et activités en cours d'eau"
},
{
"usage": "Usages récréatifs collectifs à partir deau potable (dans le cadre de manifestations).",
"thematique": "Remplir ou vidanger"
},
{
"usage": "Vidange de piscines",
"thematique": "Remplir ou vidanger"
},
{
"usage": "Vidange de plans deau vers le réseau hydrographique",
"thematique": "Remplir ou vidanger"
},
{
"usage": "Vidange des plans d'eau",
"thematique": "Remplir ou vidanger"
},
{
"usage": "Vidange plans deau vers le réseau hydrographique",
"thematique": "Remplir ou vidanger"
},
{
"usage": "Vidange totale de plans deau vers le réseau hydrographique",
"thematique": "Remplir ou vidanger"
},
{
"usage": "Vidanges piscines privées",
"thematique": "Remplir ou vidanger"
},
{
"usage": "alimentation en eau potable des populations ( usages prioritaires ; santé, salubrité, sécurité civile)",
"thematique": "Prélever"
},
{
"usage": "orpaillage (professionnel et amateur) et pratiques ou activités dans le lit ou sur les berges",
"thematique": "Travaux et activités en cours d'eau"
}
]
}

View File

@@ -1,17 +1,16 @@
"""https://github.com/dummylabs/thewatchman§"""
from datetime import timedelta
import logging
import os
import time
import json
import voluptuous as vol
from homeassistant.loader import async_get_integration
from anyio import Path
from homeassistant.helpers import config_validation as cv
from homeassistant.components import persistent_notification
from homeassistant.util import dt as dt_util
from homeassistant.helpers.event import async_track_point_in_utc_time
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.typing import HomeAssistantType
from homeassistant.config_entries import ConfigEntry, SOURCE_IMPORT
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
@@ -21,7 +20,6 @@ from homeassistant.const import (
EVENT_SERVICE_REMOVED,
EVENT_STATE_CHANGED,
EVENT_CALL_SERVICE,
STATE_UNKNOWN,
)
from .coordinator import WatchmanCoordinator
@@ -33,7 +31,7 @@ from .utils import (
table_renderer,
text_renderer,
get_config,
get_report_path,
async_get_report_path,
)
from .const import (
@@ -97,7 +95,7 @@ CONFIG_SCHEMA = vol.Schema(
)
async def async_setup(hass: HomeAssistantType, config: dict):
async def async_setup(hass: HomeAssistant, config: dict):
"""Set up is called when Home Assistant is loading our component."""
if config.get(DOMAIN) is None:
# We get here if the integration is set up using config flow
@@ -113,7 +111,7 @@ async def async_setup(hass: HomeAssistantType, config: dict):
return True
async def async_setup_entry(hass: HomeAssistantType, entry: ConfigEntry):
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry):
"""Set up this integration using UI"""
_LOGGER.debug(entry.options)
_LOGGER.debug("Home assistant path: %s", hass.config.path(""))
@@ -134,19 +132,12 @@ async def async_setup_entry(hass: HomeAssistantType, entry: ConfigEntry):
await add_event_handlers(hass)
if hass.is_running:
# integration reloaded or options changed via UI
parse_config(hass, reason="changes in watchman configuration")
await parse_config(hass, reason="changes in watchman configuration")
await coordinator.async_config_entry_first_refresh()
else:
# first run, home assistant is loading
# parse_config will be scheduled once HA is fully loaded
_LOGGER.info("Watchman started [%s]", VERSION)
# resources = hass.data["lovelace"]["resources"]
# await resources.async_get_info()
# for itm in resources.async_items():
# _LOGGER.debug(itm)
return True
@@ -155,9 +146,7 @@ async def update_listener(hass: HomeAssistant, entry: ConfigEntry):
await hass.config_entries.async_reload(entry.entry_id)
async def async_unload_entry(
hass: HomeAssistant, config_entry
): # pylint: disable=unused-argument
async def async_unload_entry(hass: HomeAssistant, config_entry): # pylint: disable=unused-argument
"""Handle integration unload"""
for cancel_handle in hass.data[DOMAIN].get("cancel_handlers", []):
if cancel_handle:
@@ -189,7 +178,7 @@ async def add_services(hass: HomeAssistant):
async def async_handle_report(call):
"""Handle the service call"""
config = hass.data.get(DOMAIN_DATA, {})
path = get_report_path(hass, config.get(CONF_REPORT_PATH, None))
path = await async_get_report_path(hass, config.get(CONF_REPORT_PATH, None))
send_notification = call.data.get(CONF_SEND_NOTIFICATION, False)
create_file = call.data.get(CONF_CREATE_FILE, True)
test_mode = call.data.get(CONF_TEST_MODE, False)
@@ -211,7 +200,7 @@ async def add_services(hass: HomeAssistant):
await async_notification(hass, "Watchman error", message, error=True)
if call.data.get(CONF_PARSE_CONFIG, False):
parse_config(hass, reason="service call")
await parse_config(hass, reason="service call")
if send_notification:
chunk_size = call.data.get(CONF_CHUNK_SIZE, config.get(CONF_CHUNK_SIZE))
@@ -227,7 +216,7 @@ async def add_services(hass: HomeAssistant):
error=True,
)
if onboarding(hass, service, path):
if await async_onboarding(hass, service, path):
await async_notification(
hass,
"🖖 Achievement unlocked: first report!",
@@ -271,7 +260,7 @@ async def add_event_handlers(hass: HomeAssistant):
await coordinator.async_refresh()
async def async_on_home_assistant_started(event): # pylint: disable=unused-argument
parse_config(hass, reason="HA restart")
await parse_config(hass, reason="HA restart")
startup_delay = get_config(hass, CONF_STARTUP_DELAY, 0)
await async_schedule_refresh_states(hass, startup_delay)
@@ -284,12 +273,12 @@ async def add_event_handlers(hass: HomeAssistant):
"reload_core_config",
"reload",
]:
parse_config(hass, reason="configuration changes")
await parse_config(hass, reason="configuration changes")
coordinator = hass.data[DOMAIN]["coordinator"]
await coordinator.async_refresh()
elif typ in [EVENT_AUTOMATION_RELOADED, EVENT_SCENE_RELOADED]:
parse_config(hass, reason="configuration changes")
await parse_config(hass, reason="configuration changes")
coordinator = hass.data[DOMAIN]["coordinator"]
await coordinator.async_refresh()
@@ -341,14 +330,14 @@ async def add_event_handlers(hass: HomeAssistant):
hass.data[DOMAIN]["cancel_handlers"] = hdlr
def parse_config(hass: HomeAssistant, reason=None):
async def parse_config(hass: HomeAssistant, reason=None):
"""parse home assistant configuration files"""
assert hass.data.get(DOMAIN_DATA)
start_time = time.time()
included_folders = get_included_folders(hass)
ignored_files = hass.data[DOMAIN_DATA].get(CONF_IGNORED_FILES, None)
entity_list, service_list, files_parsed, files_ignored = parse(
entity_list, service_list, files_parsed, files_ignored = await parse(
hass, included_folders, ignored_files, hass.config.config_dir
)
hass.data[DOMAIN]["entity_list"] = entity_list
@@ -376,10 +365,10 @@ def get_included_folders(hass):
config_folders = [hass.config.config_dir]
for fld in config_folders:
folders.append(os.path.join(fld, "**/*.yaml"))
folders.append((fld, "**/*.yaml"))
if DOMAIN_DATA in hass.data and hass.data[DOMAIN_DATA].get(CONF_CHECK_LOVELACE):
folders.append(os.path.join(hass.config.config_dir, ".storage/**/lovelace*"))
folders.append((hass.config.config_dir, ".storage/**/lovelace*"))
return folders
@@ -388,11 +377,16 @@ async def async_report_to_file(hass, path, test_mode):
"""save report to a file"""
coordinator = hass.data[DOMAIN]["coordinator"]
await coordinator.async_refresh()
report_chunks = report(hass, table_renderer, chunk_size=0, test_mode=test_mode)
# OSError exception is handled in async_handle_report
with open(path, "w", encoding="utf-8") as report_file:
for chunk in report_chunks:
report_file.write(chunk)
report_chunks = await report(
hass, table_renderer, chunk_size=0, test_mode=test_mode
)
def write(path):
with open(path, "w", encoding="utf-8") as report_file:
for chunk in report_chunks:
report_file.write(chunk)
await hass.async_add_executor_job(write, path)
async def async_report_to_notification(hass, service_str, service_data, chunk_size):
@@ -423,7 +417,7 @@ async def async_report_to_notification(hass, service_str, service_data, chunk_si
coordinator = hass.data[DOMAIN]["coordinator"]
await coordinator.async_refresh()
report_chunks = report(hass, text_renderer, chunk_size)
report_chunks = await report(hass, text_renderer, chunk_size)
for chunk in report_chunks:
data["message"] = chunk
# blocking=True ensures execution order
@@ -446,7 +440,7 @@ async def async_notification(hass, title, message, error=False, n_id="watchman")
raise HomeAssistantError(message.replace("`", ""))
def onboarding(hass, service, path):
async def async_onboarding(hass, service, path):
"""check if the user runs report for the first time"""
service = service or get_config(hass, CONF_SERVICE_NAME, None)
return not (service or os.path.exists(path))
return not (service or await Path(path).exists())

View File

@@ -1,4 +1,5 @@
"ConfigFlow definition for watchman"
from typing import Dict
import json
from json.decoder import JSONDecodeError
@@ -7,7 +8,7 @@ from homeassistant.config_entries import ConfigFlow, OptionsFlow, ConfigEntry
from homeassistant.core import callback
from homeassistant.helpers import config_validation as cv, selector
import voluptuous as vol
from .utils import is_service, get_columns_width, get_report_path
from .utils import is_service, get_columns_width, async_get_report_path
from .const import (
DOMAIN,
@@ -91,7 +92,7 @@ class OptionsFlowHandler(OptionsFlow):
def __init__(self, config_entry: ConfigEntry) -> None:
self.config_entry = config_entry
def default(self, key, uinput=None):
async def async_default(self, key, uinput=None):
"""provide default value for an OptionsFlow field"""
if uinput and key in uinput:
# supply last entered value to display an error during form validation
@@ -105,7 +106,7 @@ class OptionsFlowHandler(OptionsFlow):
if DEFAULT_DATA[key]:
result = DEFAULT_DATA[key]
elif key == CONF_REPORT_PATH:
result = get_report_path(self.hass, None)
result = await async_get_report_path(self.hass, None)
if isinstance(result, list):
return ", ".join([str(i) for i in result])
@@ -130,9 +131,7 @@ class OptionsFlowHandler(OptionsFlow):
errors[key] = f"invalid_{key}"
return val, errors
async def _show_options_form(
self, uinput=None, errors=None, placehoders=None
): # pylint: disable=unused-argument
async def _show_options_form(self, uinput=None, errors=None, placehoders=None): # pylint: disable=unused-argument
return self.async_show_form(
step_id="init",
data_schema=vol.Schema(
@@ -140,19 +139,23 @@ class OptionsFlowHandler(OptionsFlow):
vol.Optional(
CONF_SERVICE_NAME,
description={
"suggested_value": self.default(CONF_SERVICE_NAME, uinput)
"suggested_value": await self.async_default(
CONF_SERVICE_NAME, uinput
)
},
): cv.string,
vol.Optional(
CONF_SERVICE_DATA2,
description={
"suggested_value": self.default(CONF_SERVICE_DATA2, uinput)
"suggested_value": await self.async_default(
CONF_SERVICE_DATA2, uinput
)
},
): selector.TemplateSelector(),
vol.Optional(
CONF_INCLUDED_FOLDERS,
description={
"suggested_value": self.default(
"suggested_value": await self.async_default(
CONF_INCLUDED_FOLDERS, uinput
)
},
@@ -162,19 +165,25 @@ class OptionsFlowHandler(OptionsFlow):
vol.Optional(
CONF_HEADER,
description={
"suggested_value": self.default(CONF_HEADER, uinput)
"suggested_value": await self.async_default(
CONF_HEADER, uinput
)
},
): cv.string,
vol.Optional(
CONF_REPORT_PATH,
description={
"suggested_value": self.default(CONF_REPORT_PATH, uinput)
"suggested_value": await self.async_default(
CONF_REPORT_PATH, uinput
)
},
): cv.string,
vol.Optional(
CONF_IGNORED_ITEMS,
description={
"suggested_value": self.default(CONF_IGNORED_ITEMS, uinput)
"suggested_value": await self.async_default(
CONF_IGNORED_ITEMS, uinput
)
},
): selector.TextSelector(
selector.TextSelectorConfig(multiline=True)
@@ -182,7 +191,9 @@ class OptionsFlowHandler(OptionsFlow):
vol.Optional(
CONF_IGNORED_STATES,
description={
"suggested_value": self.default(CONF_IGNORED_STATES, uinput)
"suggested_value": await self.async_default(
CONF_IGNORED_STATES, uinput
)
},
): selector.TextSelector(
selector.TextSelectorConfig(multiline=True)
@@ -190,13 +201,17 @@ class OptionsFlowHandler(OptionsFlow):
vol.Optional(
CONF_CHUNK_SIZE,
description={
"suggested_value": self.default(CONF_CHUNK_SIZE, uinput)
"suggested_value": await self.async_default(
CONF_CHUNK_SIZE, uinput
)
},
): cv.positive_int,
vol.Optional(
CONF_IGNORED_FILES,
description={
"suggested_value": self.default(CONF_IGNORED_FILES, uinput)
"suggested_value": await self.async_default(
CONF_IGNORED_FILES, uinput
)
},
): selector.TextSelector(
selector.TextSelectorConfig(multiline=True)
@@ -204,25 +219,33 @@ class OptionsFlowHandler(OptionsFlow):
vol.Optional(
CONF_COLUMNS_WIDTH,
description={
"suggested_value": self.default(CONF_COLUMNS_WIDTH, uinput)
"suggested_value": await self.async_default(
CONF_COLUMNS_WIDTH, uinput
)
},
): cv.string,
vol.Optional(
CONF_STARTUP_DELAY,
description={
"suggested_value": self.default(CONF_STARTUP_DELAY, uinput)
"suggested_value": await self.async_default(
CONF_STARTUP_DELAY, uinput
)
},
): cv.positive_int,
vol.Optional(
CONF_FRIENDLY_NAMES,
description={
"suggested_value": self.default(CONF_FRIENDLY_NAMES, uinput)
"suggested_value": await self.async_default(
CONF_FRIENDLY_NAMES, uinput
)
},
): cv.boolean,
vol.Optional(
CONF_CHECK_LOVELACE,
description={
"suggested_value": self.default(CONF_CHECK_LOVELACE, uinput)
"suggested_value": await self.async_default(
CONF_CHECK_LOVELACE, uinput
)
},
): cv.boolean,
}

View File

@@ -1,9 +1,10 @@
"definition of constants"
from homeassistant.const import Platform
DOMAIN = "watchman"
DOMAIN_DATA = "watchman_data"
VERSION = "0.6.1"
VERSION = "0.6.3"
DEFAULT_REPORT_FILENAME = "watchman_report.txt"
DEFAULT_HEADER = "-== WATCHMAN REPORT ==- "

View File

@@ -1,7 +1,7 @@
"""Represents Watchman service in the device registry of Home Assistant"""
from homeassistant.helpers.entity import DeviceInfo, EntityDescription
from homeassistant.helpers.device_registry import DeviceEntryType
from homeassistant.helpers.entity import EntityDescription
from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
from homeassistant.helpers.update_coordinator import (
CoordinatorEntity,
DataUpdateCoordinator,

View File

@@ -1,15 +1,15 @@
{
"domain": "watchman",
"name": "Watchman",
"documentation": "https://github.com/dummylabs/thewatchman",
"issue_tracker": "https://github.com/dummylabs/thewatchman/issues",
"iot_class": "local_push",
"version": "0.5.1",
"requirements": [
"prettytable==3.0.0"
],
"codeowners": [
"@dummylabs"
],
"config_flow": true
"config_flow": true,
"documentation": "https://github.com/dummylabs/thewatchman",
"iot_class": "local_push",
"issue_tracker": "https://github.com/dummylabs/thewatchman/issues",
"requirements": [
"prettytable==3.10.0"
],
"version": "0.6.3"
}

View File

@@ -1,9 +1,12 @@
"""Watchman sensors definition"""
import logging
from homeassistant.components.sensor import (
SensorDeviceClass,
SensorEntity,
SensorEntityDescription,
)
from homeassistant.components.sensor.const import (
SensorDeviceClass,
SensorStateClass,
)
from homeassistant.core import callback

View File

@@ -2,45 +2,33 @@ report:
description: Run watchman report
fields:
create_file:
description: Whether report file should be created (optional, true by default)
example: true
name: Create file report
default: true
required: false
selector:
boolean:
send_notification:
description: Whether report should be sent via notification service (optional, false by default)
example: true
name: Send notification
default: false
required: false
selector:
boolean:
service:
description: Notification service to send report via (optional). Overrides "service" setting from watchman configuration
example: "notify.telegram"
name: Notification service
required: false
selector:
text:
data:
description: Additional data in form of key:value pairs for notification service (optional)
example: "parse_mode: html"
name: Notification service data parameters
parse_config:
description: Parse configuration files before report is created. Usually this is done by watchman automatically, so this flag is not required. (optional, false by default)
example: true
name: Parse configuration
default: false
required: false
selector:
boolean:
chunk_size:
description: Maximum message size in bytes. If report size exceeds chunk_size, the report will be sent in several subsequent notifications. (optional, default is 3500 or whatever specified in integration settings)
example: true
name: Chunk size
default: false
required: false
selector:

View File

@@ -41,5 +41,37 @@
"description": "[Help on settings](https://github.com/dummylabs/thewatchman#configuration)"
}
}
},
"services": {
"report": {
"name": "Report",
"description": "Run the Watchman report",
"fields": {
"create_file": {
"name": "Create file report",
"description": "Whether report file should be created (optional, true by default)"
},
"send_notification": {
"name": "Send notification",
"description": "Whether report should be sent via notification service (optional, false by default)"
},
"service": {
"name": "Notification service",
"description": "Notification service to send report via (optional). Overrides 'service' setting from watchman configuration"
},
"data": {
"name": "Notification service data parameters",
"description": "Additional data in form of key:value pairs for notification service (optional)"
},
"parse_config": {
"name": "Force configuration parsing",
"description": "Parse configuration files before report is created. Usually this is done by watchman automatically, so this flag is not required. (optional, false by default)"
},
"chunk_size": {
"name": "Report chunk size",
"description": "Maximum message size in bytes. If report size exceeds chunk_size, the report will be sent in several subsequent notifications. (optional, default is 3500 or whatever specified in integration settings)"
}
}
}
}
}

View File

@@ -1,5 +1,6 @@
"""Miscellaneous support functions for watchman"""
import glob
import anyio
import re
import fnmatch
import time
@@ -58,12 +59,12 @@ def get_config(hass: HomeAssistant, key, default):
return hass.data[DOMAIN_DATA].get(key, default)
def get_report_path(hass, path):
async def async_get_report_path(hass, path):
"""if path not specified, create report in config directory with default filename"""
if not path:
path = hass.config.path(DEFAULT_REPORT_FILENAME)
folder, _ = os.path.split(path)
if not os.path.exists(folder):
if not await anyio.Path(folder).exists():
raise HomeAssistantError(f"Incorrect report_path: {path}.")
return path
@@ -147,16 +148,25 @@ def text_renderer(hass, entry_type):
return f"Text render error: unknown entry type: {entry_type}"
def get_next_file(folder_list, ignored_files):
async def async_get_next_file(folder_tuples, ignored_files):
"""Returns next file for scan"""
if not ignored_files:
ignored_files = ""
else:
ignored_files = "|".join([f"({fnmatch.translate(f)})" for f in ignored_files])
ignored_files_re = re.compile(ignored_files)
for folder in folder_list:
for filename in glob.iglob(folder, recursive=True):
yield (filename, (ignored_files and ignored_files_re.match(filename)))
for folder_name, glob_pattern in folder_tuples:
_LOGGER.debug(
"Scan folder %s with pattern %s for configuration files",
folder_name,
glob_pattern,
)
async for filename in anyio.Path(folder_name).glob(glob_pattern):
_LOGGER.debug("Found file %s.", filename)
yield (
str(filename),
(ignored_files and ignored_files_re.match(str(filename))),
)
def add_entry(_list, entry, yaml_file, lineno):
@@ -231,7 +241,7 @@ def check_entitites(hass):
return entities_missing
def parse(hass, folders, ignored_files, root=None):
async def parse(hass, folders, ignored_files, root=None):
"""Parse a yaml or json file for entities/services"""
files_parsed = 0
entity_pattern = re.compile(
@@ -247,7 +257,7 @@ def parse(hass, folders, ignored_files, root=None):
service_list = {}
effectively_ignored = []
_LOGGER.debug("::parse started")
for yaml_file, ignored in get_next_file(folders, ignored_files):
async for yaml_file, ignored in async_get_next_file(folders, ignored_files):
short_path = os.path.relpath(yaml_file, root)
if ignored:
effectively_ignored.append(short_path)
@@ -255,19 +265,24 @@ def parse(hass, folders, ignored_files, root=None):
continue
try:
for i, line in enumerate(open(yaml_file, encoding="utf-8")):
line = re.sub(comment_pattern, "", line)
for match in re.finditer(entity_pattern, line):
typ, val = match.group(1), match.group(2)
if (
typ != "service:"
and "*" not in val
and not val.endswith(".yaml")
):
add_entry(entity_list, val, short_path, i + 1)
for match in re.finditer(service_pattern, line):
val = match.group(1)
add_entry(service_list, val, short_path, i + 1)
lineno = 1
async with await anyio.open_file(
yaml_file, mode="r", encoding="utf-8"
) as f:
async for line in f:
line = re.sub(comment_pattern, "", line)
for match in re.finditer(entity_pattern, line):
typ, val = match.group(1), match.group(2)
if (
typ != "service:"
and "*" not in val
and not val.endswith(".yaml")
):
add_entry(entity_list, val, short_path, lineno)
for match in re.finditer(service_pattern, line):
val = match.group(1)
add_entry(service_list, val, short_path, lineno)
lineno += 1
files_parsed += 1
_LOGGER.debug("%s parsed", yaml_file)
except OSError as exception:
@@ -312,9 +327,9 @@ def fill(data, width, extra=None):
)
def report(hass, render, chunk_size, test_mode=False):
async def report(hass, render, chunk_size, test_mode=False):
"""generates watchman report either as a table or as a list"""
if not DOMAIN in hass.data:
if DOMAIN not in hass.data:
raise HomeAssistantError("No data for report, refresh required.")
start_time = time.time()
@@ -354,7 +369,11 @@ def report(hass, render, chunk_size, test_mode=False):
rep += "your config are available!\n"
else:
rep += "\n-== No entities found in configuration files!\n"
timezone = pytz.timezone(hass.config.time_zone)
def get_timezone(hass):
return pytz.timezone(hass.config.time_zone)
timezone = await hass.async_add_executor_job(get_timezone, hass)
if not test_mode:
report_datetime = datetime.now(timezone).strftime("%d %b %Y %H:%M:%S")

Binary file not shown.

After

Width:  |  Height:  |  Size: 44 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.3 MiB

View File

@@ -28,19 +28,19 @@ input_datetime:
has_date: false
has_time: true
automation:
- alias: Irrigation Unlimited Load UI Controls
trigger:
- platform: homeassistant
event: start
action:
- service: irrigation_unlimited.list_config
data:
entity_id: input_select.irrigation_unlimited_entities
section: entities
first: <none>
- service: irrigation_unlimited.list_config
data:
entity_id: input_select.irrigation_unlimited_sequences
section: sequences
first: <none>
# automation:
# - alias: Irrigation Unlimited Load UI Controls
# trigger:
# - platform: homeassistant
# event: start
# action:
# - service: irrigation_unlimited.list_config
# data:
# entity_id: input_select.irrigation_unlimited_entities
# section: entities
# first: <none>
# - service: irrigation_unlimited.list_config
# data:
# entity_id: input_select.irrigation_unlimited_sequences
# section: sequences
# first: <none>

View File

@@ -1,15 +1,68 @@
'1717141935702':
aorus_reboot:
alias: Aorus - Reboot
sequence:
- service: mqtt.publish
data:
topic: iotlink/home/aorus/commands/reboot
payload: ''
aorus_shutdown:
alias: Aorus - Shutdown
sequence:
- service: mqtt.publish
data:
topic: iotlink/home/aorus/commands/shutdown
payload: ''
aorus_test:
alias: Aorus - test notify
sequence:
- service: mqtt.publish
data:
topic: iotlink/home/aorus/commands/notify
payload: '{ "title": "This is a test", "message": "Click here to go to flemmingss.com",
"launchParams": "toast://open/http://flemmingss.com" }'
wt32_sc01_wake_up:
alias: WT32 sc01 wake up
sequence:
- service: mqtt.publish
data:
topic: hasp/plate1/command
payload: 'backlight {"state": 1, "brightness": 232}'
qos: 0
retain: false
mode: single
wt32_sc01_sleep:
alias: WT32 sc01 Sleep
sequence:
- service: mqtt.publish
data:
topic: hasp/plate1/command
payload: 'backlight {"state": 0, "brightness": 232}'
qos: 0
retain: false
mode: single
purge_database:
alias: purge database
sequence:
- service: recorder.purge
data:
repack: true
apply_filter: true
keep_days: 5
keep_days: 2
mode: single
'1717142010757':
clear_log:
alias: clear log
sequence:
- service: system_log.clear
data: {}
mode: single
description: ''
watchman_report:
alias: watchman_report
sequence:
- service: watchman.report
data:
create_file: true
send_notification: false
parse_config: true
chunk_size: false
description: ''

252
config/shopping_list.json Normal file
View File

@@ -0,0 +1,252 @@
[
{
"name": "bananes",
"id": "ccd87c7b0ab34b2cab13e13f1c15b953",
"complete": false
},
{
"name": "tomates en boites",
"id": "40f3826179cc4015abf05e854f0805b7",
"complete": false
},
{
"name": "cafe",
"id": "2ab5e9e8e0bf4b66bf3709d42bf1862f",
"complete": false
},
{
"name": "coca",
"id": "1fe1c6819f364efe8fb24f2076004884",
"complete": false
},
{
"name": "lait",
"id": "b25f67fed52d48cd95f8e5ff2c19445a",
"complete": false
},
{
"name": "Mandarinew",
"id": "399b702e0f5d4c668122aa443d12f5d7",
"complete": false
},
{
"name": "pates",
"id": "a9afb0dc45d34be59590f75feaca1298",
"complete": false
},
{
"name": "pq",
"id": "3552aa87fe114404afaa30756b915792",
"complete": false
},
{
"name": "saussice strasbourg",
"id": "58722f42ca894eaebf69e33141747d3f",
"complete": false
},
{
"name": "sucre",
"id": "48d77e0babb94314a53fb8eb5b982aec",
"complete": false
},
{
"name": "tranche rostte",
"id": "77f5606c8d0f42ce956a92ea4d1a4a2e",
"complete": false
},
{
"name": "2 pack 6 bouteille",
"id": "221a160c86e84932bb042a1c30152e3f",
"complete": false
},
{
"name": "fromage frais1kg",
"id": "4a444956e0fa4090864d91271285faea",
"complete": false
},
{
"name": "I3 les flottantes",
"id": "714e46d3bba245c98e996fca78aa5833",
"complete": false
},
{
"name": "Desset pistache",
"id": "264e93338d3a4240bf4f37f845495ba4",
"complete": false
},
{
"name": "Desset vanille",
"id": "cdd79df53003437781b11b8a63ca4f27",
"complete": false
},
{
"name": "reIlle d or praline",
"id": "40120f3a21d545c5b55c13ff6d376cd8",
"complete": false
},
{
"name": "Oranges",
"id": "262d56e80d5b43c29f5d1af87aff7547",
"complete": false
},
{
"name": "Poires",
"id": "b25d659bcddb4417a855c3cf9ca52fcb",
"complete": false
},
{
"name": "Kiwi",
"id": "8140a3c06f1c4dd4ba4d7023a1aa3d1c",
"complete": false
},
{
"name": "2 baguette",
"id": "ae4b83ce164a4116a9dfe1cc8f2d259b",
"complete": false
},
{
"name": "Jambon",
"id": "79f0e8fd4c7f488b9f836102c441723b",
"complete": false
},
{
"name": "Beurre",
"id": "fa90d569094d43d39b271698a8313dae",
"complete": false
},
{
"name": "2 boules 2 flutes",
"id": "700c9e55513e4218bef02e8eae7d2b0f",
"complete": false
},
{
"name": "Lessives",
"id": "f10694ffdf0141b0ab25ee37e0e4c3b2",
"complete": false
},
{
"name": "2 Sirop cassis ",
"id": "883245620efd4937af7f7609585eba7b",
"complete": false
},
{
"name": "yaourt nature",
"id": "0bfffb52067248f9ba1864adf90a593a",
"complete": false
},
{
"name": "2 flute",
"id": "6d61713c94734fda95d94a3476e58a48",
"complete": false
},
{
"name": "6 bouteille demi écrémé ",
"id": "58a3db5909504e6593fdd34f6b225632",
"complete": false
},
{
"name": "Cassis",
"id": "6a6e687e3ab8424eb358db3a21704b80",
"complete": false
},
{
"name": "Lave vaisselle",
"id": "b872a56028fc4b5ba024b352ad0bb295",
"complete": false
},
{
"name": "Compote",
"id": "0a6dfe1e9c92467c90dce7e4389bd991",
"complete": false
},
{
"name": "Salade de fruit",
"id": "f2c3c5ad219843d5a036a4c18e2a2c82",
"complete": false
},
{
"name": "Huile 3l",
"id": "2f1e4c7ad7764bccadc6450863ecce1c",
"complete": false
},
{
"name": "Fromage bleu",
"id": "04f2c6de49a24bd687900e1027d2dd04",
"complete": false
},
{
"name": "Fromage brebis frais",
"id": "df16358fba7b46028672e99684abb109",
"complete": false
},
{
"name": "2 Flute ",
"id": "91f695f1522f4e798ffe43067c458f23",
"complete": false
},
{
"name": "Nettoyant sol",
"id": "6a899a016d834f03a6148fdae419b8dc",
"complete": false
},
{
"name": "Lave vitre",
"id": "6e34d4fd6a604c028c32a86e0732203b",
"complete": false
},
{
"name": "Filtre n4 cafe",
"id": "4f3f5ae1015946028e9e534592ab62c5",
"complete": false
},
{
"name": "Caroote rouge",
"id": "9ac5ffb47ea34888baeb6652272df190",
"complete": false
},
{
"name": "Filet de truite nature ",
"id": "49901e8349ed48e2ac2e4484dc16a1ad",
"complete": false
},
{
"name": "sirop cassis",
"id": "faa629cedc88434293e8264b909dc488",
"complete": false
},
{
"name": "Dates",
"id": "0089116b159040ce903f4547f2ebdcce",
"complete": false
},
{
"name": "Choco 2",
"id": "bd74c95f73f54c139ee79b017fa45c5b",
"complete": false
},
{
"name": "Petis pois 2",
"id": "8d185228bd1048ba9e0c292fc1794586",
"complete": false
},
{
"name": "Sucre",
"id": "3a0bea850f9b462e95598c797fc44756",
"complete": false
},
{
"name": "Clementine",
"id": "5077437d5c1449669de10206b5ff5ff8",
"complete": false
},
{
"name": "Haricot 2",
"id": "a7860c075ee940529bd3d41b910c1abf",
"complete": false
},
{
"name": "Yaourt nature",
"id": "d351d89d82fd4aeba8ded4b99c63bed0",
"complete": false
}
]

View File

@@ -0,0 +1,7 @@
-----BEGIN OPENSSH PRIVATE KEY-----
b3BlbnNzaC1rZXktdjEAAAAABG5vbmUAAAAEbm9uZQAAAAAAAAABAAAAMwAAAAtzc2gtZW
QyNTUxOQAAACCjnJmRLV4OxZQCGOpuRVlDoFEia05K7XV+iWL2OPLKkwAAAJiE9VJMhPVS
TAAAAAtzc2gtZWQyNTUxOQAAACCjnJmRLV4OxZQCGOpuRVlDoFEia05K7XV+iWL2OPLKkw
AAAEC8EH07wKOzCuA5TPVu6VnHuYhDvTOPiuOJlm1r9nYcnaOcmZEtXg7FlAIY6m5FWUOg
USJrTkrtdX6JYvY48sqTAAAAEXJvb3RAYTBkN2I5NTQtc3NoAQIDBA==
-----END OPENSSH PRIVATE KEY-----

Some files were not shown because too many files have changed in this diff Show More