commit after moving to VM and upgrading to 107
parent: 01dd11a8ff
commit: 5584532549
@ -0,0 +1 @@
0.107.7
@ -0,0 +1,7 @@
/.storage/
/.vscode/
/.cloud/
*.code-workspace
*.js.gz
*.log
*.db
@ -0,0 +1,7 @@
- type: homeassistant
# - type: trusted_networks
#   trusted_networks:
#     - 10.0.0.3
#     - 127.0.0.1
#     - ::1
#   allow_bypass_login: true
@ -0,0 +1,8 @@
- alias: Update Available Notification
  trigger:
    platform: state
    entity_id: updater.updater
  action:
    service: notify.martin
    data:
      message: "Update for Home Assistant is available."
@ -0,0 +1,2 @@
fan.fan_state:
  friendly_name: This is a dummy entry to be overwritten
@ -0,0 +1,11 @@
# https://home-assistant.io/components/http/
# ssl_certificate: !secret http_ssl_certificate
# ssl_key: !secret http_ssl_key
# base_url: !secret http_base_url
# ip_ban_enabled: False
# login_attempts_threshold: 5
# base_url: https://ha238.kebler.net
# use_x_forwarded_for: true
# You must set the trusted proxy IP address so that Home Assistant will properly accept connections
# Set this to your Caddy machine IP, or localhost if hosted on the same machine.
# trusted_proxies: 127.0.0.1
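Editor's note: the commented lines above describe the reverse-proxy setup behind Caddy. If that setup were enabled, the active block would look roughly like the sketch below; only the 127.0.0.1 proxy address comes from the comments above, the rest is illustrative and not part of this commit.

    # sketch only - not part of this commit
    http:
      use_x_forwarded_for: true
      trusted_proxies:
        - 127.0.0.1
      ip_ban_enabled: true
      login_attempts_threshold: 5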
@ -0,0 +1,18 @@
# https://www.home-assistant.io/components/mqtt/

# these four are now set in the mqtt integrations ui
# there is no way to edit these. You must remove and add the mqtt integration
# broker: 10.0.0.1
# port: 1883
# username: !secret MQTT_username
# password: !secret MQTT_password
birth_message:
  topic: "status/ha"
  payload: "online"
  qos: 1
  retain: true
will_message:
  topic: "status/ha"
  payload: "offline"
  qos: 1
  retain: true
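Editor's note: with the broker credentials moved to the UI, this file only carries the birth/will messages on status/ha. As an illustration (not part of this commit), any other MQTT client or Home Assistant instance could track whether this instance is online from that topic:

    # sketch only: an availability indicator built on the status/ha topic
    binary_sensor:
      - platform: mqtt
        name: "Home Assistant online"
        state_topic: "status/ha"
        payload_on: "online"
        payload_off: "offline"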
@ -0,0 +1,4 @@
nodered:
  title: Node-RED
  icon: mdi:nodejs
  url: https://ha.kebler.net/hassio/ingress/a0d7b954_nodered
@ -0,0 +1,4 @@

# Use this file to store secrets like usernames and passwords.
# Learn more at https://home-assistant.io/docs/configuration/secrets/
some_password: welcome
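Editor's note: keys defined here are pulled in elsewhere with the !secret tag; the commented MQTT settings earlier in this commit already do this with MQTT_username and MQTT_password. A minimal illustration (the value is a placeholder, not from this commit):

    # secrets.yaml
    MQTT_username: my_mqtt_user
    # in any other config file:
    # username: !secret MQTT_username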
@ -0,0 +1,27 @@
homeassistant:
  # see packages/aa_system.yaml for basic settings
  customize: !include configs/customize.yaml
  auth_providers: !include configs/auth.yaml
  packages: !include_dir_named packages
# discovery:
default_config:
# CONFIG FILES
# group: !include configs/groups.yaml
automation: !include configs/automations.yaml
# script: !include configs/scripts.yaml
# http: !include configs/http.yaml
# mqtt: !include configs/mqtt.yaml
# panel_iframe: !include configs/panel.yaml
lovelace:
  mode: yaml
# needed for any time of day related triggers
sensor:
  - platform: time_date
    display_options:
      - 'time'
      - 'date'
      - 'date_time'
      - 'date_time_iso'
      - 'time_date'
      - 'time_utc'
      - 'beat'
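Editor's note: the packages: !include_dir_named packages line merges every YAML file in the packages/ directory into the configuration, keyed by file name; the comment above points at packages/aa_system.yaml for the basic settings, but that file is not part of this hunk. A hypothetical package file, purely to show the mechanism:

    # packages/example.yaml (hypothetical, not in this commit)
    input_boolean:
      vacation_mode:
        name: Vacation mode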
@ -0,0 +1,39 @@
{
    "config": {
        "abort": {
            "single_instance_allowed": "Only a single configuration of HACS is allowed."
        },
        "error": {
            "auth": "Personal Access Token is not correct."
        },
        "step": {
            "user": {
                "data": {
                    "token": "GitHub Personal Access Token",
                    "appdaemon": "Enable AppDaemon apps discovery & tracking",
                    "sidepanel_icon": "Side panel icon",
                    "sidepanel_title": "Side panel title"
                },
                "description": "If you need help with the configuration have a look here: https:\/\/hacs.xyz\/docs\/configuration\/start",
                "title": "HACS (Home Assistant Community Store)"
            }
        },
        "title": "HACS (Home Assistant Community Store)"
    },
    "options": {
        "step": {
            "user": {
                "data": {
                    "not_in_use": "Not in use with YAML",
                    "country": "Filter with country code.",
                    "experimental": "Enable experimental features",
                    "release_limit": "Number of releases to show.",
                    "debug": "Enable debug.",
                    "appdaemon": "Enable AppDaemon apps discovery & tracking",
                    "sidepanel_icon": "Side panel icon",
                    "sidepanel_title": "Side panel title"
                }
            }
        }
    }
}
@ -0,0 +1,221 @@
|
||||||
|
"""
|
||||||
|
Custom element manager for community created elements.
|
||||||
|
|
||||||
|
For more details about this integration, please refer to the documentation at
|
||||||
|
https://hacs.xyz/
|
||||||
|
"""
|
||||||
|
|
||||||
|
import voluptuous as vol
|
||||||
|
from aiogithubapi import AIOGitHub
|
||||||
|
from homeassistant import config_entries
|
||||||
|
from homeassistant.const import EVENT_HOMEASSISTANT_START
|
||||||
|
from homeassistant.const import __version__ as HAVERSION
|
||||||
|
from homeassistant.components.lovelace import system_health_info
|
||||||
|
from homeassistant.exceptions import ConfigEntryNotReady, ServiceNotFound
|
||||||
|
from homeassistant.helpers.aiohttp_client import async_create_clientsession
|
||||||
|
from homeassistant.helpers.event import async_call_later
|
||||||
|
|
||||||
|
from custom_components.hacs.configuration_schema import (
|
||||||
|
hacs_base_config_schema,
|
||||||
|
hacs_config_option_schema,
|
||||||
|
)
|
||||||
|
from custom_components.hacs.const import DOMAIN, ELEMENT_TYPES, STARTUP, VERSION
|
||||||
|
from custom_components.hacs.constrains import check_constans, check_requirements
|
||||||
|
from custom_components.hacs.hacsbase.configuration import Configuration
|
||||||
|
from custom_components.hacs.hacsbase.data import HacsData
|
||||||
|
from custom_components.hacs.setup import (
|
||||||
|
add_sensor,
|
||||||
|
load_hacs_repository,
|
||||||
|
setup_frontend,
|
||||||
|
)
|
||||||
|
|
||||||
|
from custom_components.hacs.globals import get_hacs
|
||||||
|
from custom_components.hacs.helpers.network import internet_connectivity_check
|
||||||
|
|
||||||
|
SCHEMA = hacs_base_config_schema()
|
||||||
|
SCHEMA[vol.Optional("options")] = hacs_config_option_schema()
|
||||||
|
CONFIG_SCHEMA = vol.Schema({DOMAIN: SCHEMA}, extra=vol.ALLOW_EXTRA)
|
||||||
|
|
||||||
|
|
||||||
|
async def async_setup(hass, config):
|
||||||
|
"""Set up this integration using yaml."""
|
||||||
|
hacs = get_hacs()
|
||||||
|
if DOMAIN not in config:
|
||||||
|
return True
|
||||||
|
hass.data[DOMAIN] = config
|
||||||
|
hacs.hass = hass
|
||||||
|
hacs.session = async_create_clientsession(hass)
|
||||||
|
hacs.configuration = Configuration.from_dict(
|
||||||
|
config[DOMAIN], config[DOMAIN].get("options")
|
||||||
|
)
|
||||||
|
hacs.configuration.config = config
|
||||||
|
hacs.configuration.config_type = "yaml"
|
||||||
|
await startup_wrapper_for_yaml()
|
||||||
|
hass.async_create_task(
|
||||||
|
hass.config_entries.flow.async_init(
|
||||||
|
DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data={}
|
||||||
|
)
|
||||||
|
)
|
||||||
|
return True
|
||||||
|
|
||||||
|
|
||||||
|
async def async_setup_entry(hass, config_entry):
|
||||||
|
"""Set up this integration using UI."""
|
||||||
|
hacs = get_hacs()
|
||||||
|
conf = hass.data.get(DOMAIN)
|
||||||
|
if config_entry.source == config_entries.SOURCE_IMPORT:
|
||||||
|
if conf is None:
|
||||||
|
hass.async_create_task(
|
||||||
|
hass.config_entries.async_remove(config_entry.entry_id)
|
||||||
|
)
|
||||||
|
return False
|
||||||
|
hacs.hass = hass
|
||||||
|
hacs.session = async_create_clientsession(hass)
|
||||||
|
hacs.configuration = Configuration.from_dict(
|
||||||
|
config_entry.data, config_entry.options
|
||||||
|
)
|
||||||
|
hacs.configuration.config_type = "flow"
|
||||||
|
hacs.configuration.config_entry = config_entry
|
||||||
|
config_entry.add_update_listener(reload_hacs)
|
||||||
|
startup_result = await hacs_startup()
|
||||||
|
if not startup_result:
|
||||||
|
hacs.system.disabled = True
|
||||||
|
raise ConfigEntryNotReady
|
||||||
|
hacs.system.disabled = False
|
||||||
|
return startup_result
|
||||||
|
|
||||||
|
|
||||||
|
async def startup_wrapper_for_yaml():
|
||||||
|
"""Startup wrapper for yaml config."""
|
||||||
|
hacs = get_hacs()
|
||||||
|
startup_result = await hacs_startup()
|
||||||
|
if not startup_result:
|
||||||
|
hacs.system.disabled = True
|
||||||
|
hacs.hass.components.frontend.async_remove_panel(
|
||||||
|
hacs.configuration.sidepanel_title.lower()
|
||||||
|
.replace(" ", "_")
|
||||||
|
.replace("-", "_")
|
||||||
|
)
|
||||||
|
hacs.logger.info("Could not setup HACS, trying again in 15 min")
|
||||||
|
async_call_later(hacs.hass, 900, startup_wrapper_for_yaml())
|
||||||
|
return
|
||||||
|
hacs.system.disabled = False
|
||||||
|
|
||||||
|
|
||||||
|
async def hacs_startup():
|
||||||
|
"""HACS startup tasks."""
|
||||||
|
hacs = get_hacs()
|
||||||
|
if not check_requirements():
|
||||||
|
return False
|
||||||
|
if hacs.configuration.debug:
|
||||||
|
try:
|
||||||
|
await hacs.hass.services.async_call(
|
||||||
|
"logger", "set_level", {"hacs": "debug"}
|
||||||
|
)
|
||||||
|
except ServiceNotFound:
|
||||||
|
hacs.logger.error(
|
||||||
|
"Could not set logging level to debug, logger is not enabled"
|
||||||
|
)
|
||||||
|
|
||||||
|
lovelace_info = await system_health_info(hacs.hass)
|
||||||
|
hacs.logger.debug(f"Configuration type: {hacs.configuration.config_type}")
|
||||||
|
hacs.version = VERSION
|
||||||
|
hacs.logger.info(STARTUP)
|
||||||
|
hacs.system.config_path = hacs.hass.config.path()
|
||||||
|
hacs.system.ha_version = HAVERSION
|
||||||
|
|
||||||
|
hacs.system.lovelace_mode = lovelace_info.get("mode", "yaml")
|
||||||
|
hacs.system.disabled = False
|
||||||
|
hacs.github = AIOGitHub(
|
||||||
|
hacs.configuration.token, async_create_clientsession(hacs.hass)
|
||||||
|
)
|
||||||
|
hacs.data = HacsData()
|
||||||
|
|
||||||
|
# Check HACS constraints
|
||||||
|
if not await hacs.hass.async_add_executor_job(check_constans):
|
||||||
|
if hacs.configuration.config_type == "flow":
|
||||||
|
if hacs.configuration.config_entry is not None:
|
||||||
|
await async_remove_entry(hacs.hass, hacs.configuration.config_entry)
|
||||||
|
return False
|
||||||
|
|
||||||
|
# Set up frontend
|
||||||
|
await setup_frontend()
|
||||||
|
|
||||||
|
if not await hacs.hass.async_add_executor_job(internet_connectivity_check):
|
||||||
|
hacs.logger.critical("No network connectivity")
|
||||||
|
return False
|
||||||
|
|
||||||
|
# Load HACS
|
||||||
|
if not await load_hacs_repository():
|
||||||
|
if hacs.configuration.config_type == "flow":
|
||||||
|
if hacs.configuration.config_entry is not None:
|
||||||
|
await async_remove_entry(hacs.hass, hacs.configuration.config_entry)
|
||||||
|
return False
|
||||||
|
|
||||||
|
# Restore from store files
|
||||||
|
if not await hacs.data.restore():
|
||||||
|
hacs_repo = hacs.get_by_name("hacs/integration")
|
||||||
|
hacs_repo.pending_restart = True
|
||||||
|
if hacs.configuration.config_type == "flow":
|
||||||
|
if hacs.configuration.config_entry is not None:
|
||||||
|
await async_remove_entry(hacs.hass, hacs.configuration.config_entry)
|
||||||
|
return False
|
||||||
|
|
||||||
|
# Add additional categories
|
||||||
|
hacs.common.categories = ELEMENT_TYPES
|
||||||
|
if hacs.configuration.appdaemon:
|
||||||
|
hacs.common.categories.append("appdaemon")
|
||||||
|
if hacs.configuration.python_script:
|
||||||
|
hacs.configuration.python_script = False
|
||||||
|
if hacs.configuration.config_type == "yaml":
|
||||||
|
hacs.logger.warning(
|
||||||
|
"Configuration option 'python_script' is deprecated and you should remove it from your configuration, HACS will know if you use 'python_script' in your Home Assistant configuration, this option will be removed in a future release."
|
||||||
|
)
|
||||||
|
if hacs.configuration.theme:
|
||||||
|
hacs.configuration.theme = False
|
||||||
|
if hacs.configuration.config_type == "yaml":
|
||||||
|
hacs.logger.warning(
|
||||||
|
"Configuration option 'theme' is deprecated and you should remove it from your configuration, HACS will know if you use 'theme' in your Home Assistant configuration, this option will be removed in a future release."
|
||||||
|
)
|
||||||
|
|
||||||
|
# Setup startup tasks
|
||||||
|
if hacs.configuration.config_type == "yaml":
|
||||||
|
hacs.hass.bus.async_listen_once(EVENT_HOMEASSISTANT_START, hacs.startup_tasks())
|
||||||
|
else:
|
||||||
|
async_call_later(hacs.hass, 5, hacs.startup_tasks())
|
||||||
|
|
||||||
|
# Show the configuration
|
||||||
|
hacs.configuration.print()
|
||||||
|
|
||||||
|
# Set up sensor
|
||||||
|
await hacs.hass.async_add_executor_job(add_sensor)
|
||||||
|
|
||||||
|
# Mischief managed!
|
||||||
|
return True
|
||||||
|
|
||||||
|
|
||||||
|
async def async_remove_entry(hass, config_entry):
|
||||||
|
"""Handle removal of an entry."""
|
||||||
|
hacs = get_hacs()
|
||||||
|
hacs.logger.info("Disabling HACS")
|
||||||
|
hacs.logger.info("Removing recurring tasks")
|
||||||
|
for task in hacs.recuring_tasks:
|
||||||
|
task()
|
||||||
|
hacs.logger.info("Removing sensor")
|
||||||
|
try:
|
||||||
|
await hass.config_entries.async_forward_entry_unload(config_entry, "sensor")
|
||||||
|
except ValueError:
|
||||||
|
pass
|
||||||
|
hacs.logger.info("Removing sidepanel")
|
||||||
|
try:
|
||||||
|
hass.components.frontend.async_remove_panel("hacs")
|
||||||
|
except AttributeError:
|
||||||
|
pass
|
||||||
|
hacs.system.disabled = True
|
||||||
|
hacs.logger.info("HACS is now disabled")
|
||||||
|
|
||||||
|
|
||||||
|
async def reload_hacs(hass, config_entry):
|
||||||
|
"""Reload HACS."""
|
||||||
|
await async_remove_entry(hass, config_entry)
|
||||||
|
await async_setup_entry(hass, config_entry)
|
|
@ -0,0 +1,102 @@
|
||||||
|
"""Adds config flow for HACS."""
|
||||||
|
# pylint: disable=dangerous-default-value
|
||||||
|
import logging
|
||||||
|
import voluptuous as vol
|
||||||
|
from aiogithubapi import AIOGitHubException, AIOGitHubAuthentication
|
||||||
|
from homeassistant import config_entries
|
||||||
|
from homeassistant.core import callback
|
||||||
|
from homeassistant.helpers import aiohttp_client
|
||||||
|
|
||||||
|
from .const import DOMAIN
|
||||||
|
from .configuration_schema import hacs_base_config_schema, hacs_config_option_schema
|
||||||
|
|
||||||
|
from custom_components.hacs.globals import get_hacs
|
||||||
|
from custom_components.hacs.helpers.information import get_repository
|
||||||
|
|
||||||
|
_LOGGER = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class HacsFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
|
||||||
|
"""Config flow for HACS."""
|
||||||
|
|
||||||
|
VERSION = 1
|
||||||
|
CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_POLL
|
||||||
|
|
||||||
|
def __init__(self):
|
||||||
|
"""Initialize."""
|
||||||
|
self._errors = {}
|
||||||
|
|
||||||
|
async def async_step_user(self, user_input={}):
|
||||||
|
"""Handle a flow initialized by the user."""
|
||||||
|
self._errors = {}
|
||||||
|
if self._async_current_entries():
|
||||||
|
return self.async_abort(reason="single_instance_allowed")
|
||||||
|
if self.hass.data.get(DOMAIN):
|
||||||
|
return self.async_abort(reason="single_instance_allowed")
|
||||||
|
|
||||||
|
if user_input is not None:
|
||||||
|
if await self._test_token(user_input["token"]):
|
||||||
|
return self.async_create_entry(title="", data=user_input)
|
||||||
|
|
||||||
|
self._errors["base"] = "auth"
|
||||||
|
return await self._show_config_form(user_input)
|
||||||
|
|
||||||
|
return await self._show_config_form(user_input)
|
||||||
|
|
||||||
|
async def _show_config_form(self, user_input):
|
||||||
|
"""Show the configuration form to edit location data."""
|
||||||
|
return self.async_show_form(
|
||||||
|
step_id="user",
|
||||||
|
data_schema=vol.Schema(hacs_base_config_schema(user_input)),
|
||||||
|
errors=self._errors,
|
||||||
|
)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
@callback
|
||||||
|
def async_get_options_flow(config_entry):
|
||||||
|
return HacsOptionsFlowHandler(config_entry)
|
||||||
|
|
||||||
|
async def async_step_import(self, user_input):
|
||||||
|
"""Import a config entry.
|
||||||
|
Special type of import, we're not actually going to store any data.
|
||||||
|
Instead, we're going to rely on the values that are in config file.
|
||||||
|
"""
|
||||||
|
if self._async_current_entries():
|
||||||
|
return self.async_abort(reason="single_instance_allowed")
|
||||||
|
|
||||||
|
return self.async_create_entry(title="configuration.yaml", data={})
|
||||||
|
|
||||||
|
async def _test_token(self, token):
|
||||||
|
"""Return true if token is valid."""
|
||||||
|
try:
|
||||||
|
session = aiohttp_client.async_get_clientsession(self.hass)
|
||||||
|
await get_repository(session, token, "hacs/org")
|
||||||
|
return True
|
||||||
|
except (AIOGitHubException, AIOGitHubAuthentication) as exception:
|
||||||
|
_LOGGER.error(exception)
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
class HacsOptionsFlowHandler(config_entries.OptionsFlow):
|
||||||
|
"""HACS config flow options handler."""
|
||||||
|
|
||||||
|
def __init__(self, config_entry):
|
||||||
|
"""Initialize HACS options flow."""
|
||||||
|
self.config_entry = config_entry
|
||||||
|
|
||||||
|
async def async_step_init(self, user_input=None):
|
||||||
|
"""Manage the options."""
|
||||||
|
return await self.async_step_user()
|
||||||
|
|
||||||
|
async def async_step_user(self, user_input=None):
|
||||||
|
"""Handle a flow initialized by the user."""
|
||||||
|
hacs = get_hacs()
|
||||||
|
if user_input is not None:
|
||||||
|
return self.async_create_entry(title="", data=user_input)
|
||||||
|
|
||||||
|
if hacs.configuration.config_type == "yaml":
|
||||||
|
schema = {vol.Optional("not_in_use", default=""): str}
|
||||||
|
else:
|
||||||
|
schema = hacs_config_option_schema(self.config_entry.options)
|
||||||
|
|
||||||
|
return self.async_show_form(step_id="user", data_schema=vol.Schema(schema))
|
|
@ -0,0 +1,62 @@
"""HACS Configuration Schemas."""
# pylint: disable=dangerous-default-value
import voluptuous as vol

from .const import LOCALE

# Configuration:
TOKEN = "token"
SIDEPANEL_TITLE = "sidepanel_title"
SIDEPANEL_ICON = "sidepanel_icon"
APPDAEMON = "appdaemon"
PYTHON_SCRIPT = "python_script"
THEME = "theme"

# Options:
COUNTRY = "country"
DEBUG = "debug"
RELEASE_LIMIT = "release_limit"
EXPERIMENTAL = "experimental"


def hacs_base_config_schema(config: dict = {}) -> dict:
    """Return a schema configuration dict for HACS."""
    if not config:
        config = {
            TOKEN: "xxxxxxxxxxxxxxxxxxxxxxxxxxx",
            SIDEPANEL_ICON: "mdi:alpha-c-box",
            SIDEPANEL_TITLE: "HACS",
            APPDAEMON: False,
            PYTHON_SCRIPT: False,
            THEME: False,
        }
    return {
        vol.Required(TOKEN, default=config.get(TOKEN)): str,
        vol.Optional(SIDEPANEL_TITLE, default=config.get(SIDEPANEL_TITLE)): str,
        vol.Optional(SIDEPANEL_ICON, default=config.get(SIDEPANEL_ICON)): str,
        vol.Optional(APPDAEMON, default=config.get(APPDAEMON)): bool,
        vol.Optional(PYTHON_SCRIPT, default=config.get(PYTHON_SCRIPT)): bool,
        vol.Optional(THEME, default=config.get(THEME)): bool,
    }


def hacs_config_option_schema(options: dict = {}) -> dict:
    """Return a schema for HACS configuration options."""
    if not options:
        options = {COUNTRY: "ALL", DEBUG: False, RELEASE_LIMIT: 5, EXPERIMENTAL: False}
    return {
        vol.Optional(COUNTRY, default=options.get(COUNTRY)): vol.In(LOCALE),
        vol.Optional(RELEASE_LIMIT, default=options.get(RELEASE_LIMIT)): int,
        vol.Optional(EXPERIMENTAL, default=options.get(EXPERIMENTAL)): bool,
        vol.Optional(DEBUG, default=options.get(DEBUG)): bool,
    }


def hacs_config_combined() -> dict:
    """Combine the configuration options."""
    base = hacs_base_config_schema()
    options = hacs_config_option_schema()

    for option in options:
        base[option] = options[option]

    return base
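Editor's note: these helpers return plain dicts of voluptuous markers; wrapping one in vol.Schema gives a validator. A rough usage sketch (the token value is a placeholder):

    # sketch: validating a YAML-style HACS config with the combined schema
    import voluptuous as vol
    from custom_components.hacs.configuration_schema import hacs_config_combined

    schema = vol.Schema(hacs_config_combined())
    validated = schema({"token": "xxxxxxxx", "appdaemon": True, "release_limit": 10})
    # missing optional keys are filled with their defaults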
@ -0,0 +1,293 @@
"""Constants for HACS"""
NAME_LONG = "HACS (Home Assistant Community Store)"
NAME_SHORT = "HACS"
VERSION = "master"
DOMAIN = "hacs"
PROJECT_URL = "https://github.com/hacs/integration/"
CUSTOM_UPDATER_LOCATIONS = [
    "{}/custom_components/custom_updater.py",
    "{}/custom_components/custom_updater/__init__.py",
]

ISSUE_URL = f"{PROJECT_URL}issues"
DOMAIN_DATA = f"{NAME_SHORT.lower()}_data"

ELEMENT_TYPES = ["integration", "plugin"]

IFRAME = {
    "title": "Community",
    "icon": "mdi:alpha-c-box",
    "url": "/community_overview",
    "path": "community",
    "require_admin": True,
}


# Messages
CUSTOM_UPDATER_WARNING = """
This cannot be used with custom_updater.
To use this you need to remove custom_updater from {}
"""

STARTUP = f"""
-------------------------------------------------------------------
HACS (Home Assistant Community Store)

Version: {VERSION}
This is a custom integration
If you have any issues with this you need to open an issue here:
https://github.com/hacs/integration/issues
-------------------------------------------------------------------
"""

LOCALE = [
    "ALL",
    "AF", "AL", "DZ", "AS", "AD", "AO", "AI", "AQ", "AG", "AR", "AM", "AW", "AU", "AT",
    "AZ", "BS", "BH", "BD", "BB", "BY", "BE", "BZ", "BJ", "BM", "BT", "BO", "BQ", "BA",
    "BW", "BV", "BR", "IO", "BN", "BG", "BF", "BI", "KH", "CM", "CA", "CV", "KY", "CF",
    "TD", "CL", "CN", "CX", "CC", "CO", "KM", "CG", "CD", "CK", "CR", "HR", "CU", "CW",
    "CY", "CZ", "CI", "DK", "DJ", "DM", "DO", "EC", "EG", "SV", "GQ", "ER", "EE", "ET",
    "FK", "FO", "FJ", "FI", "FR", "GF", "PF", "TF", "GA", "GM", "GE", "DE", "GH", "GI",
    "GR", "GL", "GD", "GP", "GU", "GT", "GG", "GN", "GW", "GY", "HT", "HM", "VA", "HN",
    "HK", "HU", "IS", "IN", "ID", "IR", "IQ", "IE", "IM", "IL", "IT", "JM", "JP", "JE",
    "JO", "KZ", "KE", "KI", "KP", "KR", "KW", "KG", "LA", "LV", "LB", "LS", "LR", "LY",
    "LI", "LT", "LU", "MO", "MK", "MG", "MW", "MY", "MV", "ML", "MT", "MH", "MQ", "MR",
    "MU", "YT", "MX", "FM", "MD", "MC", "MN", "ME", "MS", "MA", "MZ", "MM", "NA", "NR",
    "NP", "NL", "NC", "NZ", "NI", "NE", "NG", "NU", "NF", "MP", "NO", "OM", "PK", "PW",
    "PS", "PA", "PG", "PY", "PE", "PH", "PN", "PL", "PT", "PR", "QA", "RO", "RU", "RW",
    "RE", "BL", "SH", "KN", "LC", "MF", "PM", "VC", "WS", "SM", "ST", "SA", "SN", "RS",
    "SC", "SL", "SG", "SX", "SK", "SI", "SB", "SO", "ZA", "GS", "SS", "ES", "LK", "SD",
    "SR", "SJ", "SZ", "SE", "CH", "SY", "TW", "TJ", "TZ", "TH", "TL", "TG", "TK", "TO",
    "TT", "TN", "TR", "TM", "TC", "TV", "UG", "UA", "AE", "GB", "US", "UM", "UY", "UZ",
    "VU", "VE", "VN", "VG", "VI", "WF", "EH", "YE", "ZM", "ZW",
]
@ -0,0 +1,99 @@
"""HACS Startup constraints."""
# pylint: disable=bad-continuation
import os

from .const import CUSTOM_UPDATER_LOCATIONS, CUSTOM_UPDATER_WARNING
from .helpers.misc import version_left_higher_then_right

from custom_components.hacs.globals import get_hacs

MINIMUM_HA_VERSION = "0.98.0"


def check_constans():
    """Check HACS constraints."""
    if not constrain_translations():
        return False
    if not constrain_custom_updater():
        return False
    if not constrain_version():
        return False
    return True


def constrain_custom_updater():
    """Check if custom_updater exists."""
    hacs = get_hacs()
    for location in CUSTOM_UPDATER_LOCATIONS:
        if os.path.exists(location.format(hacs.system.config_path)):
            msg = CUSTOM_UPDATER_WARNING.format(
                location.format(hacs.system.config_path)
            )
            hacs.logger.critical(msg)
            return False
    return True


def constrain_version():
    """Check if the version is valid."""
    hacs = get_hacs()
    if not version_left_higher_then_right(hacs.system.ha_version, MINIMUM_HA_VERSION):
        hacs.logger.critical(
            f"You need HA version {MINIMUM_HA_VERSION} or newer to use this integration."
        )
        return False
    return True


def constrain_translations():
    """Check if translations exist."""
    hacs = get_hacs()
    if not os.path.exists(
        f"{hacs.system.config_path}/custom_components/hacs/.translations"
    ):
        hacs.logger.critical("You are missing the translations directory.")
        return False
    return True


def check_requirements():
    """Check the requirements."""
    missing = []
    try:
        from aiogithubapi import AIOGitHubException  # pylint: disable=unused-import
    except ImportError:
        missing.append("aiogithubapi")

    try:
        from hacs_frontend import locate_gz  # pylint: disable=unused-import
    except ImportError:
        missing.append("hacs_frontend")

    try:
        import semantic_version  # pylint: disable=unused-import
    except ImportError:
        missing.append("semantic_version")

    try:
        from integrationhelper import Logger  # pylint: disable=unused-import
    except ImportError:
        missing.append("integrationhelper")

    try:
        import backoff  # pylint: disable=unused-import
    except ImportError:
        missing.append("backoff")

    try:
        import aiofiles  # pylint: disable=unused-import
    except ImportError:
        missing.append("aiofiles")

    if missing:
        hacs = get_hacs()
        for requirement in missing:
            hacs.logger.critical(
                f"Required python requirement '{requirement}' is not installed"
            )
        return False
    return True
@ -0,0 +1,29 @@
# pylint: disable=invalid-name, missing-docstring
hacs = []
removed_repositories = []


def get_hacs():
    if not hacs:
        from custom_components.hacs.hacsbase import Hacs

        hacs.append(Hacs())

    return hacs[0]


def is_removed(repository):
    return repository in [x.repository for x in removed_repositories]


def get_removed(repository):
    if not is_removed(repository):
        from custom_components.hacs.repositories.removed import RemovedRepository

        removed_repo = RemovedRepository()
        removed_repo.repository = repository
        removed_repositories.append(removed_repo)
    filter_repos = [
        x for x in removed_repositories if x.repository.lower() == repository.lower()
    ]
    return filter_repos[0]
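Editor's note: the module keeps the single Hacs instance in a module-level list, so every importer shares the same object. A sketch of how the other HACS modules in this commit use it:

    # sketch: any HACS module gets the shared instance this way
    from custom_components.hacs.globals import get_hacs

    hacs = get_hacs()  # first call creates the Hacs() singleton, later calls reuse it
    hacs.logger.info("Same object everywhere")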
@ -0,0 +1,355 @@
|
||||||
|
"""Initialize the HACS base."""
|
||||||
|
# pylint: disable=unused-argument, bad-continuation
|
||||||
|
import json
|
||||||
|
import uuid
|
||||||
|
from datetime import timedelta
|
||||||
|
|
||||||
|
from homeassistant.helpers.event import async_call_later, async_track_time_interval
|
||||||
|
|
||||||
|
from aiogithubapi import AIOGitHubException, AIOGitHubRatelimit
|
||||||
|
from integrationhelper import Logger
|
||||||
|
|
||||||
|
from custom_components.hacs.hacsbase.task_factory import HacsTaskFactory
|
||||||
|
from custom_components.hacs.hacsbase.exceptions import HacsException
|
||||||
|
|
||||||
|
from custom_components.hacs.const import ELEMENT_TYPES
|
||||||
|
from custom_components.hacs.setup import setup_extra_stores
|
||||||
|
from custom_components.hacs.store import async_load_from_store, async_save_to_store
|
||||||
|
from custom_components.hacs.helpers.get_defaults import (
|
||||||
|
get_default_repos_lists,
|
||||||
|
get_default_repos_orgs,
|
||||||
|
)
|
||||||
|
|
||||||
|
from custom_components.hacs.helpers.register_repository import register_repository
|
||||||
|
from custom_components.hacs.globals import removed_repositories, get_removed, is_removed
|
||||||
|
from custom_components.hacs.repositories.removed import RemovedRepository
|
||||||
|
|
||||||
|
|
||||||
|
class HacsStatus:
|
||||||
|
"""HacsStatus."""
|
||||||
|
|
||||||
|
startup = True
|
||||||
|
new = False
|
||||||
|
background_task = False
|
||||||
|
reloading_data = False
|
||||||
|
upgrading_all = False
|
||||||
|
|
||||||
|
|
||||||
|
class HacsFrontend:
|
||||||
|
"""HacsFrontend."""
|
||||||
|
|
||||||
|
version_running = None
|
||||||
|
version_available = None
|
||||||
|
update_pending = False
|
||||||
|
|
||||||
|
|
||||||
|
class HacsCommon:
|
||||||
|
"""Common for HACS."""
|
||||||
|
|
||||||
|
categories = []
|
||||||
|
default = []
|
||||||
|
installed = []
|
||||||
|
skip = []
|
||||||
|
|
||||||
|
|
||||||
|
class System:
|
||||||
|
"""System info."""
|
||||||
|
|
||||||
|
status = HacsStatus()
|
||||||
|
config_path = None
|
||||||
|
ha_version = None
|
||||||
|
disabled = False
|
||||||
|
lovelace_mode = "storage"
|
||||||
|
|
||||||
|
|
||||||
|
class Developer:
|
||||||
|
"""Developer settings/tools."""
|
||||||
|
|
||||||
|
template_id = "Repository ID"
|
||||||
|
template_content = ""
|
||||||
|
template_raw = ""
|
||||||
|
|
||||||
|
@property
|
||||||
|
def devcontainer(self):
|
||||||
|
"""Is it a devcontainer?"""
|
||||||
|
import os
|
||||||
|
|
||||||
|
if "DEVCONTAINER" in os.environ:
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
class Hacs:
|
||||||
|
"""The base class of HACS, nested throughout the project."""
|
||||||
|
|
||||||
|
token = f"{str(uuid.uuid4())}-{str(uuid.uuid4())}"
|
||||||
|
hacsweb = f"/hacsweb/{token}"
|
||||||
|
hacsapi = f"/hacsapi/{token}"
|
||||||
|
repositories = []
|
||||||
|
frontend = HacsFrontend()
|
||||||
|
repo = None
|
||||||
|
data_repo = None
|
||||||
|
developer = Developer()
|
||||||
|
data = None
|
||||||
|
configuration = None
|
||||||
|
logger = Logger("hacs")
|
||||||
|
github = None
|
||||||
|
hass = None
|
||||||
|
version = None
|
||||||
|
session = None
|
||||||
|
factory = HacsTaskFactory()
|
||||||
|
system = System()
|
||||||
|
recuring_tasks = []
|
||||||
|
common = HacsCommon()
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def init(hass, github_token):
|
||||||
|
"""Return a initialized HACS object."""
|
||||||
|
return Hacs()
|
||||||
|
|
||||||
|
def get_by_id(self, repository_id):
|
||||||
|
"""Get repository by ID."""
|
||||||
|
try:
|
||||||
|
for repository in self.repositories:
|
||||||
|
if repository.information.uid == repository_id:
|
||||||
|
return repository
|
||||||
|
except Exception: # pylint: disable=broad-except
|
||||||
|
pass
|
||||||
|
return None
|
||||||
|
|
||||||
|
def get_by_name(self, repository_full_name):
|
||||||
|
"""Get repository by full_name."""
|
||||||
|
try:
|
||||||
|
for repository in self.repositories:
|
||||||
|
if repository.data.full_name.lower() == repository_full_name.lower():
|
||||||
|
return repository
|
||||||
|
except Exception: # pylint: disable=broad-except
|
||||||
|
pass
|
||||||
|
return None
|
||||||
|
|
||||||
|
def is_known(self, repository_full_name):
|
||||||
|
"""Return a bool if the repository is known."""
|
||||||
|
return repository_full_name.lower() in [
|
||||||
|
x.data.full_name.lower() for x in self.repositories
|
||||||
|
]
|
||||||
|
|
||||||
|
@property
|
||||||
|
def sorted_by_name(self):
|
||||||
|
"""Return a sorted(by name) list of repository objects."""
|
||||||
|
return sorted(self.repositories, key=lambda x: x.display_name)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def sorted_by_repository_name(self):
|
||||||
|
"""Return a sorted(by repository_name) list of repository objects."""
|
||||||
|
return sorted(self.repositories, key=lambda x: x.data.full_name)
|
||||||
|
|
||||||
|
async def register_repository(self, full_name, category, check=True):
|
||||||
|
"""Register a repository."""
|
||||||
|
await register_repository(full_name, category, check=True)
|
||||||
|
|
||||||
|
async def startup_tasks(self):
|
||||||
|
"""Tasks that are started after startup."""
|
||||||
|
self.system.status.background_task = True
|
||||||
|
await self.hass.async_add_executor_job(setup_extra_stores)
|
||||||
|
self.hass.bus.async_fire("hacs/status", {})
|
||||||
|
self.logger.debug(self.github.ratelimits.remaining)
|
||||||
|
self.logger.debug(self.github.ratelimits.reset_utc)
|
||||||
|
|
||||||
|
await self.handle_critical_repositories_startup()
|
||||||
|
await self.handle_critical_repositories()
|
||||||
|
await self.load_known_repositories()
|
||||||
|
await self.clear_out_removed_repositories()
|
||||||
|
|
||||||
|
self.recuring_tasks.append(
|
||||||
|
async_track_time_interval(
|
||||||
|
self.hass, self.recuring_tasks_installed, timedelta(minutes=30)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
self.recuring_tasks.append(
|
||||||
|
async_track_time_interval(
|
||||||
|
self.hass, self.recuring_tasks_all, timedelta(minutes=800)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
self.hass.bus.async_fire("hacs/reload", {"force": True})
|
||||||
|
await self.recuring_tasks_installed()
|
||||||
|
|
||||||
|
self.system.status.startup = False
|
||||||
|
self.system.status.new = False
|
||||||
|
self.system.status.background_task = False
|
||||||
|
self.hass.bus.async_fire("hacs/status", {})
|
||||||
|
await self.data.async_write()
|
||||||
|
|
||||||
|
async def handle_critical_repositories_startup(self):
|
||||||
|
"""Handle critical repositories during startup."""
|
||||||
|
alert = False
|
||||||
|
critical = await async_load_from_store(self.hass, "critical")
|
||||||
|
if not critical:
|
||||||
|
return
|
||||||
|
for repo in critical:
|
||||||
|
if not repo["acknowledged"]:
|
||||||
|
alert = True
|
||||||
|
if alert:
|
||||||
|
self.logger.critical("URGENT!: Check the HACS panel!")
|
||||||
|
self.hass.components.persistent_notification.create(
|
||||||
|
title="URGENT!", message="**Check the HACS panel!**"
|
||||||
|
)
|
||||||
|
|
||||||
|
async def handle_critical_repositories(self):
|
||||||
|
"""Handle critical repositories during runtime."""
|
||||||
|
# Get critical repositories
|
||||||
|
instored = []
|
||||||
|
critical = []
|
||||||
|
was_installed = False
|
||||||
|
|
||||||
|
try:
|
||||||
|
critical = await self.data_repo.get_contents("critical")
|
||||||
|
critical = json.loads(critical.content)
|
||||||
|
except AIOGitHubException:
|
||||||
|
pass
|
||||||
|
|
||||||
|
if not critical:
|
||||||
|
self.logger.debug("No critical repositories")
|
||||||
|
return
|
||||||
|
|
||||||
|
stored_critical = await async_load_from_store(self.hass, "critical")
|
||||||
|
|
||||||
|
for stored in stored_critical or []:
|
||||||
|
instored.append(stored["repository"])
|
||||||
|
|
||||||
|
stored_critical = []
|
||||||
|
|
||||||
|
for repository in critical:
|
||||||
|
removed_repo = get_removed(repository["repository"])
|
||||||
|
removed_repo.removal_type = "critical"
|
||||||
|
repo = self.get_by_name(repository["repository"])
|
||||||
|
|
||||||
|
stored = {
|
||||||
|
"repository": repository["repository"],
|
||||||
|
"reason": repository["reason"],
|
||||||
|
"link": repository["link"],
|
||||||
|
"acknowledged": True,
|
||||||
|
}
|
||||||
|
if repository["repository"] not in instored:
|
||||||
|
if repo is not None and repo.installed:
|
||||||
|
self.logger.critical(
|
||||||
|
f"Removing repository {repository['repository']}, it is marked as critical"
|
||||||
|
)
|
||||||
|
was_installed = True
|
||||||
|
stored["acknowledged"] = False
|
||||||
|
# Uninstall from HACS
|
||||||
|
repo.remove()
|
||||||
|
await repo.uninstall()
|
||||||
|
stored_critical.append(stored)
|
||||||
|
removed_repo.update_data(stored)
|
||||||
|
|
||||||
|
# Save to FS
|
||||||
|
await async_save_to_store(self.hass, "critical", stored_critical)
|
||||||
|
|
||||||
|
# Restart HASS
|
||||||
|
if was_installed:
|
||||||
|
self.logger.critical("Restarting Home Assistant")
|
||||||
|
self.hass.async_create_task(self.hass.async_stop(100))
|
||||||
|
|
||||||
|
async def recuring_tasks_installed(self, notarealarg=None):
|
||||||
|
"""Recurring tasks for installed repositories."""
|
||||||
|
self.logger.debug(
|
||||||
|
"Starting recuring background task for installed repositories"
|
||||||
|
)
|
||||||
|
self.system.status.background_task = True
|
||||||
|
self.hass.bus.async_fire("hacs/status", {})
|
||||||
|
self.logger.debug(self.github.ratelimits.remaining)
|
||||||
|
self.logger.debug(self.github.ratelimits.reset_utc)
|
||||||
|
for repository in self.repositories:
|
||||||
|
if (
|
||||||
|
repository.status.installed
|
||||||
|
and repository.data.category in self.common.categories
|
||||||
|
):
|
||||||
|
self.factory.tasks.append(self.factory.safe_update(repository))
|
||||||
|
|
||||||
|
await self.factory.execute()
|
||||||
|
await self.handle_critical_repositories()
|
||||||
|
self.system.status.background_task = False
|
||||||
|
self.hass.bus.async_fire("hacs/status", {})
|
||||||
|
await self.data.async_write()
|
||||||
|
self.logger.debug("Recuring background task for installed repositories done")
|
||||||
|
|
||||||
|
async def recuring_tasks_all(self, notarealarg=None):
|
||||||
|
"""Recurring tasks for all repositories."""
|
||||||
|
self.logger.debug("Starting recuring background task for all repositories")
|
||||||
|
await self.hass.async_add_executor_job(setup_extra_stores)
|
||||||
|
self.system.status.background_task = True
|
||||||
|
self.hass.bus.async_fire("hacs/status", {})
|
||||||
|
self.logger.debug(self.github.ratelimits.remaining)
|
||||||
|
self.logger.debug(self.github.ratelimits.reset_utc)
|
||||||
|
for repository in self.repositories:
|
||||||
|
if repository.data.category in self.common.categories:
|
||||||
|
self.factory.tasks.append(self.factory.safe_common_update(repository))
|
||||||
|
|
||||||
|
await self.factory.execute()
|
||||||
|
await self.load_known_repositories()
|
||||||
|
await self.clear_out_removed_repositories()
|
||||||
|
self.system.status.background_task = False
|
||||||
|
await self.data.async_write()
|
||||||
|
self.hass.bus.async_fire("hacs/status", {})
|
||||||
|
self.hass.bus.async_fire("hacs/repository", {"action": "reload"})
|
||||||
|
self.logger.debug("Recuring background task for all repositories done")
|
||||||
|
|
||||||
|
async def clear_out_removed_repositories(self):
|
||||||
|
"""Clear out blacklisted repositories."""
|
||||||
|
need_to_save = False
|
||||||
|
for removed in removed_repositories:
|
||||||
|
if self.is_known(removed.repository):
|
||||||
|
repository = self.get_by_name(removed.repository)
|
||||||
|
if repository.status.installed and removed.removal_type != "critical":
|
||||||
|
self.logger.warning(
|
||||||
|
f"You have {repository.data.full_name} installed with HACS "
|
||||||
|
+ f"this repository has been removed, please consider removing it. "
|
||||||
|
+ f"Removal reason ({removed.removal_type})"
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
need_to_save = True
|
||||||
|
repository.remove()
|
||||||
|
|
||||||
|
if need_to_save:
|
||||||
|
await self.data.async_write()
|
||||||
|
|
||||||
|
async def get_repositories(self):
|
||||||
|
"""Return a list of repositories."""
|
||||||
|
repositories = {}
|
||||||
|
for category in self.common.categories:
|
||||||
|
repositories[category] = await get_default_repos_lists(
|
||||||
|
self.session, self.configuration.token, category
|
||||||
|
)
|
||||||
|
org = await get_default_repos_orgs(self.github, category)
|
||||||
|
for repo in org:
|
||||||
|
repositories[category].append(repo)
|
||||||
|
|
||||||
|
for category in repositories:
|
||||||
|
for repo in repositories[category]:
|
||||||
|
if repo not in self.common.default:
|
||||||
|
self.common.default.append(repo)
|
||||||
|
return repositories
|
||||||
|
|
||||||
|
async def load_known_repositories(self):
|
||||||
|
"""Load known repositories."""
|
||||||
|
self.logger.info("Loading known repositories")
|
||||||
|
repositories = await self.get_repositories()
|
||||||
|
|
||||||
|
for item in await get_default_repos_lists(
|
||||||
|
self.session, self.configuration.token, "removed"
|
||||||
|
):
|
||||||
|
removed = get_removed(item["repository"])
|
||||||
|
removed.reason = item.get("reason")
|
||||||
|
removed.link = item.get("link")
|
||||||
|
removed.removal_type = item.get("removal_type")
|
||||||
|
|
||||||
|
for category in repositories:
|
||||||
|
for repo in repositories[category]:
|
||||||
|
if is_removed(repo):
|
||||||
|
continue
|
||||||
|
if self.is_known(repo):
|
||||||
|
continue
|
||||||
|
self.factory.tasks.append(self.factory.safe_register(repo, category))
|
||||||
|
await self.factory.execute()
|
||||||
|
self.logger.info("Loading known repositories finished")
|
|
@ -0,0 +1,72 @@
"""Backup."""
import os
import shutil
import tempfile
from time import sleep

from integrationhelper import Logger

BACKUP_PATH = tempfile.gettempdir() + "/hacs_backup/"


class Backup:
    """Backup."""

    def __init__(self, local_path, backup_path=BACKUP_PATH):
        """initialize."""
        self.logger = Logger("hacs.backup")
        self.local_path = local_path
        self.backup_path = backup_path
        self.backup_path_full = f"{self.backup_path}{self.local_path.split('/')[-1]}"

    def create(self):
        """Create a backup in /tmp"""
        if not os.path.exists(self.local_path):
            return
        if os.path.exists(self.backup_path):
            shutil.rmtree(self.backup_path)
            while os.path.exists(self.backup_path):
                sleep(0.1)
        os.makedirs(self.backup_path, exist_ok=True)

        try:
            if os.path.isfile(self.local_path):
                shutil.copyfile(self.local_path, self.backup_path_full)
                os.remove(self.local_path)
            else:
                shutil.copytree(self.local_path, self.backup_path_full)
                shutil.rmtree(self.local_path)
                while os.path.exists(self.local_path):
                    sleep(0.1)
            self.logger.debug(
                f"Backup for {self.local_path}, created in {self.backup_path_full}"
            )
        except Exception:  # pylint: disable=broad-except
            pass

    def restore(self):
        """Restore from backup."""
        if not os.path.exists(self.backup_path_full):
            return

        if os.path.isfile(self.backup_path_full):
            if os.path.exists(self.local_path):
                os.remove(self.local_path)
            shutil.copyfile(self.backup_path_full, self.local_path)
        else:
            if os.path.exists(self.local_path):
                shutil.rmtree(self.local_path)
                while os.path.exists(self.local_path):
                    sleep(0.1)
            shutil.copytree(self.backup_path_full, self.local_path)
        self.logger.debug(
            f"Restored {self.local_path}, from backup {self.backup_path_full}"
        )

    def cleanup(self):
        """Cleanup backup files."""
        if os.path.exists(self.backup_path):
            shutil.rmtree(self.backup_path)
            while os.path.exists(self.backup_path):
                sleep(0.1)
            self.logger.debug(f"Backup dir {self.backup_path} cleared")
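Editor's note: a rough sketch of how this class is meant to be used around an install or upgrade; the path is illustrative, not from this commit.

    # sketch only - the path is illustrative
    from custom_components.hacs.hacsbase.backup import Backup

    backup = Backup("/config/www/community/some-plugin")
    backup.create()       # copy the current files into the temp backup dir, then remove the originals
    try:
        pass              # ... install the new version here ...
    except Exception:     # pylint: disable=broad-except
        backup.restore()  # put the old files back on failure
    finally:
        backup.cleanup()  # drop the temporary copy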
@ -0,0 +1,71 @@
"""HACS Configuration."""
import attr
from integrationhelper import Logger
from custom_components.hacs.hacsbase.exceptions import HacsException


@attr.s(auto_attribs=True)
class Configuration:
    """Configuration class."""

    # Main configuration:
    appdaemon_path: str = "appdaemon/apps/"
    appdaemon: bool = False
    config: dict = {}
    config_entry: dict = {}
    config_type: str = None
    debug: bool = False
    dev: bool = False
    frontend_mode: str = "Grid"
    frontend_compact: bool = False
    options: dict = {}
    onboarding_done: bool = False
    plugin_path: str = "www/community/"
    python_script_path: str = "python_scripts/"
    python_script: bool = False
    sidepanel_icon: str = "mdi:alpha-c-box"
    sidepanel_title: str = "Community"
    theme_path: str = "themes/"
    theme: bool = False
    token: str = None

    # Config options:
    country: str = "ALL"
    experimental: bool = False
    release_limit: int = 5

    def to_json(self):
        """Return a dict representation of the configuration."""
        return self.__dict__

    def print(self):
        """Print the current configuration to the log."""
        logger = Logger("hacs.configuration")
        config = self.to_json()
        for key in config:
            if key in ["config", "config_entry", "options", "token"]:
                continue
            logger.debug(f"{key}: {config[key]}")

    @staticmethod
    def from_dict(configuration: dict, options: dict):
        """Set attributes from dicts."""
        if isinstance(options, bool) or isinstance(configuration.get("options"), bool):
            raise HacsException("Configuration is not valid.")

        if options is None:
            options = {}

        if not configuration:
            raise HacsException("Configuration is not valid.")

        config = Configuration()

        config.config = configuration
        config.options = options

        for conf_type in [configuration, options]:
            for key in conf_type:
                setattr(config, key, conf_type[key])

        return config
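Editor's note: from_dict is what the integration setup calls with the YAML section (or config-entry data) plus the options dict. A minimal sketch with placeholder values, not part of this commit:

    # sketch: building a Configuration from YAML-style dicts (token is a placeholder)
    from custom_components.hacs.hacsbase.configuration import Configuration

    conf = Configuration.from_dict(
        {"token": "xxxxxxxx", "sidepanel_title": "HACS"},
        {"release_limit": 10},
    )
    conf.print()  # logs every option except config/config_entry/options/token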
@ -0,0 +1,10 @@
"""Constants for HACS"""
# pylint: disable=unused-import
STORAGE_VERSION = "6"
STORENAME = "hacs"

# Messages
NOT_SUPPORTED_HA_VERSION = "You have version '{}' of Home Assistant, but version '{}' of '{}' requires version '{}' of Home Assistant, install and upgrades are disabled for this integration until you upgrade Home Assistant."


NO_ELEMENTS = "No elements to show, open the store to install some awesome stuff."
@ -0,0 +1,152 @@
|
||||||
|
"""Data handler for HACS."""
|
||||||
|
from integrationhelper import Logger
|
||||||
|
from ..const import VERSION
|
||||||
|
from ..repositories.repository import HacsRepository
|
||||||
|
from ..repositories.manifest import HacsManifest
|
||||||
|
from ..store import async_save_to_store, async_load_from_store
|
||||||
|
|
||||||
|
from custom_components.hacs.globals import get_hacs, removed_repositories, get_removed
|
||||||
|
from custom_components.hacs.helpers.register_repository import register_repository
|
||||||
|
|
||||||
|
|
||||||
|
class HacsData:
|
||||||
|
"""HacsData class."""
|
||||||
|
|
||||||
|
def __init__(self):
|
||||||
|
"""Initialize."""
|
||||||
|
self.logger = Logger("hacs.data")
|
||||||
|
self.hacs = get_hacs()
|
||||||
|
|
||||||
|
async def async_write(self):
|
||||||
|
"""Write content to the store files."""
|
||||||
|
if self.hacs.system.status.background_task or self.hacs.system.disabled:
|
||||||
|
return
|
||||||
|
|
||||||
|
self.logger.debug("Saving data")
|
||||||
|
|
||||||
|
# Hacs
|
||||||
|
await async_save_to_store(
|
||||||
|
self.hacs.hass,
|
||||||
|
"hacs",
|
||||||
|
{
|
||||||
|
"view": self.hacs.configuration.frontend_mode,
|
||||||
|
"compact": self.hacs.configuration.frontend_compact,
|
||||||
|
"onboarding_done": self.hacs.configuration.onboarding_done,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
await async_save_to_store(
|
||||||
|
self.hacs.hass, "removed", [x.__dict__ for x in removed_repositories]
|
||||||
|
)
|
||||||
|
|
||||||
|
# Repositories
|
||||||
|
content = {}
|
||||||
|
for repository in self.hacs.repositories:
|
||||||
|
if repository.repository_manifest is not None:
|
||||||
|
repository_manifest = repository.repository_manifest.manifest
|
||||||
|
else:
|
||||||
|
repository_manifest = None
|
||||||
|
content[repository.information.uid] = {
|
||||||
|
"authors": repository.data.authors,
|
||||||
|
"category": repository.data.category,
|
||||||
|
"description": repository.data.description,
|
||||||
|
"downloads": repository.releases.downloads,
|
||||||
|
"full_name": repository.data.full_name,
|
||||||
|
"first_install": repository.status.first_install,
|
||||||
|
"hide": repository.status.hide,
|
||||||
|
"installed_commit": repository.versions.installed_commit,
|
||||||
|
"installed": repository.status.installed,
|
||||||
|
"last_commit": repository.versions.available_commit,
|
||||||
|
"last_release_tag": repository.versions.available,
|
||||||
|
"last_updated": repository.information.last_updated,
|
||||||
|
"name": repository.data.name,
|
||||||
|
"new": repository.status.new,
|
||||||
|
"repository_manifest": repository_manifest,
|
||||||
|
"selected_tag": repository.status.selected_tag,
|
||||||
|
"show_beta": repository.status.show_beta,
|
||||||
|
"stars": repository.data.stargazers_count,
|
||||||
|
"topics": repository.data.topics,
|
||||||
|
"version_installed": repository.versions.installed,
|
||||||
|
}
|
||||||
|
|
||||||
|
await async_save_to_store(self.hacs.hass, "repositories", content)
|
||||||
|
self.hacs.hass.bus.async_fire("hacs/repository", {})
|
||||||
|
self.hacs.hass.bus.fire("hacs/config", {})
|
||||||
|
|
||||||
|
async def restore(self):
|
||||||
|
"""Restore saved data."""
|
||||||
|
hacs = await async_load_from_store(self.hacs.hass, "hacs")
|
||||||
|
repositories = await async_load_from_store(self.hacs.hass, "repositories")
|
||||||
|
removed = await async_load_from_store(self.hacs.hass, "removed")
|
||||||
|
try:
|
||||||
|
if not hacs and not repositories:
|
||||||
|
# Assume new install
|
||||||
|
self.hacs.system.status.new = True
|
||||||
|
return True
|
||||||
|
self.logger.info("Restore started")
|
||||||
|
|
||||||
|
# Hacs
|
||||||
|
self.hacs.configuration.frontend_mode = hacs.get("view", "Grid")
|
||||||
|
self.hacs.configuration.frontend_compact = hacs.get("compact", False)
|
||||||
|
self.hacs.configuration.onboarding_done = hacs.get("onboarding_done", False)
|
||||||
|
|
||||||
|
for entry in removed:
|
||||||
|
removed_repo = get_removed(entry["repository"])
|
||||||
|
removed_repo.update_data(entry)
|
||||||
|
|
||||||
|
# Repositories
|
||||||
|
for entry in repositories:
|
||||||
|
repo = repositories[entry]
|
||||||
|
if not self.hacs.is_known(repo["full_name"]):
|
||||||
|
await register_repository(
|
||||||
|
repo["full_name"], repo["category"], False
|
||||||
|
)
|
||||||
|
repository = self.hacs.get_by_name(repo["full_name"])
|
||||||
|
if repository is None:
|
||||||
|
self.logger.error(f"Did not find {repo['full_name']}")
|
||||||
|
continue
|
||||||
|
|
||||||
|
# Restore repository attributes
|
||||||
|
repository.information.uid = entry
|
||||||
|
await self.hacs.hass.async_add_executor_job(
|
||||||
|
restore_repository_data, repository, repo
|
||||||
|
)
|
||||||
|
|
||||||
|
self.logger.info("Restore done")
|
||||||
|
except Exception as exception: # pylint: disable=broad-except
|
||||||
|
self.logger.critical(f"[{exception}] Restore Failed!")
|
||||||
|
return False
|
||||||
|
return True
|
||||||
|
|
||||||
|
|
||||||
|
def restore_repository_data(
|
||||||
|
repository: type(HacsRepository), repository_data: dict
|
||||||
|
) -> None:
|
||||||
|
"""Restore Repository Data"""
|
||||||
|
repository.data.authors = repository_data.get("authors", [])
|
||||||
|
repository.data.description = repository_data.get("description")
|
||||||
|
repository.releases.last_release_object_downloads = repository_data.get("downloads")
|
||||||
|
repository.information.last_updated = repository_data.get("last_updated")
|
||||||
|
repository.data.topics = repository_data.get("topics", [])
|
||||||
|
repository.data.stargazers_count = repository_data.get("stars", 0)
|
||||||
|
repository.releases.last_release = repository_data.get("last_release_tag")
|
||||||
|
repository.status.hide = repository_data.get("hide", False)
|
||||||
|
repository.status.installed = repository_data.get("installed", False)
|
||||||
|
repository.status.new = repository_data.get("new", True)
|
||||||
|
repository.status.selected_tag = repository_data.get("selected_tag")
|
||||||
|
repository.status.show_beta = repository_data.get("show_beta", False)
|
||||||
|
repository.versions.available = repository_data.get("last_release_tag")
|
||||||
|
repository.versions.available_commit = repository_data.get("last_commit")
|
||||||
|
repository.versions.installed = repository_data.get("version_installed")
|
||||||
|
repository.versions.installed_commit = repository_data.get("installed_commit")
|
||||||
|
|
||||||
|
repository.repository_manifest = HacsManifest.from_dict(
|
||||||
|
repository_data.get("repository_manifest", {})
|
||||||
|
)
|
||||||
|
|
||||||
|
if repository.status.installed:
|
||||||
|
repository.status.first_install = False
|
||||||
|
|
||||||
|
if repository_data["full_name"] == "hacs/integration":
|
||||||
|
repository.versions.installed = VERSION
|
||||||
|
repository.status.installed = True
|
|
@ -0,0 +1,9 @@
|
||||||
|
"""Custom Exceptions."""
|
||||||
|
|
||||||
|
|
||||||
|
class HacsException(Exception):
|
||||||
|
"""Super basic."""
|
||||||
|
|
||||||
|
|
||||||
|
class HacsExpectedException(HacsException):
|
||||||
|
"""For stuff that are expected."""
|
|
@ -0,0 +1,75 @@
|
||||||
|
# pylint: disable=missing-docstring,invalid-name
|
||||||
|
import logging
|
||||||
|
import time
|
||||||
|
from datetime import timedelta
|
||||||
|
import asyncio
|
||||||
|
from aiogithubapi import AIOGitHubException
|
||||||
|
|
||||||
|
from custom_components.hacs.hacsbase.exceptions import HacsException
|
||||||
|
from custom_components.hacs.helpers.register_repository import register_repository
|
||||||
|
|
||||||
|
|
||||||
|
max_concurrent_tasks = asyncio.Semaphore(15)
|
||||||
|
sleeper = 5
|
||||||
|
|
||||||
|
logger = logging.getLogger("hacs.factory")
|
||||||
|
|
||||||
|
|
||||||
|
class HacsTaskFactory:
|
||||||
|
def __init__(self):
|
||||||
|
self.tasks = []
|
||||||
|
self.running = False
|
||||||
|
|
||||||
|
async def execute(self):
|
||||||
|
if not self.tasks:
|
||||||
|
logger.debug("No tasks to execute")
|
||||||
|
return
|
||||||
|
if self.running:
|
||||||
|
logger.debug("Allready executing tasks")
|
||||||
|
return
|
||||||
|
try:
|
||||||
|
self.running = True
|
||||||
|
logger.info("Processing %s tasks", len(self.tasks))
|
||||||
|
start = time.time()
|
||||||
|
await asyncio.gather(*self.tasks)
|
||||||
|
logger.info(
|
||||||
|
"Task processing of %s tasks completed in %s seconds",
|
||||||
|
len(self.tasks),
|
||||||
|
timedelta(seconds=round(time.time() - start)).seconds,
|
||||||
|
)
|
||||||
|
self.tasks = []
|
||||||
|
self.running = False
|
||||||
|
except RuntimeError:
|
||||||
|
logger.warning("RuntimeError, Clearing current tasks")
|
||||||
|
self.tasks = []
|
||||||
|
self.running = False
|
||||||
|
|
||||||
|
async def safe_common_update(self, repository):
|
||||||
|
async with max_concurrent_tasks:
|
||||||
|
try:
|
||||||
|
await repository.common_update()
|
||||||
|
except (AIOGitHubException, HacsException) as exception:
|
||||||
|
logger.error("%s - %s", repository.data.full_name, exception)
|
||||||
|
|
||||||
|
# Due to GitHub ratelimits we need to sleep a bit
|
||||||
|
await asyncio.sleep(sleeper)
|
||||||
|
|
||||||
|
async def safe_update(self, repository):
|
||||||
|
async with max_concurrent_tasks:
|
||||||
|
try:
|
||||||
|
await repository.update_repository()
|
||||||
|
except (AIOGitHubException, HacsException) as exception:
|
||||||
|
logger.error("%s - %s", repository.data.full_name, exception)
|
||||||
|
|
||||||
|
# Due to GitHub ratelimits we need to sleep a bit
|
||||||
|
await asyncio.sleep(sleeper)
|
||||||
|
|
||||||
|
async def safe_register(self, repo, category):
|
||||||
|
async with max_concurrent_tasks:
|
||||||
|
try:
|
||||||
|
await register_repository(repo, category)
|
||||||
|
except (AIOGitHubException, HacsException) as exception:
|
||||||
|
logger.error("%s - %s", repo, exception)
|
||||||
|
|
||||||
|
# Due to GitHub ratelimits we need to sleep a bit
|
||||||
|
await asyncio.sleep(sleeper)
|
|
@ -0,0 +1 @@
|
||||||
|
"""Initialize handlers."""
|
|
@ -0,0 +1,90 @@
|
||||||
|
"""Download."""
|
||||||
|
import os
|
||||||
|
import gzip
|
||||||
|
import shutil
|
||||||
|
|
||||||
|
import aiofiles
|
||||||
|
import async_timeout
|
||||||
|
from integrationhelper import Logger
|
||||||
|
import backoff
|
||||||
|
from ..hacsbase.exceptions import HacsException
|
||||||
|
|
||||||
|
from custom_components.hacs.globals import get_hacs
|
||||||
|
|
||||||
|
|
||||||
|
@backoff.on_exception(backoff.expo, Exception, max_tries=5)
|
||||||
|
async def async_download_file(url):
|
||||||
|
"""
|
||||||
|
Download files, and return the content.
|
||||||
|
"""
|
||||||
|
hacs = get_hacs()
|
||||||
|
logger = Logger("hacs.download.downloader")
|
||||||
|
if url is None:
|
||||||
|
return
|
||||||
|
|
||||||
|
# There is a bug somewhere... TODO: Find that bug....
|
||||||
|
if "tags/" in url:
|
||||||
|
url = url.replace("tags/", "")
|
||||||
|
|
||||||
|
logger.debug(f"Downloading {url}")
|
||||||
|
|
||||||
|
result = None
|
||||||
|
|
||||||
|
with async_timeout.timeout(60, loop=hacs.hass.loop):
|
||||||
|
request = await hacs.session.get(url)
|
||||||
|
|
||||||
|
# Make sure that we got a valid result
|
||||||
|
if request.status == 200:
|
||||||
|
result = await request.read()
|
||||||
|
else:
|
||||||
|
raise HacsException(
|
||||||
|
"Got status code {} when trying to download {}".format(
|
||||||
|
request.status, url
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
|
async def async_save_file(location, content):
|
||||||
|
"""Save files."""
|
||||||
|
logger = Logger("hacs.download.save")
|
||||||
|
logger.debug(f"Saving {location}")
|
||||||
|
mode = "w"
|
||||||
|
encoding = "utf-8"
|
||||||
|
errors = "ignore"
|
||||||
|
|
||||||
|
if not isinstance(content, str):
|
||||||
|
mode = "wb"
|
||||||
|
encoding = None
|
||||||
|
errors = None
|
||||||
|
|
||||||
|
try:
|
||||||
|
async with aiofiles.open(
|
||||||
|
location, mode=mode, encoding=encoding, errors=errors
|
||||||
|
) as outfile:
|
||||||
|
await outfile.write(content)
|
||||||
|
outfile.close()
|
||||||
|
|
||||||
|
# Create gz for .js files
|
||||||
|
if os.path.isfile(location):
|
||||||
|
if location.endswith(".js") or location.endswith(".css"):
|
||||||
|
with open(location, "rb") as f_in:
|
||||||
|
with gzip.open(location + ".gz", "wb") as f_out:
|
||||||
|
shutil.copyfileobj(f_in, f_out)
|
||||||
|
|
||||||
|
# Remove with 2.0
|
||||||
|
if "themes" in location and location.endswith(".yaml"):
|
||||||
|
filename = location.split("/")[-1]
|
||||||
|
base = location.split("/themes/")[0]
|
||||||
|
combined = f"{base}/themes/{filename}"
|
||||||
|
if os.path.exists(combined):
|
||||||
|
logger.info(f"Removing old theme file {combined}")
|
||||||
|
os.remove(combined)
|
||||||
|
|
||||||
|
except Exception as error: # pylint: disable=broad-except
|
||||||
|
msg = "Could not write data to {} - {}".format(location, error)
|
||||||
|
logger.error(msg)
|
||||||
|
return False
|
||||||
|
|
||||||
|
return os.path.exists(location)
|
|
@ -0,0 +1,29 @@
|
||||||
|
"""Custom template support."""
|
||||||
|
# pylint: disable=broad-except
|
||||||
|
from jinja2 import Template
|
||||||
|
from integrationhelper import Logger
|
||||||
|
|
||||||
|
|
||||||
|
def render_template(content, context):
|
||||||
|
"""Render templates in content."""
|
||||||
|
# Fix None issues
|
||||||
|
if context.releases.last_release_object is not None:
|
||||||
|
prerelease = context.releases.last_release_object.prerelease
|
||||||
|
else:
|
||||||
|
prerelease = False
|
||||||
|
|
||||||
|
# Render the template
|
||||||
|
try:
|
||||||
|
render = Template(content)
|
||||||
|
render = render.render(
|
||||||
|
installed=context.status.installed,
|
||||||
|
pending_update=context.pending_upgrade,
|
||||||
|
prerelease=prerelease,
|
||||||
|
selected_tag=context.status.selected_tag,
|
||||||
|
version_available=context.releases.last_release,
|
||||||
|
version_installed=context.display_installed_version,
|
||||||
|
)
|
||||||
|
return render
|
||||||
|
except Exception as exception:
|
||||||
|
Logger("hacs.template").debug(exception)
|
||||||
|
return content
|
|
@ -0,0 +1,180 @@
|
||||||
|
"""Helpers to download repository content."""
|
||||||
|
import pathlib
|
||||||
|
import tempfile
|
||||||
|
import zipfile
|
||||||
|
from custom_components.hacs.hacsbase.exceptions import HacsException
|
||||||
|
from custom_components.hacs.handler.download import async_download_file, async_save_file
|
||||||
|
from custom_components.hacs.helpers.filters import filter_content_return_one_of_type
|
||||||
|
|
||||||
|
|
||||||
|
class FileInformation:
|
||||||
|
def __init__(self, url, path, name):
|
||||||
|
self.download_url = url
|
||||||
|
self.path = path
|
||||||
|
self.name = name
|
||||||
|
|
||||||
|
|
||||||
|
def should_try_releases(repository):
|
||||||
|
"""Return a boolean indicating whether to download releases or not."""
|
||||||
|
if repository.data.zip_release:
|
||||||
|
if repository.data.filename.endswith(".zip"):
|
||||||
|
if repository.ref != repository.data.default_branch:
|
||||||
|
return True
|
||||||
|
if repository.ref == repository.data.default_branch:
|
||||||
|
return False
|
||||||
|
if repository.data.category not in ["plugin", "theme"]:
|
||||||
|
return False
|
||||||
|
if not repository.releases.releases:
|
||||||
|
return False
|
||||||
|
return True
|
||||||
|
|
||||||
|
|
||||||
|
def gather_files_to_download(repository):
|
||||||
|
"""Return a list of file objects to be downloaded."""
|
||||||
|
files = []
|
||||||
|
tree = repository.tree
|
||||||
|
ref = f"{repository.ref}".replace("tags/", "")
|
||||||
|
releaseobjects = repository.releases.objects
|
||||||
|
category = repository.data.category
|
||||||
|
remotelocation = repository.content.path.remote
|
||||||
|
|
||||||
|
if should_try_releases(repository):
|
||||||
|
for release in releaseobjects or []:
|
||||||
|
if ref == release.tag_name:
|
||||||
|
for asset in release.assets or []:
|
||||||
|
files.append(asset)
|
||||||
|
if files:
|
||||||
|
return files
|
||||||
|
|
||||||
|
if repository.content.single:
|
||||||
|
for treefile in tree:
|
||||||
|
if treefile.filename == repository.data.file_name:
|
||||||
|
files.append(
|
||||||
|
FileInformation(
|
||||||
|
treefile.download_url, treefile.full_path, treefile.filename
|
||||||
|
)
|
||||||
|
)
|
||||||
|
return files
|
||||||
|
|
||||||
|
if category == "plugin":
|
||||||
|
for treefile in tree:
|
||||||
|
if treefile.path in ["", "dist"]:
|
||||||
|
if remotelocation == "dist" and not treefile.filename.startswith(
|
||||||
|
"dist"
|
||||||
|
):
|
||||||
|
continue
|
||||||
|
if not remotelocation:
|
||||||
|
if not treefile.filename.endswith(".js"):
|
||||||
|
continue
|
||||||
|
if treefile.path != "":
|
||||||
|
continue
|
||||||
|
if not treefile.is_directory:
|
||||||
|
files.append(
|
||||||
|
FileInformation(
|
||||||
|
treefile.download_url, treefile.full_path, treefile.filename
|
||||||
|
)
|
||||||
|
)
|
||||||
|
if files:
|
||||||
|
return files
|
||||||
|
|
||||||
|
if repository.data.content_in_root:
|
||||||
|
if not repository.data.filename:
|
||||||
|
if category == "theme":
|
||||||
|
tree = filter_content_return_one_of_type(
|
||||||
|
repository.tree, "", "yaml", "full_path"
|
||||||
|
)
|
||||||
|
|
||||||
|
for path in tree:
|
||||||
|
if path.is_directory:
|
||||||
|
continue
|
||||||
|
if path.full_path.startswith(repository.content.path.remote):
|
||||||
|
files.append(
|
||||||
|
FileInformation(path.download_url, path.full_path, path.filename)
|
||||||
|
)
|
||||||
|
return files
|
||||||
|
|
||||||
|
|
||||||
|
async def download_zip(repository, validate):
|
||||||
|
"""Download ZIP archive from repository release."""
|
||||||
|
contents = []
|
||||||
|
try:
|
||||||
|
for release in repository.releases.objects:
|
||||||
|
repository.logger.info(
|
||||||
|
f"ref: {repository.ref} --- tag: {release.tag_name}"
|
||||||
|
)
|
||||||
|
if release.tag_name == repository.ref.split("/")[1]:
|
||||||
|
contents = release.assets
|
||||||
|
|
||||||
|
if not contents:
|
||||||
|
return validate
|
||||||
|
|
||||||
|
for content in contents:
|
||||||
|
filecontent = await async_download_file(content.download_url)
|
||||||
|
|
||||||
|
if filecontent is None:
|
||||||
|
validate.errors.append(f"[{content.name}] was not downloaded.")
|
||||||
|
continue
|
||||||
|
|
||||||
|
result = await async_save_file(
|
||||||
|
f"{tempfile.gettempdir()}/{repository.data.filename}", filecontent
|
||||||
|
)
|
||||||
|
with zipfile.ZipFile(
|
||||||
|
f"{tempfile.gettempdir()}/{repository.data.filename}", "r"
|
||||||
|
) as zip_file:
|
||||||
|
zip_file.extractall(repository.content.path.local)
|
||||||
|
|
||||||
|
if result:
|
||||||
|
repository.logger.info(f"download of {content.name} complete")
|
||||||
|
continue
|
||||||
|
validate.errors.append(f"[{content.name}] was not downloaded.")
|
||||||
|
except Exception as exception: # pylint: disable=broad-except
|
||||||
|
validate.errors.append(f"Download was not complete [{exception}]")
|
||||||
|
|
||||||
|
return validate
|
||||||
|
|
||||||
|
|
||||||
|
async def download_content(repository):
|
||||||
|
"""Download the content of a directory."""
|
||||||
|
contents = gather_files_to_download(repository)
|
||||||
|
if not contents:
|
||||||
|
raise HacsException("No content to download")
|
||||||
|
|
||||||
|
for content in contents:
|
||||||
|
if repository.data.content_in_root and repository.data.filename is not None:
|
||||||
|
if content.name != repository.data.filename:
|
||||||
|
continue
|
||||||
|
repository.logger.debug(f"Downloading {content.name}")
|
||||||
|
|
||||||
|
filecontent = await async_download_file(content.download_url)
|
||||||
|
|
||||||
|
if filecontent is None:
|
||||||
|
repository.validate.errors.append(f"[{content.name}] was not downloaded.")
|
||||||
|
continue
|
||||||
|
|
||||||
|
# Save the content of the file.
|
||||||
|
if repository.content.single or content.path is None:
|
||||||
|
local_directory = repository.content.path.local
|
||||||
|
|
||||||
|
else:
|
||||||
|
_content_path = content.path
|
||||||
|
if not repository.data.content_in_root:
|
||||||
|
_content_path = _content_path.replace(
|
||||||
|
f"{repository.content.path.remote}", ""
|
||||||
|
)
|
||||||
|
|
||||||
|
local_directory = f"{repository.content.path.local}/{_content_path}"
|
||||||
|
local_directory = local_directory.split("/")
|
||||||
|
del local_directory[-1]
|
||||||
|
local_directory = "/".join(local_directory)
|
||||||
|
|
||||||
|
# Check local directory
|
||||||
|
pathlib.Path(local_directory).mkdir(parents=True, exist_ok=True)
|
||||||
|
|
||||||
|
local_file_path = (f"{local_directory}/{content.name}").replace("//", "/")
|
||||||
|
|
||||||
|
result = await async_save_file(local_file_path, filecontent)
|
||||||
|
if result:
|
||||||
|
repository.logger.info(f"download of {content.name} complete")
|
||||||
|
continue
|
||||||
|
repository.validate.errors.append(f"[{content.name}] was not downloaded.")
|
||||||
|
|
|
@ -0,0 +1,55 @@
|
||||||
|
"""Filter functions."""
|
||||||
|
|
||||||
|
|
||||||
|
def filter_content_return_one_of_type(
|
||||||
|
content, namestartswith, filterfiltype, attr="name"
|
||||||
|
):
|
||||||
|
"""Only match 1 of the filter."""
|
||||||
|
contents = []
|
||||||
|
filetypefound = False
|
||||||
|
for filename in content:
|
||||||
|
if isinstance(filename, str):
|
||||||
|
if filename.startswith(namestartswith):
|
||||||
|
if filename.endswith(f".{filterfiltype}"):
|
||||||
|
if not filetypefound:
|
||||||
|
contents.append(filename)
|
||||||
|
filetypefound = True
|
||||||
|
continue
|
||||||
|
else:
|
||||||
|
contents.append(filename)
|
||||||
|
else:
|
||||||
|
if getattr(filename, attr).startswith(namestartswith):
|
||||||
|
if getattr(filename, attr).endswith(f".{filterfiltype}"):
|
||||||
|
if not filetypefound:
|
||||||
|
contents.append(filename)
|
||||||
|
filetypefound = True
|
||||||
|
continue
|
||||||
|
else:
|
||||||
|
contents.append(filename)
|
||||||
|
return contents
|
||||||
|
|
||||||
|
|
||||||
|
def find_first_of_filetype(content, filterfiltype, attr="name"):
|
||||||
|
"""Find the first of the file type."""
|
||||||
|
filename = ""
|
||||||
|
for _filename in content:
|
||||||
|
if isinstance(_filename, str):
|
||||||
|
if _filename.endswith(f".{filterfiltype}"):
|
||||||
|
filename = _filename
|
||||||
|
break
|
||||||
|
else:
|
||||||
|
if getattr(_filename, attr).endswith(f".{filterfiltype}"):
|
||||||
|
filename = getattr(_filename, attr)
|
||||||
|
break
|
||||||
|
return filename
|
||||||
|
|
||||||
|
|
||||||
|
def get_first_directory_in_directory(content, dirname):
|
||||||
|
"""Return the first directory in dirname or None."""
|
||||||
|
directory = None
|
||||||
|
for path in content:
|
||||||
|
if path.full_path.startswith(dirname) and path.full_path != dirname:
|
||||||
|
if path.is_directory:
|
||||||
|
directory = path.filename
|
||||||
|
break
|
||||||
|
return directory
|
|
@ -0,0 +1,44 @@
|
||||||
|
"""Helpers to get default repositories."""
|
||||||
|
import json
|
||||||
|
from aiogithubapi import AIOGitHub, AIOGitHubException
|
||||||
|
from integrationhelper import Logger
|
||||||
|
from custom_components.hacs.helpers.information import get_repository
|
||||||
|
|
||||||
|
|
||||||
|
async def get_default_repos_orgs(github: type(AIOGitHub), category: str) -> dict:
|
||||||
|
"""Gets default org repositories."""
|
||||||
|
repositories = []
|
||||||
|
logger = Logger("hacs")
|
||||||
|
orgs = {
|
||||||
|
"plugin": "custom-cards",
|
||||||
|
"integration": "custom-components",
|
||||||
|
"theme": "home-assistant-community-themes",
|
||||||
|
}
|
||||||
|
if category not in orgs:
|
||||||
|
return repositories
|
||||||
|
|
||||||
|
try:
|
||||||
|
repos = await github.get_org_repos(orgs[category])
|
||||||
|
for repo in repos:
|
||||||
|
repositories.append(repo.full_name)
|
||||||
|
|
||||||
|
except AIOGitHubException as exception:
|
||||||
|
logger.error(exception)
|
||||||
|
|
||||||
|
return repositories
|
||||||
|
|
||||||
|
|
||||||
|
async def get_default_repos_lists(session, token, default: str) -> dict:
|
||||||
|
"""Gets repositories from default list."""
|
||||||
|
repositories = []
|
||||||
|
logger = Logger("hacs")
|
||||||
|
|
||||||
|
try:
|
||||||
|
repo = await get_repository(session, token, "hacs/default")
|
||||||
|
content = await repo.get_contents(default)
|
||||||
|
repositories = json.loads(content.content)
|
||||||
|
|
||||||
|
except AIOGitHubException as exception:
|
||||||
|
logger.error(exception)
|
||||||
|
|
||||||
|
return repositories
|
|
@ -0,0 +1,181 @@
|
||||||
|
"""Return repository information if any."""
|
||||||
|
import json
|
||||||
|
from aiogithubapi import AIOGitHubException, AIOGitHub
|
||||||
|
from custom_components.hacs.handler.template import render_template
|
||||||
|
from custom_components.hacs.hacsbase.exceptions import HacsException
|
||||||
|
|
||||||
|
|
||||||
|
def info_file(repository):
|
||||||
|
"""get info filename."""
|
||||||
|
if repository.data.render_readme:
|
||||||
|
for filename in ["readme", "readme.md", "README", "README.md", "README.MD"]:
|
||||||
|
if filename in repository.treefiles:
|
||||||
|
return filename
|
||||||
|
return ""
|
||||||
|
for filename in ["info", "info.md", "INFO", "INFO.md", "INFO.MD"]:
|
||||||
|
if filename in repository.treefiles:
|
||||||
|
return filename
|
||||||
|
return ""
|
||||||
|
|
||||||
|
|
||||||
|
async def get_info_md_content(repository):
|
||||||
|
"""Get the content of info.md"""
|
||||||
|
filename = info_file(repository)
|
||||||
|
if not filename:
|
||||||
|
return ""
|
||||||
|
try:
|
||||||
|
info = await repository.repository_object.get_contents(filename, repository.ref)
|
||||||
|
if info is None:
|
||||||
|
return ""
|
||||||
|
info = info.content.replace("<svg", "<disabled").replace("</svg", "</disabled")
|
||||||
|
return render_template(info, repository)
|
||||||
|
except (AIOGitHubException, Exception): # pylint: disable=broad-except
|
||||||
|
return ""
|
||||||
|
|
||||||
|
|
||||||
|
async def get_repository(session, token, repository_full_name):
|
||||||
|
"""Return a repository object or None."""
|
||||||
|
try:
|
||||||
|
github = AIOGitHub(token, session)
|
||||||
|
repository = await github.get_repo(repository_full_name)
|
||||||
|
return repository
|
||||||
|
except AIOGitHubException as exception:
|
||||||
|
raise HacsException(exception)
|
||||||
|
|
||||||
|
|
||||||
|
async def get_tree(repository, ref):
|
||||||
|
"""Return the repository tree."""
|
||||||
|
try:
|
||||||
|
tree = await repository.get_tree(ref)
|
||||||
|
return tree
|
||||||
|
except AIOGitHubException as exception:
|
||||||
|
raise HacsException(exception)
|
||||||
|
|
||||||
|
|
||||||
|
async def get_releases(repository, prerelease=False, returnlimit=5):
|
||||||
|
"""Return the repository releases."""
|
||||||
|
try:
|
||||||
|
releases = await repository.get_releases(prerelease, returnlimit)
|
||||||
|
return releases
|
||||||
|
except AIOGitHubException as exception:
|
||||||
|
raise HacsException(exception)
|
||||||
|
|
||||||
|
|
||||||
|
async def get_integration_manifest(repository):
|
||||||
|
"""Return the integration manifest."""
|
||||||
|
manifest_path = f"{repository.content.path.remote}/manifest.json"
|
||||||
|
if not manifest_path in [x.full_path for x in repository.tree]:
|
||||||
|
raise HacsException(f"No file found '{manifest_path}'")
|
||||||
|
try:
|
||||||
|
manifest = await repository.repository_object.get_contents(
|
||||||
|
manifest_path, repository.ref
|
||||||
|
)
|
||||||
|
manifest = json.loads(manifest.content)
|
||||||
|
except Exception as exception: # pylint: disable=broad-except
|
||||||
|
raise HacsException(f"Could not read manifest.json [{exception}]")
|
||||||
|
|
||||||
|
try:
|
||||||
|
repository.integration_manifest = manifest
|
||||||
|
repository.data.authors = manifest["codeowners"]
|
||||||
|
repository.data.domain = manifest["domain"]
|
||||||
|
repository.data.manifest_name = manifest["name"]
|
||||||
|
repository.data.homeassistant = manifest.get("homeassistant")
|
||||||
|
|
||||||
|
# Set local path
|
||||||
|
repository.content.path.local = repository.localpath
|
||||||
|
|
||||||
|
except KeyError as exception:
|
||||||
|
raise HacsException(f"Missing expected key {exception} in 'manifest.json'")
|
||||||
|
|
||||||
|
|
||||||
|
def find_file_name(repository):
|
||||||
|
"""Get the filename to target."""
|
||||||
|
if repository.data.category == "plugin":
|
||||||
|
get_file_name_plugin(repository)
|
||||||
|
elif repository.data.category == "integration":
|
||||||
|
get_file_name_integration(repository)
|
||||||
|
elif repository.data.category == "theme":
|
||||||
|
get_file_name_theme(repository)
|
||||||
|
elif repository.data.category == "appdaemon":
|
||||||
|
get_file_name_appdaemon(repository)
|
||||||
|
elif repository.data.category == "python_script":
|
||||||
|
get_file_name_python_script(repository)
|
||||||
|
|
||||||
|
|
||||||
|
def get_file_name_plugin(repository):
|
||||||
|
"""Get the filename to target."""
|
||||||
|
tree = repository.tree
|
||||||
|
releases = repository.releases.objects
|
||||||
|
|
||||||
|
if repository.data.content_in_root:
|
||||||
|
possible_locations = [""]
|
||||||
|
else:
|
||||||
|
possible_locations = ["release", "dist", ""]
|
||||||
|
|
||||||
|
# Handler for plug requirement 3
|
||||||
|
if repository.data.filename:
|
||||||
|
valid_filenames = [repository.data.filename]
|
||||||
|
else:
|
||||||
|
valid_filenames = [
|
||||||
|
f"{repository.data.name.replace('lovelace-', '')}.js",
|
||||||
|
f"{repository.data.name}.js",
|
||||||
|
f"{repository.data.name}.umd.js",
|
||||||
|
f"{repository.data.name}-bundle.js",
|
||||||
|
]
|
||||||
|
|
||||||
|
for location in possible_locations:
|
||||||
|
if location == "release":
|
||||||
|
if not releases:
|
||||||
|
continue
|
||||||
|
release = releases[0]
|
||||||
|
if not release.assets:
|
||||||
|
continue
|
||||||
|
asset = release.assets[0]
|
||||||
|
for filename in valid_filenames:
|
||||||
|
if filename == asset.name:
|
||||||
|
repository.data.file_name = filename
|
||||||
|
repository.content.path.remote = "release"
|
||||||
|
break
|
||||||
|
|
||||||
|
else:
|
||||||
|
for filename in valid_filenames:
|
||||||
|
if f"{location+'/' if location else ''}{filename}" in [
|
||||||
|
x.full_path for x in tree
|
||||||
|
]:
|
||||||
|
repository.data.file_name = filename
|
||||||
|
repository.content.path.remote = location
|
||||||
|
break
|
||||||
|
|
||||||
|
|
||||||
|
def get_file_name_integration(repository):
|
||||||
|
"""Get the filename to target."""
|
||||||
|
tree = repository.tree
|
||||||
|
releases = repository.releases.objects
|
||||||
|
|
||||||
|
|
||||||
|
def get_file_name_theme(repository):
|
||||||
|
"""Get the filename to target."""
|
||||||
|
tree = repository.tree
|
||||||
|
|
||||||
|
for treefile in tree:
|
||||||
|
if treefile.full_path.startswith(
|
||||||
|
repository.content.path.remote
|
||||||
|
) and treefile.full_path.endswith(".yaml"):
|
||||||
|
repository.data.file_name = treefile.filename
|
||||||
|
|
||||||
|
|
||||||
|
def get_file_name_appdaemon(repository):
|
||||||
|
"""Get the filename to target."""
|
||||||
|
tree = repository.tree
|
||||||
|
releases = repository.releases.objects
|
||||||
|
|
||||||
|
|
||||||
|
def get_file_name_python_script(repository):
|
||||||
|
"""Get the filename to target."""
|
||||||
|
tree = repository.tree
|
||||||
|
|
||||||
|
for treefile in tree:
|
||||||
|
if treefile.full_path.startswith(
|
||||||
|
repository.content.path.remote
|
||||||
|
) and treefile.full_path.endswith(".py"):
|
||||||
|
repository.data.file_name = treefile.filename
|
|
@ -0,0 +1,116 @@
|
||||||
|
"""Install helper for repositories."""
|
||||||
|
import os
|
||||||
|
import tempfile
|
||||||
|
from custom_components.hacs.globals import get_hacs
|
||||||
|
from custom_components.hacs.hacsbase.exceptions import HacsException
|
||||||
|
from custom_components.hacs.hacsbase.backup import Backup
|
||||||
|
from custom_components.hacs.helpers.download import download_content
|
||||||
|
|
||||||
|
|
||||||
|
async def install_repository(repository):
|
||||||
|
"""Common installation steps of the repository."""
|
||||||
|
persistent_directory = None
|
||||||
|
await repository.update_repository()
|
||||||
|
|
||||||
|
if not repository.can_install:
|
||||||
|
raise HacsException(
|
||||||
|
"The version of Home Assistant is not compatible with this version"
|
||||||
|
)
|
||||||
|
|
||||||
|
version = version_to_install(repository)
|
||||||
|
if version == repository.data.default_branch:
|
||||||
|
repository.ref = version
|
||||||
|
else:
|
||||||
|
repository.ref = f"tags/{version}"
|
||||||
|
|
||||||
|
if repository.data.persistent_directory:
|
||||||
|
if os.path.exists(
|
||||||
|
f"{repository.content.path.local}/{repository.data.persistent_directory}"
|
||||||
|
):
|
||||||
|
persistent_directory = Backup(
|
||||||
|
f"{repository.content.path.local}/{repository.data.persistent_directory}",
|
||||||
|
tempfile.gettempdir() + "/hacs_persistent_directory/",
|
||||||
|
)
|
||||||
|
persistent_directory.create()
|
||||||
|
|
||||||
|
if repository.status.installed and not repository.content.single:
|
||||||
|
backup = Backup(repository.content.path.local)
|
||||||
|
backup.create()
|
||||||
|
|
||||||
|
if repository.data.zip_release and version != repository.data.default_branch:
|
||||||
|
await repository.download_zip(repository)
|
||||||
|
else:
|
||||||
|
await download_content(repository)
|
||||||
|
|
||||||
|
if repository.validate.errors:
|
||||||
|
for error in repository.validate.errors:
|
||||||
|
repository.logger.error(error)
|
||||||
|
if repository.status.installed and not repository.content.single:
|
||||||
|
backup.restore()
|
||||||
|
|
||||||
|
if repository.status.installed and not repository.content.single:
|
||||||
|
backup.cleanup()
|
||||||
|
|
||||||
|
if persistent_directory is not None:
|
||||||
|
persistent_directory.restore()
|
||||||
|
persistent_directory.cleanup()
|
||||||
|
|
||||||
|
if repository.validate.success:
|
||||||
|
if repository.data.full_name not in repository.hacs.common.installed:
|
||||||
|
if repository.data.full_name == "hacs/integration":
|
||||||
|
repository.hacs.common.installed.append(repository.data.full_name)
|
||||||
|
repository.status.installed = True
|
||||||
|
repository.versions.installed_commit = repository.versions.available_commit
|
||||||
|
|
||||||
|
if version == repository.data.default_branch:
|
||||||
|
repository.versions.installed = None
|
||||||
|
else:
|
||||||
|
repository.versions.installed = version
|
||||||
|
|
||||||
|
await reload_after_install(repository)
|
||||||
|
installation_complete(repository)
|
||||||
|
|
||||||
|
|
||||||
|
async def reload_after_install(repository):
|
||||||
|
"""Reload action after installation success."""
|
||||||
|
if repository.data.category == "integration":
|
||||||
|
if repository.config_flow:
|
||||||
|
if repository.data.full_name != "hacs/integration":
|
||||||
|
await repository.reload_custom_components()
|
||||||
|
repository.pending_restart = True
|
||||||
|
|
||||||
|
elif repository.data.category == "theme":
|
||||||
|
try:
|
||||||
|
await repository.hacs.hass.services.async_call(
|
||||||
|
"frontend", "reload_themes", {}
|
||||||
|
)
|
||||||
|
except Exception: # pylint: disable=broad-except
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
def installation_complete(repository):
|
||||||
|
"""Action to run when the installation is complete."""
|
||||||
|
hacs = get_hacs()
|
||||||
|
hacs.hass.bus.async_fire(
|
||||||
|
"hacs/repository",
|
||||||
|
{"id": 1337, "action": "install", "repository": repository.data.full_name},
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def version_to_install(repository):
|
||||||
|
"""Determine which version to isntall."""
|
||||||
|
if repository.versions.available is not None:
|
||||||
|
if repository.status.selected_tag is not None:
|
||||||
|
if repository.status.selected_tag == repository.versions.available:
|
||||||
|
repository.status.selected_tag = None
|
||||||
|
return repository.versions.available
|
||||||
|
return repository.status.selected_tag
|
||||||
|
return repository.versions.available
|
||||||
|
if repository.status.selected_tag is not None:
|
||||||
|
if repository.status.selected_tag == repository.data.default_branch:
|
||||||
|
return repository.data.default_branch
|
||||||
|
if repository.status.selected_tag in repository.releases.published_tags:
|
||||||
|
return repository.status.selected_tag
|
||||||
|
if repository.data.default_branch is None:
|
||||||
|
return "master"
|
||||||
|
return repository.data.default_branch
|
|
@ -0,0 +1,30 @@
|
||||||
|
"""Helper functions: misc"""
|
||||||
|
import semantic_version
|
||||||
|
|
||||||
|
|
||||||
|
def get_repository_name(repository) -> str:
|
||||||
|
"""Return the name of the repository for use in the frontend."""
|
||||||
|
|
||||||
|
if repository.repository_manifest.name is not None:
|
||||||
|
return repository.repository_manifest.name
|
||||||
|
|
||||||
|
if repository.data.category == "integration":
|
||||||
|
if repository.integration_manifest:
|
||||||
|
if "name" in repository.integration_manifest:
|
||||||
|
return repository.integration_manifest["name"]
|
||||||
|
|
||||||
|
return (
|
||||||
|
repository.data.full_name.split("/")[-1]
|
||||||
|
.replace("-", " ")
|
||||||
|
.replace("_", " ")
|
||||||
|
.title()
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def version_left_higher_then_right(new: str, old: str) -> bool:
|
||||||
|
"""Return a bool if source is newer than target, will also be true if identical."""
|
||||||
|
if not isinstance(new, str) or not isinstance(old, str):
|
||||||
|
return False
|
||||||
|
if new == old:
|
||||||
|
return True
|
||||||
|
return semantic_version.Version.coerce(new) > semantic_version.Version.coerce(old)
|
|
@ -0,0 +1,20 @@
|
||||||
|
"""Verify network."""
|
||||||
|
import socket
|
||||||
|
|
||||||
|
|
||||||
|
def internet_connectivity_check():
|
||||||
|
"""Verify network connectivity."""
|
||||||
|
hosts = [{"host": "github.com", "port": 443, "connection": False}]
|
||||||
|
|
||||||
|
for host in hosts:
|
||||||
|
try:
|
||||||
|
socket.setdefaulttimeout(3)
|
||||||
|
socket.socket(socket.AF_INET, socket.SOCK_STREAM).connect(
|
||||||
|
(host["host"], host["port"])
|
||||||
|
)
|
||||||
|
|
||||||
|
host["connection"] = True
|
||||||
|
except Exception: # pylint: disable=broad-except
|
||||||
|
host["connection"] = False
|
||||||
|
|
||||||
|
return False not in [x["connection"] for x in hosts]
|
|
@ -0,0 +1,49 @@
|
||||||
|
"""Register a repository."""
|
||||||
|
from aiogithubapi import AIOGitHubException
|
||||||
|
from custom_components.hacs.globals import get_hacs
|
||||||
|
from custom_components.hacs.hacsbase.exceptions import (
|
||||||
|
HacsException,
|
||||||
|
HacsExpectedException,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
async def register_repository(full_name, category, check=True):
|
||||||
|
"""Register a repository."""
|
||||||
|
hacs = get_hacs()
|
||||||
|
from custom_components.hacs.repositories import (
|
||||||
|
RERPOSITORY_CLASSES,
|
||||||
|
) # To hanle import error
|
||||||
|
|
||||||
|
if full_name in hacs.common.skip:
|
||||||
|
if full_name != "hacs/integration":
|
||||||
|
raise HacsExpectedException(f"Skipping {full_name}")
|
||||||
|
|
||||||
|
if category not in RERPOSITORY_CLASSES:
|
||||||
|
raise HacsException(f"{category} is not a valid repository category.")
|
||||||
|
|
||||||
|
repository = RERPOSITORY_CLASSES[category](full_name)
|
||||||
|
if check:
|
||||||
|
try:
|
||||||
|
await repository.registration()
|
||||||
|
if hacs.system.status.new:
|
||||||
|
repository.status.new = False
|
||||||
|
if repository.validate.errors:
|
||||||
|
hacs.common.skip.append(repository.data.full_name)
|
||||||
|
if not hacs.system.status.startup:
|
||||||
|
hacs.logger.error(f"Validation for {full_name} failed.")
|
||||||
|
return repository.validate.errors
|
||||||
|
repository.logger.info("Registration complete")
|
||||||
|
except AIOGitHubException as exception:
|
||||||
|
hacs.common.skip.append(repository.data.full_name)
|
||||||
|
raise HacsException(f"Validation for {full_name} failed with {exception}.")
|
||||||
|
|
||||||
|
hacs.hass.bus.async_fire(
|
||||||
|
"hacs/repository",
|
||||||
|
{
|
||||||
|
"id": 1337,
|
||||||
|
"action": "registration",
|
||||||
|
"repository": repository.data.full_name,
|
||||||
|
"repository_id": repository.information.uid,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
hacs.repositories.append(repository)
|
|
@ -0,0 +1,90 @@
|
||||||
|
"""Helper to do common validation for repositories."""
|
||||||
|
from aiogithubapi import AIOGitHubException
|
||||||
|
from custom_components.hacs.globals import get_hacs, is_removed
|
||||||
|
from custom_components.hacs.hacsbase.exceptions import HacsException
|
||||||
|
from custom_components.hacs.helpers.install import version_to_install
|
||||||
|
from custom_components.hacs.helpers.information import (
|
||||||
|
get_repository,
|
||||||
|
get_tree,
|
||||||
|
get_releases,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
async def common_validate(repository):
|
||||||
|
"""Common validation steps of the repository."""
|
||||||
|
repository.validate.errors = []
|
||||||
|
|
||||||
|
# Make sure the repository exist.
|
||||||
|
repository.logger.debug("Checking repository.")
|
||||||
|
await common_update_data(repository)
|
||||||
|
|
||||||
|
# Step 6: Get the content of hacs.json
|
||||||
|
await repository.get_repository_manifest_content()
|
||||||
|
|
||||||
|
|
||||||
|
async def common_update_data(repository):
|
||||||
|
"""Common update data."""
|
||||||
|
hacs = get_hacs()
|
||||||
|
try:
|
||||||
|
repository_object = await get_repository(
|
||||||
|
hacs.session, hacs.configuration.token, repository.data.full_name
|
||||||
|
)
|
||||||
|
repository.repository_object = repository_object
|
||||||
|
repository.data.update_data(repository_object.attributes)
|
||||||
|
except (AIOGitHubException, HacsException) as exception:
|
||||||
|
if not hacs.system.status.startup:
|
||||||
|
repository.logger.error(exception)
|
||||||
|
repository.validate.errors.append("Repository does not exist.")
|
||||||
|
raise HacsException(exception)
|
||||||
|
|
||||||
|
# Make sure the repository is not archived.
|
||||||
|
if repository.data.archived:
|
||||||
|
repository.validate.errors.append("Repository is archived.")
|
||||||
|
raise HacsException("Repository is archived.")
|
||||||
|
|
||||||
|
# Make sure the repository is not in the blacklist.
|
||||||
|
if is_removed(repository.data.full_name):
|
||||||
|
repository.validate.errors.append("Repository is in the blacklist.")
|
||||||
|
raise HacsException("Repository is in the blacklist.")
|
||||||
|
|
||||||
|
# Get releases.
|
||||||
|
try:
|
||||||
|
releases = await get_releases(
|
||||||
|
repository.repository_object,
|
||||||
|
repository.status.show_beta,
|
||||||
|
hacs.configuration.release_limit,
|
||||||
|
)
|
||||||
|
if releases:
|
||||||
|
repository.releases.releases = True
|
||||||
|
repository.releases.objects = releases
|
||||||
|
repository.releases.published_tags = [
|
||||||
|
x.tag_name for x in releases if not x.draft
|
||||||
|
]
|
||||||
|
repository.versions.available = next(iter(releases)).tag_name
|
||||||
|
for release in releases:
|
||||||
|
if release.tag_name == repository.ref:
|
||||||
|
assets = release.assets
|
||||||
|
if assets:
|
||||||
|
downloads = next(iter(assets)).attributes.get("download_count")
|
||||||
|
repository.releases.downloads = downloads
|
||||||
|
|
||||||
|
except (AIOGitHubException, HacsException):
|
||||||
|
repository.releases.releases = False
|
||||||
|
|
||||||
|
repository.ref = version_to_install(repository)
|
||||||
|
|
||||||
|
repository.logger.debug(
|
||||||
|
f"Running checks against {repository.ref.replace('tags/', '')}"
|
||||||
|
)
|
||||||
|
|
||||||
|
try:
|
||||||
|
repository.tree = await get_tree(repository.repository_object, repository.ref)
|
||||||
|
if not repository.tree:
|
||||||
|
raise HacsException("No files in tree")
|
||||||
|
repository.treefiles = []
|
||||||
|
for treefile in repository.tree:
|
||||||
|
repository.treefiles.append(treefile.full_path)
|
||||||
|
except (AIOGitHubException, HacsException) as exception:
|
||||||
|
if not hacs.system.status.startup:
|
||||||
|
repository.logger.error(exception)
|
||||||
|
raise HacsException(exception)
|
|
@ -0,0 +1,83 @@
|
||||||
|
"""HACS http endpoints."""
|
||||||
|
import os
|
||||||
|
from integrationhelper import Logger
|
||||||
|
from homeassistant.components.http import HomeAssistantView
|
||||||
|
from aiohttp import web
|
||||||
|
from hacs_frontend import locate_gz, locate_debug_gz
|
||||||
|
|
||||||
|
from custom_components.hacs.globals import get_hacs
|
||||||
|
|
||||||
|
|
||||||
|
class HacsFrontend(HomeAssistantView):
|
||||||
|
"""Base View Class for HACS."""
|
||||||
|
|
||||||
|
requires_auth = False
|
||||||
|
name = "hacs_files"
|
||||||
|
url = r"/hacsfiles/{requested_file:.+}"
|
||||||
|
|
||||||
|
async def get(self, request, requested_file): # pylint: disable=unused-argument
|
||||||
|
"""Handle HACS Web requests."""
|
||||||
|
return await get_file_response(requested_file)
|
||||||
|
|
||||||
|
|
||||||
|
class HacsPluginViewLegacy(HacsFrontend):
|
||||||
|
"""Alias for legacy, remove with 1.0"""
|
||||||
|
|
||||||
|
name = "community_plugin"
|
||||||
|
url = r"/community_plugin/{requested_file:.+}"
|
||||||
|
|
||||||
|
async def get(self, request, requested_file): # pylint: disable=unused-argument
|
||||||
|
"""DEPRECATED."""
|
||||||
|
hacs = get_hacs()
|
||||||
|
if hacs.system.ha_version.split(".")[1] >= "107":
|
||||||
|
logger = Logger("hacs.deprecated")
|
||||||
|
logger.warning(
|
||||||
|
"The '/community_plugin/*' is deprecated and will be removed in an upcomming version of HACS, it has been replaced by '/hacsfiles/*', if you use the UI to manage your lovelace configuration, you can update this by going to the settings tab in HACS, if you use YAML to manage your lovelace configuration, you manually need to replace the URL in your resources."
|
||||||
|
)
|
||||||
|
|
||||||
|
return await get_file_response(requested_file)
|
||||||
|
|
||||||
|
|
||||||
|
async def get_file_response(requested_file):
|
||||||
|
"""Get file."""
|
||||||
|
hacs = get_hacs()
|
||||||
|
|
||||||
|
if requested_file.startswith("frontend-"):
|
||||||
|
if hacs.configuration.debug:
|
||||||
|
servefile = await hacs.hass.async_add_executor_job(locate_debug_gz)
|
||||||
|
hacs.logger.debug("Serving DEBUG frontend")
|
||||||
|
else:
|
||||||
|
servefile = await hacs.hass.async_add_executor_job(locate_gz)
|
||||||
|
|
||||||
|
if os.path.exists(servefile):
|
||||||
|
return web.FileResponse(servefile)
|
||||||
|
elif requested_file == "iconset.js":
|
||||||
|
return web.FileResponse(
|
||||||
|
f"{hacs.system.config_path}/custom_components/hacs/iconset.js"
|
||||||
|
)
|
||||||
|
|
||||||
|
try:
|
||||||
|
if requested_file.startswith("themes"):
|
||||||
|
file = f"{hacs.system.config_path}/{requested_file}"
|
||||||
|
else:
|
||||||
|
file = f"{hacs.system.config_path}/www/community/{requested_file}"
|
||||||
|
|
||||||
|
# Serve .gz if it exist
|
||||||
|
if os.path.exists(file + ".gz"):
|
||||||
|
file += ".gz"
|
||||||
|
|
||||||
|
if os.path.exists(file):
|
||||||
|
hacs.logger.debug("Serving {} from {}".format(requested_file, file))
|
||||||
|
response = web.FileResponse(file)
|
||||||
|
response.headers["Cache-Control"] = "no-store, max-age=0"
|
||||||
|
response.headers["Pragma"] = "no-store"
|
||||||
|
return response
|
||||||
|
else:
|
||||||
|
hacs.logger.error(f"Tried to serve up '{file}' but it does not exist")
|
||||||
|
|
||||||
|
except Exception as error: # pylint: disable=broad-except
|
||||||
|
hacs.logger.debug(
|
||||||
|
"there was an issue trying to serve {} - {}".format(requested_file, error)
|
||||||
|
)
|
||||||
|
|
||||||
|
return web.Response(status=404)
|
|
@ -0,0 +1,34 @@
|
||||||
|
const iconset = document.createElement("ha-iconset-svg");
|
||||||
|
iconset.name = "hacs";
|
||||||
|
iconset.size = "1024";
|
||||||
|
iconset.innerHTML = `
|
||||||
|
<svg version="1.1" id="hacs" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink"
|
||||||
|
viewBox="20 20 430 430">
|
||||||
|
<g>
|
||||||
|
<path class="st1" d="M367.1,110.6H132.9c-2.4,0-4.4-2-4.4-4.4V78.9c0-2.4,2-4.4,4.4-4.4h234.2c2.4,0,4.4,2,4.4,4.4v27.3
|
||||||
|
C371.5,108.6,369.5,110.6,367.1,110.6z"/>
|
||||||
|
<g>
|
||||||
|
<path class="st1" d="M128.5,329.7c0,2.9,2.3,5.2,5.2,5.2h87.8h57H293c2.9,0,5.2-2.3,5.2-5.2v-74.4c0-2.9,2.3-5.2,5.2-5.2h45.4
|
||||||
|
c2.9,0,5.2,2.3,5.2,5.2v74.4c0,2.9,2.3,5.2,5.2,5.2h7c2.9,0,5.2-2.3,5.2-5.2v-88.4c0-2.9-2.3-5.2-5.2-5.2H133.7
|
||||||
|
c-2.9,0-5.2,2.3-5.2,5.2V329.7z M154.5,253.6h88.6c2.4,0,4.4,2,4.4,4.4v55.1c0,2.4-2,4.4-4.4,4.4h-88.6c-2.4,0-4.4-2-4.4-4.4V258
|
||||||
|
C150.1,255.5,152.1,253.6,154.5,253.6z"/>
|
||||||
|
<path class="st1" d="M372.2,124.9c-0.4-2.2-2.3-3.8-4.6-3.8h-89.1h-57h-89.1c-2.3,0-4.2,1.6-4.6,3.8l-13.2,71.3
|
||||||
|
c-0.1,0.3-0.1,0.6-0.1,0.9V221c0,2.6,2.1,4.7,4.7,4.7h9.3h243h9.3c2.6,0,4.7-2.1,4.7-4.7V197c0-0.3,0-0.6-0.1-0.9L372.2,124.9z"/>
|
||||||
|
</g>
|
||||||
|
<g>
|
||||||
|
<path class="st1" d="M183.9,357.9v67.4h-15.1v-25.8h-25.2v25.8h-15.1v-67.4h15.1v26.6h25.2v-26.6H183.9z"/>
|
||||||
|
<path class="st1" d="M192.5,425.4l26.6-67.7h9.8l24.6,67.7h-16.1l-4-11.4H213l-4.3,11.4H192.5z M218.9,399h9.1l-4.3-12L218.9,399z
|
||||||
|
"/>
|
||||||
|
<path class="st1" d="M254.3,391.8c0-18.7,15.1-33.8,33.8-33.8c9.4,0,18.5,4,25,11.1l-10.9,9.8c-3.7-4-8.8-6.3-14.1-6.3
|
||||||
|
c-10.6,0-19.1,8.5-19.1,19.1c0,10.5,8.5,19.1,19.1,19.1c5.3,0,10.4-2.2,14.1-6.3l10.9,9.9c-6.5,7.1-15.5,11.1-25,11.1
|
||||||
|
C269.4,425.5,254.3,410.3,254.3,391.8z"/>
|
||||||
|
<path class="st1" d="M371.4,407.3c-0.5,5.8-4.4,11.1-10.3,14.4c-4.4,2.4-9.5,3.6-14.9,3.6c-1.2,0-2.3-0.1-3.4-0.2
|
||||||
|
c-9.5-0.9-17.8-5.4-22-11.9l12.3-8.1c1.4,2.3,5.4,4.9,11.1,5.4c3.7,0.4,7.4-0.3,10-1.7c1.6-0.8,2.7-2,2.8-3
|
||||||
|
c0.1-1.3-0.4-5.8-11.5-6.9c-15.9-1.5-25.8-10.8-24.6-22.9c0.5-6,4.4-11.2,10.4-14.6c5.2-2.7,11.8-4,18.3-3.3
|
||||||
|
c9.4,0.9,17.7,5.4,22,11.9l-12.3,8c-1.4-2.2-5.4-4.7-11.1-5.2c-3.7-0.4-7.4,0.3-10,1.6c-1.5,0.9-2.6,2.1-2.7,3
|
||||||
|
c-0.4,4.6,7.3,6.6,11.4,6.9v0.1C362.9,385.9,372.6,394.9,371.4,407.3z"/>
|
||||||
|
</g>
|
||||||
|
</g>
|
||||||
|
</svg>
|
||||||
|
`
|
||||||
|
document.body.appendChild(iconset);
|
|
@ -0,0 +1,24 @@
|
||||||
|
{
|
||||||
|
"codeowners": [
|
||||||
|
"@ludeeus"
|
||||||
|
],
|
||||||
|
"config_flow": true,
|
||||||
|
"dependencies": [
|
||||||
|
"websocket_api",
|
||||||
|
"frontend",
|
||||||
|
"persistent_notification",
|
||||||
|
"lovelace"
|
||||||
|
],
|
||||||
|
"documentation": "https://hacs.xyz",
|
||||||
|
"domain": "hacs",
|
||||||
|
"issues": "https://hacs.xyz/docs/issues",
|
||||||
|
"name": "HACS (Home Assistant Community Store)",
|
||||||
|
"requirements": [
|
||||||
|
"aiofiles==0.4.0",
|
||||||
|
"aiogithubapi==0.5.0",
|
||||||
|
"backoff==1.10.0",
|
||||||
|
"hacs_frontend==20200229193422",
|
||||||
|
"integrationhelper==0.2.2",
|
||||||
|
"semantic_version==2.8.4"
|
||||||
|
]
|
||||||
|
}
|
|
@ -0,0 +1,14 @@
|
||||||
|
"""Initialize repositories."""
|
||||||
|
from custom_components.hacs.repositories.theme import HacsTheme
|
||||||
|
from custom_components.hacs.repositories.integration import HacsIntegration
|
||||||
|
from custom_components.hacs.repositories.python_script import HacsPythonScript
|
||||||
|
from custom_components.hacs.repositories.appdaemon import HacsAppdaemon
|
||||||
|
from custom_components.hacs.repositories.plugin import HacsPlugin
|
||||||
|
|
||||||
|
RERPOSITORY_CLASSES = {
|
||||||
|
"theme": HacsTheme,
|
||||||
|
"integration": HacsIntegration,
|
||||||
|
"python_script": HacsPythonScript,
|
||||||
|
"appdaemon": HacsAppdaemon,
|
||||||
|
"plugin": HacsPlugin,
|
||||||
|
}
|
|
@ -0,0 +1,85 @@
|
||||||
|
"""Class for appdaemon apps in HACS."""
|
||||||
|
from aiogithubapi import AIOGitHubException
|
||||||
|
from integrationhelper import Logger
|
||||||
|
|
||||||
|
from .repository import HacsRepository
|
||||||
|
from ..hacsbase.exceptions import HacsException
|
||||||
|
|
||||||
|
|
||||||
|
class HacsAppdaemon(HacsRepository):
|
||||||
|
"""Appdaemon apps in HACS."""
|
||||||
|
|
||||||
|
def __init__(self, full_name):
|
||||||
|
"""Initialize."""
|
||||||
|
super().__init__()
|
||||||
|
self.data.full_name = full_name
|
||||||
|
self.data.category = "appdaemon"
|
||||||
|
self.content.path.local = self.localpath
|
||||||
|
self.content.path.remote = "apps"
|
||||||
|
self.logger = Logger(f"hacs.repository.{self.data.category}.{full_name}")
|
||||||
|
|
||||||
|
@property
|
||||||
|
def localpath(self):
|
||||||
|
"""Return localpath."""
|
||||||
|
return f"{self.hacs.system.config_path}/appdaemon/apps/{self.data.name}"
|
||||||
|
|
||||||
|
async def validate_repository(self):
|
||||||
|
"""Validate."""
|
||||||
|
await self.common_validate()
|
||||||
|
|
||||||
|
# Custom step 1: Validate content.
|
||||||
|
try:
|
||||||
|
addir = await self.repository_object.get_contents("apps", self.ref)
|
||||||
|
except AIOGitHubException:
|
||||||
|
raise HacsException(
|
||||||
|
f"Repostitory structure for {self.ref.replace('tags/','')} is not compliant"
|
||||||
|
)
|
||||||
|
|
||||||
|
if not isinstance(addir, list):
|
||||||
|
self.validate.errors.append("Repostitory structure not compliant")
|
||||||
|
|
||||||
|
self.content.path.remote = addir[0].path
|
||||||
|
self.content.objects = await self.repository_object.get_contents(
|
||||||
|
self.content.path.remote, self.ref
|
||||||
|
)
|
||||||
|
|
||||||
|
# Handle potential errors
|
||||||
|
if self.validate.errors:
|
||||||
|
for error in self.validate.errors:
|
||||||
|
if not self.hacs.system.status.startup:
|
||||||
|
self.logger.error(error)
|
||||||
|
return self.validate.success
|
||||||
|
|
||||||
|
async def registration(self):
|
||||||
|
"""Registration."""
|
||||||
|
if not await self.validate_repository():
|
||||||
|
return False
|
||||||
|
|
||||||
|
# Run common registration steps.
|
||||||
|
await self.common_registration()
|
||||||
|
|
||||||
|
# Set local path
|
||||||
|
self.content.path.local = self.localpath
|
||||||
|
|
||||||
|
async def update_repository(self):
|
||||||
|
"""Update."""
|
||||||
|
if self.hacs.github.ratelimits.remaining == 0:
|
||||||
|
return
|
||||||
|
await self.common_update()
|
||||||
|
|
||||||
|
# Get appdaemon objects.
|
||||||
|
if self.repository_manifest:
|
||||||
|
if self.data.content_in_root:
|
||||||
|
self.content.path.remote = ""
|
||||||
|
|
||||||
|
if self.content.path.remote == "apps":
|
||||||
|
addir = await self.repository_object.get_contents(
|
||||||
|
self.content.path.remote, self.ref
|
||||||
|
)
|
||||||
|
self.content.path.remote = addir[0].path
|
||||||
|
self.content.objects = await self.repository_object.get_contents(
|
||||||
|
self.content.path.remote, self.ref
|
||||||
|
)
|
||||||
|
|
||||||
|
# Set local path
|
||||||
|
self.content.path.local = self.localpath
|
|
@ -0,0 +1,93 @@
|
||||||
|
"""Class for integrations in HACS."""
|
||||||
|
from integrationhelper import Logger
|
||||||
|
|
||||||
|
from homeassistant.loader import async_get_custom_components
|
||||||
|
|
||||||
|
from custom_components.hacs.hacsbase.exceptions import HacsException
|
||||||
|
from custom_components.hacs.helpers.filters import get_first_directory_in_directory
|
||||||
|
from custom_components.hacs.helpers.information import get_integration_manifest
|
||||||
|
from custom_components.hacs.repositories.repository import HacsRepository
|
||||||
|
|
||||||
|
|
||||||
|
class HacsIntegration(HacsRepository):
|
||||||
|
"""Integrations in HACS."""
|
||||||
|
|
||||||
|
def __init__(self, full_name):
|
||||||
|
"""Initialize."""
|
||||||
|
super().__init__()
|
||||||
|
self.data.full_name = full_name
|
||||||
|
self.data.category = "integration"
|
||||||
|
self.content.path.remote = "custom_components"
|
||||||
|
self.content.path.local = self.localpath
|
||||||
|
self.logger = Logger(f"hacs.repository.{self.data.category}.{full_name}")
|
||||||
|
|
||||||
|
@property
|
||||||
|
def localpath(self):
|
||||||
|
"""Return localpath."""
|
||||||
|
return f"{self.hacs.system.config_path}/custom_components/{self.data.domain}"
|
||||||
|
|
||||||
|
async def validate_repository(self):
|
||||||
|
"""Validate."""
|
||||||
|
await self.common_validate()
|
||||||
|
|
||||||
|
# Custom step 1: Validate content.
|
||||||
|
if self.data.content_in_root:
|
||||||
|
self.content.path.remote = ""
|
||||||
|
|
||||||
|
if self.content.path.remote == "custom_components":
|
||||||
|
name = get_first_directory_in_directory(self.tree, "custom_components")
|
||||||
|
if name is None:
|
||||||
|
raise HacsException(
|
||||||
|
f"Repostitory structure for {self.ref.replace('tags/','')} is not compliant"
|
||||||
|
)
|
||||||
|
self.content.path.remote = f"custom_components/{name}"
|
||||||
|
|
||||||
|
try:
|
||||||
|
await get_integration_manifest(self)
|
||||||
|
except HacsException as exception:
|
||||||
|
self.logger.error(exception)
|
||||||
|
|
||||||
|
# Handle potential errors
|
||||||
|
if self.validate.errors:
|
||||||
|
for error in self.validate.errors:
|
||||||
|
if not self.hacs.system.status.startup:
|
||||||
|
self.logger.error(error)
|
||||||
|
return self.validate.success
|
||||||
|
|
||||||
|
async def registration(self):
|
||||||
|
"""Registration."""
|
||||||
|
if not await self.validate_repository():
|
||||||
|
return False
|
||||||
|
|
||||||
|
# Run common registration steps.
|
||||||
|
await self.common_registration()
|
||||||
|
|
||||||
|
# Set local path
|
||||||
|
self.content.path.local = self.localpath
|
||||||
|
|
||||||
|
async def update_repository(self):
|
||||||
|
"""Update."""
|
||||||
|
if self.hacs.github.ratelimits.remaining == 0:
|
||||||
|
return
|
||||||
|
await self.common_update()
|
||||||
|
|
||||||
|
if self.data.content_in_root:
|
||||||
|
self.content.path.remote = ""
|
||||||
|
|
||||||
|
if self.content.path.remote == "custom_components":
|
||||||
|
name = get_first_directory_in_directory(self.tree, "custom_components")
|
||||||
|
self.content.path.remote = f"custom_components/{name}"
|
||||||
|
|
||||||
|
try:
|
||||||
|
await get_integration_manifest(self)
|
||||||
|
except HacsException as exception:
|
||||||
|
self.logger.error(exception)
|
||||||
|
|
||||||
|
# Set local path
|
||||||
|
self.content.path.local = self.localpath
|
||||||
|
|
||||||
|
async def reload_custom_components(self):
|
||||||
|
"""Reload custom_components (and config flows)in HA."""
|
||||||
|
self.logger.info("Reloading custom_component cache")
|
||||||
|
del self.hacs.hass.data["custom_components"]
|
||||||
|
await async_get_custom_components(self.hacs.hass)
|
|
@ -0,0 +1,42 @@
|
||||||
|
"""
|
||||||
|
Manifest handling of a repository.
|
||||||
|
|
||||||
|
https://hacs.xyz/docs/publish/start#hacsjson
|
||||||
|
"""
|
||||||
|
from typing import List
|
||||||
|
import attr
|
||||||
|
|
||||||
|
from custom_components.hacs.hacsbase.exceptions import HacsException
|
||||||
|
|
||||||
|
|
||||||
|
@attr.s(auto_attribs=True)
|
||||||
|
class HacsManifest:
|
||||||
|
"""HacsManifest class."""
|
||||||
|
|
||||||
|
name: str = None
|
||||||
|
content_in_root: bool = False
|
||||||
|
zip_release: bool = False
|
||||||
|
filename: str = None
|
||||||
|
manifest: dict = {}
|
||||||
|
hacs: str = None
|
||||||
|
hide_default_branch: bool = False
|
||||||
|
domains: List[str] = []
|
||||||
|
country: List[str] = []
|
||||||
|
homeassistant: str = None
|
||||||
|
persistent_directory: str = None
|
||||||
|
iot_class: str = None
|
||||||
|
render_readme: bool = False
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def from_dict(manifest: dict):
|
||||||
|
"""Set attributes from dicts."""
|
||||||
|
if manifest is None:
|
||||||
|
raise HacsException("Missing manifest data")
|
||||||
|
|
||||||
|
manifest_data = HacsManifest()
|
||||||
|
|
||||||
|
manifest_data.manifest = manifest
|
||||||
|
|
||||||
|
for key in manifest:
|
||||||
|
setattr(manifest_data, key, manifest[key])
|
||||||
|
return manifest_data
|
|
@ -0,0 +1,107 @@
"""Class for plugins in HACS."""
import json
from integrationhelper import Logger

from .repository import HacsRepository
from ..hacsbase.exceptions import HacsException

from custom_components.hacs.helpers.information import find_file_name


class HacsPlugin(HacsRepository):
    """Plugins in HACS."""

    def __init__(self, full_name):
        """Initialize."""
        super().__init__()
        self.data.full_name = full_name
        self.data.file_name = None
        self.data.category = "plugin"
        self.information.javascript_type = None
        self.content.path.local = (
            f"{self.hacs.system.config_path}/www/community/{full_name.split('/')[-1]}"
        )
        self.logger = Logger(f"hacs.repository.{self.data.category}.{full_name}")

    async def validate_repository(self):
        """Validate."""
        # Run common validation steps.
        await self.common_validate()

        # Custom step 1: Validate content.
        find_file_name(self)

        if self.content.path.remote is None:
            raise HacsException(
                f"Repository structure for {self.ref.replace('tags/','')} is not compliant"
            )

        if self.content.path.remote == "release":
            self.content.single = True

        # Handle potential errors
        if self.validate.errors:
            for error in self.validate.errors:
                if not self.hacs.system.status.startup:
                    self.logger.error(error)
        return self.validate.success

    async def registration(self):
        """Registration."""
        if not await self.validate_repository():
            return False

        # Run common registration steps.
        await self.common_registration()

    async def update_repository(self):
        """Update."""
        if self.hacs.github.ratelimits.remaining == 0:
            return
        # Run common update steps.
        await self.common_update()

        # Get plugin objects.
        find_file_name(self)

        # Get JS type
        await self.parse_readme_for_jstype()

        if self.content.path.remote is None:
            self.validate.errors.append("Repository structure not compliant")

        if self.content.path.remote == "release":
            self.content.single = True

    async def get_package_content(self):
        """Get package content."""
        try:
            package = await self.repository_object.get_contents("package.json")
            package = json.loads(package.content)

            if package:
                self.data.authors = package["author"]
        except Exception:  # pylint: disable=broad-except
            pass

    async def parse_readme_for_jstype(self):
        """Parse the readme looking for js type."""
        readme = None
        readme_files = ["readme", "readme.md"]
        root = await self.repository_object.get_contents("")
        for file in root:
            if file.name.lower() in readme_files:
                readme = await self.repository_object.get_contents(file.name)
                break

        if readme is None:
            return

        readme = readme.content
        for line in readme.splitlines():
            if "type: module" in line:
                self.information.javascript_type = "module"
                break
            elif "type: js" in line:
                self.information.javascript_type = "js"
                break
@ -0,0 +1,87 @@
"""Class for python_scripts in HACS."""
from integrationhelper import Logger

from .repository import HacsRepository
from ..hacsbase.exceptions import HacsException
from ..helpers.information import find_file_name


class HacsPythonScript(HacsRepository):
    """python_scripts in HACS."""

    category = "python_script"

    def __init__(self, full_name):
        """Initialize."""
        super().__init__()
        self.data.full_name = full_name
        self.data.category = "python_script"
        self.content.path.remote = "python_scripts"
        self.content.path.local = f"{self.hacs.system.config_path}/python_scripts"
        self.content.single = True
        self.logger = Logger(f"hacs.repository.{self.data.category}.{full_name}")

    async def validate_repository(self):
        """Validate."""
        # Run common validation steps.
        await self.common_validate()

        # Custom step 1: Validate content.
        if self.data.content_in_root:
            self.content.path.remote = ""

        compliant = False
        for treefile in self.treefiles:
            if treefile.startswith(f"{self.content.path.remote}") and treefile.endswith(
                ".py"
            ):
                compliant = True
                break
        if not compliant:
            raise HacsException(
                f"Repository structure for {self.ref.replace('tags/','')} is not compliant"
            )

        # Handle potential errors
        if self.validate.errors:
            for error in self.validate.errors:
                if not self.hacs.system.status.startup:
                    self.logger.error(error)
        return self.validate.success

    async def registration(self):
        """Registration."""
        if not await self.validate_repository():
            return False

        # Run common registration steps.
        await self.common_registration()

        # Set name
        find_file_name(self)

    async def update_repository(self):  # lgtm[py/similar-function]
        """Update."""
        if self.hacs.github.ratelimits.remaining == 0:
            return
        # Run common update steps.
        await self.common_update()

        # Get python_script objects.
        if self.data.content_in_root:
            self.content.path.remote = ""

        compliant = False
        for treefile in self.treefiles:
            if treefile.startswith(f"{self.content.path.remote}") and treefile.endswith(
                ".py"
            ):
                compliant = True
                break
        if not compliant:
            raise HacsException(
                f"Repository structure for {self.ref.replace('tags/','')} is not compliant"
            )

        # Update name
        find_file_name(self)
@ -0,0 +1,17 @@
"""Object for removed repositories."""
import attr


@attr.s(auto_attribs=True)
class RemovedRepository:
    repository: str = None
    reason: str = None
    link: str = None
    removal_type: str = None  # archived, not_compliant, critical, dev, broken
    acknowledged: bool = False

    def update_data(self, data: dict):
        """Update data of the repository."""
        for key in data:
            if key in self.__dict__:
                setattr(self, key, data[key])
@ -0,0 +1,424 @@
"""Repository."""
# pylint: disable=broad-except, bad-continuation, no-member
import json
import os
import tempfile
import zipfile
from integrationhelper import Validate
from aiogithubapi import AIOGitHubException
from .manifest import HacsManifest
from ..helpers.misc import get_repository_name
from ..handler.download import async_download_file, async_save_file
from ..helpers.misc import version_left_higher_then_right
from ..helpers.install import install_repository, version_to_install

from custom_components.hacs.globals import get_hacs
from custom_components.hacs.helpers.information import (
    get_info_md_content,
    get_repository,
)
from custom_components.hacs.helpers.validate_repository import (
    common_validate,
    common_update_data,
)
from custom_components.hacs.repositories.repositorydata import RepositoryData


class RepositoryVersions:
    """Versions."""

    available = None
    available_commit = None
    installed = None
    installed_commit = None


class RepositoryStatus:
    """Repository status."""

    hide = False
    installed = False
    last_updated = None
    new = True
    selected_tag = None
    show_beta = False
    track = True
    updated_info = False
    first_install = True


class RepositoryInformation:
    """RepositoryInformation."""

    additional_info = None
    authors = []
    category = None
    default_branch = None
    description = ""
    state = None
    full_name = None
    file_name = None
    javascript_type = None
    homeassistant_version = None
    last_updated = None
    uid = None
    stars = 0
    info = None
    name = None
    topics = []


class RepositoryReleases:
    """RepositoryReleases."""

    last_release = None
    last_release_object = None
    last_release_object_downloads = None
    published_tags = []
    objects = []
    releases = False
    downloads = None


class RepositoryPath:
    """RepositoryPath."""

    local = None
    remote = None


class RepositoryContent:
    """RepositoryContent."""

    path = None
    files = []
    objects = []
    single = False


class HacsRepository:
    """HacsRepository."""

    def __init__(self):
        """Set up HacsRepository."""
        self.hacs = get_hacs()
        self.data = RepositoryData()
        self.content = RepositoryContent()
        self.content.path = RepositoryPath()
        self.information = RepositoryInformation()
        self.repository_object = None
        self.status = RepositoryStatus()
        self.state = None
        self.integration_manifest = {}
        self.repository_manifest = HacsManifest.from_dict({})
        self.validate = Validate()
        self.releases = RepositoryReleases()
        self.versions = RepositoryVersions()
        self.pending_restart = False
        self.tree = []
        self.treefiles = []
        self.ref = None

    @property
    def pending_upgrade(self):
        """Return pending upgrade."""
        if self.status.installed:
            if self.status.selected_tag is not None:
                if self.status.selected_tag == self.data.default_branch:
                    if self.versions.installed_commit != self.versions.available_commit:
                        return True
                    return False
            if self.display_installed_version != self.display_available_version:
                return True
        return False

    @property
    def config_flow(self):
        """Return bool if integration has config_flow."""
        if self.integration_manifest:
            if self.data.full_name == "hacs/integration":
                return False
            return self.integration_manifest.get("config_flow", False)
        return False

    @property
    def custom(self):
        """Return flag if the repository is custom."""
        if self.data.full_name.split("/")[0] in ["custom-components", "custom-cards"]:
            return False
        if self.data.full_name.lower() in [x.lower() for x in self.hacs.common.default]:
            return False
        if self.data.full_name == "hacs/integration":
            return False
        return True

    @property
    def can_install(self):
        """Return bool if repository can be installed."""
        target = None
        if self.information.homeassistant_version is not None:
            target = self.information.homeassistant_version
        if self.repository_manifest is not None:
            if self.data.homeassistant is not None:
                target = self.data.homeassistant

        if target is not None:
            if self.releases.releases:
                if not version_left_higher_then_right(
                    self.hacs.system.ha_version, target
                ):
                    return False
        return True

    @property
    def display_name(self):
        """Return display name."""
        return get_repository_name(self)

    @property
    def display_status(self):
        """Return display_status."""
        if self.status.new:
            status = "new"
        elif self.pending_restart:
            status = "pending-restart"
        elif self.pending_upgrade:
            status = "pending-upgrade"
        elif self.status.installed:
            status = "installed"
        else:
            status = "default"
        return status

    @property
    def display_status_description(self):
        """Return display_status_description."""
        description = {
            "default": "Not installed.",
            "pending-restart": "Restart pending.",
            "pending-upgrade": "Upgrade pending.",
            "installed": "No action required.",
            "new": "This is a newly added repository.",
        }
        return description[self.display_status]

    @property
    def display_installed_version(self):
        """Return the installed version (or commit) for display."""
        if self.versions.installed is not None:
            installed = self.versions.installed
        else:
            if self.versions.installed_commit is not None:
                installed = self.versions.installed_commit
            else:
                installed = ""
        return installed

    @property
    def display_available_version(self):
        """Return the available version (or commit) for display."""
        if self.versions.available is not None:
            available = self.versions.available
        else:
            if self.versions.available_commit is not None:
                available = self.versions.available_commit
            else:
                available = ""
        return available

    @property
    def display_version_or_commit(self):
        """Does the repository use releases or commits?"""
        if self.releases.releases:
            version_or_commit = "version"
        else:
            version_or_commit = "commit"
        return version_or_commit

    @property
    def main_action(self):
        """Return the main action."""
        actions = {
            "new": "INSTALL",
            "default": "INSTALL",
            "installed": "REINSTALL",
            "pending-restart": "REINSTALL",
            "pending-upgrade": "UPGRADE",
        }
        return actions[self.display_status]

    async def common_validate(self):
        """Common validation steps of the repository."""
        await common_validate(self)

    async def common_registration(self):
        """Common registration steps of the repository."""
        # Attach repository
        if self.repository_object is None:
            self.repository_object = await get_repository(
                self.hacs.session, self.hacs.configuration.token, self.data.full_name
            )
            self.data.update_data(self.repository_object.attributes)

        # Set id
        self.information.uid = str(self.data.id)

        # Set topics
        self.data.topics = self.data.topics

        # Set stargazers_count
        self.data.stargazers_count = self.data.stargazers_count

        # Set description
        self.data.description = self.data.description

    async def common_update(self):
        """Common information update steps of the repository."""
        self.logger.debug("Getting repository information")

        # Attach repository
        await common_update_data(self)

        # Update last updated
        self.information.last_updated = self.repository_object.attributes.get(
            "pushed_at", 0
        )

        # Update last available commit
        await self.repository_object.set_last_commit()
        self.versions.available_commit = self.repository_object.last_commit

        # Get the content of hacs.json
        await self.get_repository_manifest_content()

        # Update "info.md"
        self.information.additional_info = await get_info_md_content(self)

    async def install(self):
        """Common installation steps of the repository."""
        await install_repository(self)

    async def download_zip(self, validate):
        """Download ZIP archive from repository release."""
        try:
            contents = False

            for release in self.releases.objects:
                self.logger.info(f"ref: {self.ref} --- tag: {release.tag_name}")
                if release.tag_name == self.ref.split("/")[1]:
                    contents = release.assets

            if not contents:
                return validate

            for content in contents or []:
                filecontent = await async_download_file(content.download_url)

                if filecontent is None:
                    validate.errors.append(f"[{content.name}] was not downloaded.")
                    continue

                result = await async_save_file(
                    f"{tempfile.gettempdir()}/{self.data.filename}", filecontent
                )
                with zipfile.ZipFile(
                    f"{tempfile.gettempdir()}/{self.data.filename}", "r"
                ) as zip_file:
                    zip_file.extractall(self.content.path.local)

                if result:
                    self.logger.info(f"download of {content.name} complete")
                    continue
                validate.errors.append(f"[{content.name}] was not downloaded.")
        except Exception:
            validate.errors.append("Download was not complete.")

        return validate

    async def download_content(self, validate, directory_path, local_directory, ref):
        """Download the content of a directory."""
        from custom_components.hacs.helpers.download import download_content

        validate = await download_content(self)
        return validate

    async def get_repository_manifest_content(self):
        """Get the content of the hacs.json file."""
        if "hacs.json" not in [x.filename for x in self.tree]:
            return
        if self.ref is None:
            self.ref = version_to_install(self)
        try:
            manifest = await self.repository_object.get_contents("hacs.json", self.ref)
            self.repository_manifest = HacsManifest.from_dict(
                json.loads(manifest.content)
            )
            self.data.update_data(json.loads(manifest.content))
        except (AIOGitHubException, Exception):  # Gotta Catch 'Em All
            pass

    def remove(self):
        """Run remove tasks."""
        self.logger.info("Starting removal")

        if self.information.uid in self.hacs.common.installed:
            self.hacs.common.installed.remove(self.information.uid)
        for repository in self.hacs.repositories:
            if repository.information.uid == self.information.uid:
                self.hacs.repositories.remove(repository)

    async def uninstall(self):
        """Run uninstall tasks."""
        self.logger.info("Uninstalling")
        await self.remove_local_directory()
        self.status.installed = False
        if self.data.category == "integration":
            if self.config_flow:
                await self.reload_custom_components()
            else:
                self.pending_restart = True
        elif self.data.category == "theme":
            try:
                await self.hacs.hass.services.async_call(
                    "frontend", "reload_themes", {}
                )
            except Exception:  # pylint: disable=broad-except
                pass
        if self.data.full_name in self.hacs.common.installed:
            self.hacs.common.installed.remove(self.data.full_name)
        self.versions.installed = None
        self.versions.installed_commit = None
        self.hacs.hass.bus.async_fire(
            "hacs/repository",
            {"id": 1337, "action": "uninstall", "repository": self.data.full_name},
        )

    async def remove_local_directory(self):
        """Check the local directory."""
        import shutil
        from asyncio import sleep

        try:
            if self.data.category == "python_script":
                local_path = "{}/{}.py".format(self.content.path.local, self.data.name)
            elif self.data.category == "theme":
                local_path = "{}/{}.yaml".format(
                    self.content.path.local, self.data.name
                )
            else:
                local_path = self.content.path.local

            if os.path.exists(local_path):
                self.logger.debug(f"Removing {local_path}")

                if self.data.category in ["python_script", "theme"]:
                    os.remove(local_path)
                else:
                    shutil.rmtree(local_path)

                while os.path.exists(local_path):
                    await sleep(1)

        except Exception as exception:
            self.logger.debug(f"Removing {local_path} failed with {exception}")
            return
@ -0,0 +1,82 @@
"""Repository data."""
from datetime import datetime
from typing import List
import attr


@attr.s(auto_attribs=True)
class RepositoryData:
    """RepositoryData class."""

    id: int = 0
    full_name: str = ""
    pushed_at: str = ""
    category: str = ""
    archived: bool = False
    description: str = ""
    manifest_name: str = None
    topics: List[str] = []
    fork: bool = False
    domain: str = ""
    default_branch: str = None
    stargazers_count: int = 0
    last_commit: str = ""
    file_name: str = ""
    content_in_root: bool = False
    zip_release: bool = False
    filename: str = ""
    render_readme: bool = False
    hide_default_branch: bool = False
    domains: List[str] = []
    country: List[str] = []
    authors: List[str] = []
    homeassistant: str = None  # Minimum Home Assistant version
    hacs: str = None  # Minimum HACS version
    persistent_directory: str = None
    iot_class: str = None

    @property
    def name(self):
        """Return the name."""
        if self.category == "integration":
            return self.domain
        return self.full_name.split("/")[-1]

    def to_json(self):
        """Export to json."""
        return self.__dict__

    @staticmethod
    def create_from_dict(source: dict):
        """Set attributes from dicts."""
        data = RepositoryData()
        for key in source:
            if key in data.__dict__:
                if key == "pushed_at":
                    setattr(
                        data, key, datetime.strptime(source[key], "%Y-%m-%dT%H:%M:%SZ")
                    )
                elif key == "country":
                    if isinstance(source[key], str):
                        setattr(data, key, [source[key]])
                    else:
                        setattr(data, key, source[key])
                else:
                    setattr(data, key, source[key])
        return data

    def update_data(self, data: dict):
        """Update data of the repository."""
        for key in data:
            if key in self.__dict__:
                if key == "pushed_at":
                    setattr(
                        self, key, datetime.strptime(data[key], "%Y-%m-%dT%H:%M:%SZ")
                    )
                elif key == "country":
                    if isinstance(data[key], str):
                        setattr(self, key, [data[key]])
                    else:
                        setattr(self, key, data[key])
                else:
                    setattr(self, key, data[key])
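A rough illustration of how RepositoryData.create_from_dict coerces incoming attributes; the input values here are made up for the sketch:

data = RepositoryData.create_from_dict(
    {
        "full_name": "developer/repo",
        "pushed_at": "2020-03-27T12:00:00Z",  # parsed into a datetime
        "country": "NO",                      # a plain string is wrapped into ["NO"]
        "stargazers_count": 42,
    }
)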
@ -0,0 +1,72 @@
"""Class for themes in HACS."""
from integrationhelper import Logger
from .repository import HacsRepository
from ..hacsbase.exceptions import HacsException
from ..helpers.information import find_file_name


class HacsTheme(HacsRepository):
    """Themes in HACS."""

    def __init__(self, full_name):
        """Initialize."""
        super().__init__()
        self.data.full_name = full_name
        self.data.category = "theme"
        self.content.path.remote = "themes"
        self.content.path.local = f"{self.hacs.system.config_path}/themes/"
        self.content.single = False
        self.logger = Logger(f"hacs.repository.{self.data.category}.{full_name}")

    async def validate_repository(self):
        """Validate."""
        # Run common validation steps.
        await self.common_validate()

        # Custom step 1: Validate content.
        compliant = False
        for treefile in self.treefiles:
            if treefile.startswith("themes/") and treefile.endswith(".yaml"):
                compliant = True
                break
        if not compliant:
            raise HacsException(
                f"Repository structure for {self.ref.replace('tags/','')} is not compliant"
            )

        if self.data.content_in_root:
            self.content.path.remote = ""

        # Handle potential errors
        if self.validate.errors:
            for error in self.validate.errors:
                if not self.hacs.system.status.startup:
                    self.logger.error(error)
        return self.validate.success

    async def registration(self):
        """Registration."""
        if not await self.validate_repository():
            return False

        # Run common registration steps.
        await self.common_registration()

        # Set name
        find_file_name(self)
        self.content.path.local = f"{self.hacs.system.config_path}/themes/{self.data.file_name.replace('.yaml', '')}"

    async def update_repository(self):  # lgtm[py/similar-function]
        """Update."""
        if self.hacs.github.ratelimits.remaining == 0:
            return
        # Run common update steps.
        await self.common_update()

        # Get theme objects.
        if self.data.content_in_root:
            self.content.path.remote = ""

        # Update name
        find_file_name(self)
        self.content.path.local = f"{self.hacs.system.config_path}/themes/{self.data.file_name.replace('.yaml', '')}"
@ -0,0 +1,99 @@
"""Sensor platform for HACS."""
# pylint: disable=unused-argument
from homeassistant.helpers.entity import Entity
from .hacsbase import Hacs as hacs
from .const import DOMAIN, VERSION, NAME_SHORT


async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
    """Set up the sensor platform."""
    async_add_entities([HACSSensor()])


async def async_setup_entry(hass, config_entry, async_add_devices):
    """Set up the sensor platform from a config entry."""
    async_add_devices([HACSSensor()])


class HACSDevice(Entity):
    """HACS Device class."""

    @property
    def device_info(self):
        """Return device information about HACS."""
        return {
            "identifiers": {(DOMAIN, self.unique_id)},
            "name": NAME_SHORT,
            "manufacturer": "hacs.xyz",
            "model": "",
            "sw_version": VERSION,
        }


class HACSSensor(HACSDevice):
    """HACS Sensor class."""

    def __init__(self):
        """Initialize."""
        self._state = None
        self.repositories = []

    async def async_update(self):
        """Update the sensor."""
        if hacs.system.status.background_task:
            return

        self.repositories = []

        for repository in hacs.repositories:
            if (
                repository.pending_upgrade
                and repository.data.category in hacs.common.categories
            ):
                self.repositories.append(repository)
        self._state = len(self.repositories)

    @property
    def unique_id(self):
        """Return a unique ID to use for this sensor."""
        return (
            "0717a0cd-745c-48fd-9b16-c8534c9704f9-bc944b0f-fd42-4a58-a072-ade38d1444cd"
        )

    @property
    def name(self):
        """Return the name of the sensor."""
        return "hacs"

    @property
    def state(self):
        """Return the state of the sensor."""
        return self._state

    @property
    def icon(self):
        """Return the icon of the sensor."""
        return "hacs:hacs"

    @property
    def unit_of_measurement(self):
        """Return the unit of measurement."""
        return "pending update(s)"

    @property
    def device_state_attributes(self):
        """Return attributes for the sensor."""
        data = []
        for repository in self.repositories:
            data.append(
                {
                    "name": repository.data.full_name,
                    "display_name": repository.display_name,
                    "installed version": repository.display_installed_version,
                    "available version": repository.display_available_version,
                }
            )
        return {
            "repositories": data,
            "attribution": "It is expected to see [object Object] here, for more info see https://hacs.xyz/docs/basic/sensor",
        }
@ -0,0 +1,15 @@
install:
  description: This is NOT intended to be used here, this is intended for developers!
  fields:
    repository:
      description: The repository ID
      example: '"123456789"'
register:
  description: This is NOT intended to be used here, this is intended for developers!
  fields:
    repository:
      description: The full name of the repository
      example: 'developer/repo'
    repository_type:
      description: The repository type
      example: 'plugin'
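For reference, invoking the register service from a script or the Developer Tools would look roughly like the sketch below; the repository name and type are placeholders taken from the examples above.

service: hacs.register
data:
  repository: developer/repo
  repository_type: plugin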
@ -0,0 +1,110 @@
"""Setup functions for HACS."""
# pylint: disable=bad-continuation
from hacs_frontend.version import VERSION as FE_VERSION
from homeassistant.helpers import discovery

from custom_components.hacs.hacsbase.exceptions import HacsException
from custom_components.hacs.const import VERSION, DOMAIN
from custom_components.hacs.globals import get_hacs
from custom_components.hacs.helpers.information import get_repository
from custom_components.hacs.helpers.register_repository import register_repository


async def load_hacs_repository():
    """Load the HACS repository."""
    hacs = get_hacs()

    try:
        repository = hacs.get_by_name("hacs/integration")
        if repository is None:
            await register_repository("hacs/integration", "integration")
            repository = hacs.get_by_name("hacs/integration")
        if repository is None:
            raise HacsException("Unknown error")
        repository.status.installed = True
        repository.versions.installed = VERSION
        repository.status.new = False
        hacs.repo = repository.repository_object
        hacs.data_repo = await get_repository(
            hacs.session, hacs.configuration.token, "hacs/default"
        )
    except HacsException as exception:
        if "403" in f"{exception}":
            hacs.logger.critical("GitHub API is ratelimited, or the token is wrong.")
        else:
            hacs.logger.critical(f"[{exception}] - Could not load HACS!")
        return False
    return True


def setup_extra_stores():
    """Set up extra stores in HACS if enabled in Home Assistant."""
    hacs = get_hacs()
    if "python_script" in hacs.hass.config.components:
        if "python_script" not in hacs.common.categories:
            hacs.common.categories.append("python_script")

    if hacs.hass.services.services.get("frontend", {}).get("reload_themes") is not None:
        if "theme" not in hacs.common.categories:
            hacs.common.categories.append("theme")


def add_sensor():
    """Add sensor."""
    hacs = get_hacs()

    try:
        if hacs.configuration.config_type == "yaml":
            hacs.hass.async_create_task(
                discovery.async_load_platform(
                    hacs.hass, "sensor", DOMAIN, {}, hacs.configuration.config
                )
            )
        else:
            hacs.hass.async_add_job(
                hacs.hass.config_entries.async_forward_entry_setup(
                    hacs.configuration.config_entry, "sensor"
                )
            )
    except ValueError:
        pass


async def setup_frontend():
    """Configure the HACS frontend elements."""
    from .http import HacsFrontend, HacsPluginViewLegacy
    from .ws_api_handlers import setup_ws_api

    hacs = get_hacs()

    hacs.hass.http.register_view(HacsFrontend())
    hacs.frontend.version_running = FE_VERSION

    # Legacy views, remove with 2.0
    hacs.hass.http.register_view(HacsPluginViewLegacy())

    # Add to sidepanel
    custom_panel_config = {
        "name": "hacs-frontend",
        "embed_iframe": False,
        "trust_external": False,
        "js_url": f"/hacsfiles/frontend-{hacs.frontend.version_running}.js",
    }

    config = {}
    config["_panel_custom"] = custom_panel_config

    hacs.hass.components.frontend.async_register_built_in_panel(
        component_name="custom",
        sidebar_title=hacs.configuration.sidepanel_title,
        sidebar_icon=hacs.configuration.sidepanel_icon,
        frontend_url_path="hacs",
        config=config,
        require_admin=True,
    )

    if "frontend_extra_module_url" not in hacs.hass.data:
        hacs.hass.data["frontend_extra_module_url"] = set()
    hacs.hass.data["frontend_extra_module_url"].add("/hacsfiles/iconset.js")

    await setup_ws_api(hacs.hass)
@ -0,0 +1,19 @@
"""Storage handlers."""
from homeassistant.helpers.json import JSONEncoder
from homeassistant.helpers.storage import Store
from .hacsbase.const import STORAGE_VERSION


async def async_load_from_store(hass, key):
    """Load the retained data from store and return de-serialized data."""
    store = Store(hass, STORAGE_VERSION, f"hacs.{key}", encoder=JSONEncoder)
    restored = await store.async_load()
    if restored is None:
        return {}
    return restored


async def async_save_to_store(hass, key, data):
    """Generate dynamic data to store and save it to the filesystem."""
    store = Store(hass, STORAGE_VERSION, f"hacs.{key}", encoder=JSONEncoder)
    await store.async_save(data)
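As a rough illustration of how these helpers might be used elsewhere in the integration (a hypothetical call site, not taken from this commit), the pattern is load, mutate, save:

async def remember_acknowledged(hass):
    # Hypothetical helper: load what was previously written to .storage/hacs.critical,
    # mark every entry as acknowledged, and write the list back.
    critical = await async_load_from_store(hass, "critical")
    for repo in critical:
        repo["acknowledged"] = True
    await async_save_to_store(hass, "critical", critical)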
@ -0,0 +1,379 @@
"""WebSocket API for HACS."""
# pylint: disable=unused-argument
import sys
import os
import voluptuous as vol
from aiogithubapi import AIOGitHubException
from homeassistant.components import websocket_api
import homeassistant.helpers.config_validation as cv
from .hacsbase.exceptions import HacsException
from .store import async_load_from_store, async_save_to_store

from custom_components.hacs.globals import get_hacs
from custom_components.hacs.helpers.register_repository import register_repository


async def setup_ws_api(hass):
    """Set up WS API handlers."""
    websocket_api.async_register_command(hass, hacs_settings)
    websocket_api.async_register_command(hass, hacs_config)
    websocket_api.async_register_command(hass, hacs_repositories)
    websocket_api.async_register_command(hass, hacs_repository)
    websocket_api.async_register_command(hass, hacs_repository_data)
    websocket_api.async_register_command(hass, check_local_path)
    websocket_api.async_register_command(hass, hacs_status)
    websocket_api.async_register_command(hass, acknowledge_critical_repository)
    websocket_api.async_register_command(hass, get_critical_repositories)


@websocket_api.async_response
@websocket_api.websocket_command(
    {
        vol.Required("type"): "hacs/settings",
        vol.Optional("action"): cv.string,
        vol.Optional("category"): cv.string,
    }
)
async def hacs_settings(hass, connection, msg):
    """Handle the hacs/settings WebSocket command."""
    hacs = get_hacs()
    action = msg["action"]
    hacs.logger.debug(f"WS action '{action}'")

    if action == "set_fe_grid":
        hacs.configuration.frontend_mode = "Grid"

    elif action == "onboarding_done":
        hacs.configuration.onboarding_done = True

    elif action == "set_fe_table":
        hacs.configuration.frontend_mode = "Table"

    elif action == "set_fe_compact_true":
        hacs.configuration.frontend_compact = False

    elif action == "set_fe_compact_false":
        hacs.configuration.frontend_compact = True

    elif action == "reload_data":
        hacs.system.status.reloading_data = True
        hass.bus.async_fire("hacs/status", {})
        await hacs.recuring_tasks_all()
        hacs.system.status.reloading_data = False
        hass.bus.async_fire("hacs/status", {})

    elif action == "upgrade_all":
        hacs.system.status.upgrading_all = True
        hacs.system.status.background_task = True
        hass.bus.async_fire("hacs/status", {})
        for repository in hacs.repositories:
            if repository.pending_upgrade:
                repository.status.selected_tag = None
                await repository.install()
        hacs.system.status.upgrading_all = False
        hacs.system.status.background_task = False
        hass.bus.async_fire("hacs/status", {})
        hass.bus.async_fire("hacs/repository", {})

    elif action == "clear_new":
        for repo in hacs.repositories:
            if msg.get("category") == repo.data.category:
                if repo.status.new:
                    hacs.logger.debug(
                        f"Clearing new flag from '{repo.data.full_name}'"
                    )
                    repo.status.new = False
    else:
        hacs.logger.error(f"WS action '{action}' is not valid")
    hass.bus.async_fire("hacs/config", {})
    await hacs.data.async_write()


@websocket_api.async_response
@websocket_api.websocket_command({vol.Required("type"): "hacs/config"})
async def hacs_config(hass, connection, msg):
    """Handle the hacs/config WebSocket command."""
    hacs = get_hacs()
    config = hacs.configuration

    content = {}
    content["frontend_mode"] = config.frontend_mode
    content["frontend_compact"] = config.frontend_compact
    content["onboarding_done"] = config.onboarding_done
    content["version"] = hacs.version
    content["dev"] = config.dev
    content["debug"] = config.debug
    content["country"] = config.country
    content["experimental"] = config.experimental
    content["categories"] = hacs.common.categories

    connection.send_message(websocket_api.result_message(msg["id"], content))


@websocket_api.async_response
@websocket_api.websocket_command({vol.Required("type"): "hacs/status"})
async def hacs_status(hass, connection, msg):
    """Handle the hacs/status WebSocket command."""
    hacs = get_hacs()
    content = {
        "startup": hacs.system.status.startup,
        "background_task": hacs.system.status.background_task,
        "lovelace_mode": hacs.system.lovelace_mode,
        "reloading_data": hacs.system.status.reloading_data,
        "upgrading_all": hacs.system.status.upgrading_all,
        "disabled": hacs.system.disabled,
    }
    connection.send_message(websocket_api.result_message(msg["id"], content))


@websocket_api.async_response
@websocket_api.websocket_command({vol.Required("type"): "hacs/repositories"})
async def hacs_repositories(hass, connection, msg):
    """Handle the hacs/repositories WebSocket command."""
    hacs = get_hacs()
    repositories = hacs.repositories
    content = []
    for repo in repositories:
        if repo.data.category in hacs.common.categories:
            data = {
                "additional_info": repo.information.additional_info,
                "authors": repo.data.authors,
                "available_version": repo.display_available_version,
                "beta": repo.status.show_beta,
                "can_install": repo.can_install,
                "category": repo.data.category,
                "country": repo.data.country,
                "config_flow": repo.config_flow,
                "custom": repo.custom,
                "default_branch": repo.data.default_branch,
                "description": repo.data.description,
                "domain": repo.integration_manifest.get("domain"),
                "downloads": repo.releases.downloads,
                "file_name": repo.data.file_name,
                "first_install": repo.status.first_install,
                "full_name": repo.data.full_name,
                "hide": repo.status.hide,
                "hide_default_branch": repo.data.hide_default_branch,
                "homeassistant": repo.data.homeassistant,
                "id": repo.information.uid,
                "info": repo.information.info,
                "installed_version": repo.display_installed_version,
                "installed": repo.status.installed,
                "javascript_type": repo.information.javascript_type,
                "last_updated": repo.information.last_updated,
                "local_path": repo.content.path.local,
                "main_action": repo.main_action,
                "name": repo.display_name,
                "new": repo.status.new,
                "pending_upgrade": repo.pending_upgrade,
                "releases": repo.releases.published_tags,
                "selected_tag": repo.status.selected_tag,
                "stars": repo.data.stargazers_count,
                "state": repo.state,
                "status_description": repo.display_status_description,
                "status": repo.display_status,
                "topics": repo.data.topics,
                "updated_info": repo.status.updated_info,
                "version_or_commit": repo.display_version_or_commit,
            }

            content.append(data)

    connection.send_message(websocket_api.result_message(msg["id"], content))


@websocket_api.async_response
@websocket_api.websocket_command(
    {
        vol.Required("type"): "hacs/repository",
        vol.Optional("action"): cv.string,
        vol.Optional("repository"): cv.string,
    }
)
async def hacs_repository(hass, connection, msg):
    """Handle the hacs/repository WebSocket command."""
    hacs = get_hacs()
    try:
        repo_id = msg.get("repository")
        action = msg.get("action")

        if repo_id is None or action is None:
            return

        repository = hacs.get_by_id(repo_id)
        hacs.logger.debug(f"Running {action} for {repository.data.full_name}")

        if action == "update":
            await repository.update_repository()
            repository.status.updated_info = True
            repository.status.new = False

        elif action == "install":
            was_installed = repository.status.installed
            await repository.install()
            if not was_installed:
                hass.bus.async_fire("hacs/reload", {"force": False})

        elif action == "uninstall":
            await repository.uninstall()
            hass.bus.async_fire("hacs/reload", {"force": False})

        elif action == "hide":
            repository.status.hide = True

        elif action == "unhide":
            repository.status.hide = False

        elif action == "show_beta":
            repository.status.show_beta = True
            await repository.update_repository()

        elif action == "hide_beta":
            repository.status.show_beta = False
            await repository.update_repository()

        elif action == "delete":
            repository.status.show_beta = False
            repository.remove()

        elif action == "set_version":
            if msg["version"] == repository.data.default_branch:
                repository.status.selected_tag = None
            else:
                repository.status.selected_tag = msg["version"]
            await repository.update_repository()

        else:
            hacs.logger.error(f"WS action '{action}' is not valid")

        repository.state = None
        await hacs.data.async_write()
    except AIOGitHubException as exception:
        hass.bus.async_fire("hacs/error", {"message": str(exception)})
    except AttributeError as exception:
        hass.bus.async_fire(
            "hacs/error", {"message": f"Could not use repository with ID {repo_id}"}
        )
    except Exception as exception:  # pylint: disable=broad-except
        hass.bus.async_fire("hacs/error", {"message": str(exception)})


@websocket_api.async_response
@websocket_api.websocket_command(
    {
        vol.Required("type"): "hacs/repository/data",
        vol.Optional("action"): cv.string,
        vol.Optional("repository"): cv.string,
        vol.Optional("data"): cv.string,
    }
)
async def hacs_repository_data(hass, connection, msg):
    """Handle the hacs/repository/data WebSocket command."""
    hacs = get_hacs()
    repo_id = msg.get("repository")
    action = msg.get("action")
    data = msg.get("data")

    if repo_id is None:
        return

    if action == "add":
        if "github." in repo_id:
            repo_id = repo_id.split("github.com/")[1]

        if repo_id in hacs.common.skip:
            hacs.common.skip.remove(repo_id)

        if not hacs.get_by_name(repo_id):
            try:
                registration = await register_repository(repo_id, data.lower())
                if registration is not None:
                    raise HacsException(registration)
            except Exception as exception:  # pylint: disable=broad-except
                hass.bus.async_fire(
                    "hacs/error",
                    {
                        "action": "add_repository",
                        "exception": str(sys.exc_info()[0].__name__),
                        "message": str(exception),
                    },
                )
        else:
            hass.bus.async_fire(
                "hacs/error",
                {
                    "action": "add_repository",
                    "message": f"Repository '{repo_id}' exists in the store.",
                },
            )

        repository = hacs.get_by_name(repo_id)
    else:
        repository = hacs.get_by_id(repo_id)

    if repository is None:
        hass.bus.async_fire("hacs/repository", {})
        return

    hacs.logger.debug(f"Running {action} for {repository.data.full_name}")

    if action == "set_state":
        repository.state = data

    elif action == "set_version":
        repository.status.selected_tag = data
        await repository.update_repository()
        repository.state = None

    elif action == "add":
        repository.state = None

    else:
        repository.state = None
        hacs.logger.error(f"WS action '{action}' is not valid")

    await hacs.data.async_write()


@websocket_api.async_response
@websocket_api.websocket_command(
    {vol.Required("type"): "hacs/check_path", vol.Optional("path"): cv.string}
)
async def check_local_path(hass, connection, msg):
    """Handle the hacs/check_path WebSocket command."""
    path = msg.get("path")
    exist = {"exist": False}

    if path is None:
        return

    if os.path.exists(path):
        exist["exist"] = True

    connection.send_message(websocket_api.result_message(msg["id"], exist))


@websocket_api.async_response
@websocket_api.websocket_command({vol.Required("type"): "hacs/get_critical"})
async def get_critical_repositories(hass, connection, msg):
    """Handle the hacs/get_critical WebSocket command."""
    critical = await async_load_from_store(hass, "critical")
    if not critical:
        critical = []
    connection.send_message(websocket_api.result_message(msg["id"], critical))


@websocket_api.async_response
@websocket_api.websocket_command(
    {vol.Required("type"): "hacs/critical", vol.Optional("repository"): cv.string}
)
async def acknowledge_critical_repository(hass, connection, msg):
    """Handle the hacs/critical WebSocket command."""
    repository = msg["repository"]

    critical = await async_load_from_store(hass, "critical")
    for repo in critical:
        if repository == repo["repository"]:
            repo["acknowledged"] = True
    await async_save_to_store(hass, "critical", critical)
    connection.send_message(websocket_api.result_message(msg["id"], critical))
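For orientation, the message such a handler receives from the HACS frontend over the WebSocket connection would look roughly like this (illustrative payload only; the id is assigned by the frontend's WebSocket client, and the repository value is a placeholder):

{"id": 42, "type": "hacs/repository", "action": "install", "repository": "123456789"}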
@ -0,0 +1,198 @@
"""
Component to offer a way to select an option from a list.

For more details about this component, please refer to the documentation
at https://home-assistant.io/components/input_select/
"""
import logging

import voluptuous as vol

from homeassistant.const import ATTR_ENTITY_ID, CONF_ICON, CONF_NAME
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.restore_state import RestoreEntity

_LOGGER = logging.getLogger(__name__)

DOMAIN = 'input_select'
ENTITY_ID_FORMAT = DOMAIN + '.{}'

CONF_INITIAL = 'initial'
CONF_OPTIONS = 'options'
CONF_VALUES = 'values'

ATTR_OPTION = 'option'
ATTR_OPTIONS = 'options'

ATTR_VALUE = 'value'
ATTR_VALUES = 'values'

SERVICE_SELECT_OPTION = 'select_option'

SERVICE_SELECT_OPTION_SCHEMA = vol.Schema({
    vol.Optional(ATTR_ENTITY_ID): cv.entity_ids,
    vol.Required(ATTR_OPTION): cv.string,
})

SERVICE_SELECT_NEXT = 'select_next'

SERVICE_SELECT_NEXT_SCHEMA = vol.Schema({
    vol.Optional(ATTR_ENTITY_ID): cv.entity_ids,
})

SERVICE_SELECT_PREVIOUS = 'select_previous'

SERVICE_SELECT_PREVIOUS_SCHEMA = vol.Schema({
    vol.Optional(ATTR_ENTITY_ID): cv.entity_ids,
})


SERVICE_SET_OPTIONS = 'set_options'

SERVICE_SET_OPTIONS_SCHEMA = vol.Schema({
    vol.Required(ATTR_ENTITY_ID): cv.entity_ids,
    vol.Required(ATTR_OPTIONS):
        vol.All(cv.ensure_list, vol.Length(min=1), [cv.string]),
})


def _cv_input_select(cfg):
    """Configure validation helper for input select (voluptuous)."""
    options = cfg[CONF_OPTIONS]
    values = cfg.get(CONF_VALUES)
    initial = cfg.get(CONF_INITIAL)
    if initial is not None and initial not in options:
        raise vol.Invalid('initial state "{}" is not part of the options: {}'
                          .format(initial, ','.join(options)))
    return cfg


CONFIG_SCHEMA = vol.Schema({
    DOMAIN: vol.Schema({
        cv.slug: vol.All({
            vol.Optional(CONF_NAME): cv.string,
            vol.Required(CONF_OPTIONS):
                vol.All(cv.ensure_list, vol.Length(min=1), [cv.string]),
            vol.Optional(CONF_VALUES):
                vol.All(cv.ensure_list, vol.Length(min=1), [cv.string]),
            vol.Optional(CONF_INITIAL): cv.string,
            vol.Optional(CONF_ICON): cv.icon,
        }, _cv_input_select)})
}, required=True, extra=vol.ALLOW_EXTRA)


async def async_setup(hass, config):
    """Set up an input select."""
    component = EntityComponent(_LOGGER, DOMAIN, hass)

    entities = []

    for object_id, cfg in config[DOMAIN].items():
        name = cfg.get(CONF_NAME)
        options = cfg.get(CONF_OPTIONS)
        values = cfg.get(CONF_VALUES)
        initial = cfg.get(CONF_INITIAL)
        icon = cfg.get(CONF_ICON)
        entities.append(InputSelect(object_id, name, initial, options, values, icon))

    if not entities:
        return False

    component.async_register_entity_service(
        SERVICE_SELECT_OPTION, SERVICE_SELECT_OPTION_SCHEMA,
        'async_select_option'
    )

    component.async_register_entity_service(
        SERVICE_SELECT_NEXT, SERVICE_SELECT_NEXT_SCHEMA,
        lambda entity, call: entity.async_offset_index(1)
    )

    component.async_register_entity_service(
        SERVICE_SELECT_PREVIOUS, SERVICE_SELECT_PREVIOUS_SCHEMA,
        lambda entity, call: entity.async_offset_index(-1)
    )

    component.async_register_entity_service(
        SERVICE_SET_OPTIONS, SERVICE_SET_OPTIONS_SCHEMA,
        'async_set_options'
    )

    await component.async_add_entities(entities)
    return True


class InputSelect(RestoreEntity):
    """Representation of a select input."""

    def __init__(self, object_id, name, initial, options, values, icon):
        """Initialize a select input."""
        self.entity_id = ENTITY_ID_FORMAT.format(object_id)
        self._name = name
        self._current_option = initial
        self._options = options
        self._values = values
        self._icon = icon

    async def async_added_to_hass(self):
        """Run when entity about to be added."""
        await super().async_added_to_hass()
        if self._current_option is not None:
            return

        state = await self.async_get_last_state()
        if not state or state.state not in self._options:
            self._current_option = self._options[0]
        else:
            self._current_option = state.state

    @property
    def should_poll(self):
        """If entity should be polled."""
        return False

    @property
    def name(self):
        """Return the name of the select input."""
        return self._name

    @property
    def icon(self):
        """Return the icon to be used for this entity."""
        return self._icon

    @property
    def state(self):
        """Return the state of the component."""
        return self._current_option

    @property
    def state_attributes(self):
        """Return the state attributes."""
        return {
            ATTR_OPTIONS: self._options,
            ATTR_VALUES: self._values,
        }

    async def async_select_option(self, option):
        """Select new option."""
        if option not in self._options:
            _LOGGER.warning('Invalid option: %s (possible options: %s)',
                            option, ', '.join(self._options))
            return
        self._current_option = option
        await self.async_update_ha_state()

    async def async_offset_index(self, offset):
        """Offset current index."""
        current_index = self._options.index(self._current_option)
        new_index = (current_index + offset) % len(self._options)
        self._current_option = self._options[new_index]
        await self.async_update_ha_state()

    async def async_set_options(self, options):
        """Set options."""
        self._current_option = options[0]
        self._options = options
        await self.async_update_ha_state()

Binary file not shown.
@ -0,0 +1,10 @@
{
  "domain": "input_select",
  "name": "Input select",
  "documentation": "https://www.home-assistant.io/components/input_select",
  "requirements": [],
  "dependencies": [],
  "codeowners": [
    "@home-assistant/core"
  ]
}
|
@ -0,0 +1,2 @@
I have modified the input_select to my liking
|
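A minimal sketch of how the modified input_select could be configured (the entity name and the option/value pairs below are invented for illustration); the added values list is meant to line up with options by index so each displayed option carries a raw value:

input_select:
  watering_duration:
    name: Watering Duration
    options:
      - Short
      - Medium
      - Long
    values:
      - '5'
      - '15'
      - '30'
    initial: Medium
    icon: mdi:timer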
@ -0,0 +1,22 @@
select_next:
  description: Select the next option of an input select entity.
  fields:
    entity_id: {description: Entity id of the input select to select the next value
        for., example: input_select.my_select}
select_option:
  description: Select an option of an input select entity.
  fields:
    entity_id: {description: Entity id of the input select to select the value., example: input_select.my_select}
    option: {description: Option to be selected., example: '"Item A"'}
select_previous:
  description: Select the previous option of an input select entity.
  fields:
    entity_id: {description: Entity id of the input select to select the previous
        value for., example: input_select.my_select}
set_options:
  description: Set the options of an input select entity.
  fields:
    entity_id: {description: Entity id of the input select to set the new options
        for., example: input_select.my_select}
    options: {description: Options for the input select entity., example: '["Item
        A", "Item B", "Item C"]'}
|
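A hedged example of calling set_options from a script (the script name is a placeholder; the entity and option strings are taken from the examples above):

script:
  refresh_select_options:
    sequence:
      - service: input_select.set_options
        data:
          entity_id: input_select.my_select
          options:
            - "Item A"
            - "Item B"
            - "Item C"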
@ -0,0 +1,453 @@
|
||||||
|
"""Allows the creation of generic variable entities."""
|
||||||
|
|
||||||
|
import logging
|
||||||
|
from typing import Union, Sequence
|
||||||
|
import asyncio
|
||||||
|
import json
|
||||||
|
|
||||||
|
import voluptuous as vol
|
||||||
|
|
||||||
|
from homeassistant.core import callback
|
||||||
|
from homeassistant.helpers import config_validation as cv
|
||||||
|
from homeassistant.helpers.config_validation import make_entity_service_schema
|
||||||
|
from homeassistant.const import (
|
||||||
|
ATTR_FRIENDLY_NAME, ATTR_UNIT_OF_MEASUREMENT, CONF_VALUE_TEMPLATE,
|
||||||
|
CONF_ICON, CONF_ICON_TEMPLATE, ATTR_ENTITY_PICTURE,
|
||||||
|
CONF_ENTITY_PICTURE_TEMPLATE, ATTR_ENTITY_ID,
|
||||||
|
EVENT_HOMEASSISTANT_START, CONF_FRIENDLY_NAME_TEMPLATE, MATCH_ALL,
|
||||||
|
EVENT_STATE_CHANGED)
|
||||||
|
from homeassistant.exceptions import TemplateError
|
||||||
|
from homeassistant.helpers.entity import async_generate_entity_id
|
||||||
|
from homeassistant.helpers.entity_component import EntityComponent
|
||||||
|
from homeassistant.helpers.restore_state import RestoreEntity
|
||||||
|
from homeassistant.helpers.event import async_track_state_change
|
||||||
|
from homeassistant.helpers.json import JSONEncoder
|
||||||
|
from homeassistant.components import recorder
|
||||||
|
from homeassistant.components.recorder.models import Events
|
||||||
|
|
||||||
|
_LOGGER = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
DOMAIN = 'var'
|
||||||
|
ENTITY_ID_FORMAT = DOMAIN + '.{}'
|
||||||
|
|
||||||
|
CONF_INITIAL_VALUE = 'initial_value'
|
||||||
|
CONF_RESTORE = 'restore'
|
||||||
|
CONF_FORCE_UPDATE = 'force_update'
|
||||||
|
CONF_QUERY = 'query'
|
||||||
|
CONF_COLUMN = 'column'
|
||||||
|
CONF_TRACKED_ENTITY_ID = 'tracked_entity_id'
|
||||||
|
CONF_TRACKED_EVENT_TYPE = 'tracked_event_type'
|
||||||
|
|
||||||
|
ATTR_VALUE = 'value'
|
||||||
|
|
||||||
|
def validate_event_types(value: Union[str, Sequence]) -> Sequence[str]:
|
||||||
|
"""Validate event types."""
|
||||||
|
if value is None:
|
||||||
|
raise vol.Invalid('Event types can not be None')
|
||||||
|
if isinstance(value, str):
|
||||||
|
value = [event_type.strip() for event_type in value.split(',')]
|
||||||
|
|
||||||
|
return [event_type for event_type in value]
|
||||||
|
|
||||||
|
def validate_sql_select(value):
|
||||||
|
"""Validate that value is a SQL SELECT query."""
|
||||||
|
if not value.lstrip().lower().startswith('select'):
|
||||||
|
raise vol.Invalid('Only SELECT queries allowed')
|
||||||
|
return value
|
||||||
|
|
||||||
|
SERVICE_SET = "set"
|
||||||
|
SERVICE_SET_SCHEMA = make_entity_service_schema({
|
||||||
|
vol.Optional(ATTR_VALUE): cv.match_all,
|
||||||
|
vol.Optional(CONF_VALUE_TEMPLATE): cv.template,
|
||||||
|
vol.Optional(CONF_QUERY): vol.All(cv.string, validate_sql_select),
|
||||||
|
vol.Optional(CONF_COLUMN): cv.string,
|
||||||
|
vol.Optional(ATTR_UNIT_OF_MEASUREMENT): cv.string,
|
||||||
|
vol.Optional(CONF_RESTORE): cv.boolean,
|
||||||
|
vol.Optional(CONF_FORCE_UPDATE): cv.boolean,
|
||||||
|
vol.Optional(ATTR_FRIENDLY_NAME): cv.string,
|
||||||
|
vol.Optional(CONF_FRIENDLY_NAME_TEMPLATE): cv.template,
|
||||||
|
vol.Optional(CONF_ICON): cv.icon,
|
||||||
|
vol.Optional(CONF_ICON_TEMPLATE): cv.template,
|
||||||
|
vol.Optional(ATTR_ENTITY_PICTURE): cv.string,
|
||||||
|
vol.Optional(CONF_ENTITY_PICTURE_TEMPLATE): cv.template,
|
||||||
|
vol.Optional(CONF_TRACKED_ENTITY_ID): cv.entity_ids,
|
||||||
|
vol.Optional(CONF_TRACKED_EVENT_TYPE): validate_event_types,
|
||||||
|
})
|
||||||
|
|
||||||
|
SERVICE_UPDATE = "update"
|
||||||
|
SERVICE_UPDATE_SCHEMA = make_entity_service_schema({})
|
||||||
|
|
||||||
|
CONFIG_SCHEMA = vol.Schema({
|
||||||
|
DOMAIN: vol.Schema({
|
||||||
|
cv.slug: vol.Any({
|
||||||
|
vol.Optional(CONF_INITIAL_VALUE): cv.match_all,
|
||||||
|
vol.Optional(CONF_VALUE_TEMPLATE): cv.template,
|
||||||
|
vol.Optional(CONF_QUERY): vol.All(cv.string, validate_sql_select),
|
||||||
|
vol.Optional(CONF_COLUMN): cv.string,
|
||||||
|
vol.Optional(ATTR_UNIT_OF_MEASUREMENT): cv.string,
|
||||||
|
vol.Optional(CONF_RESTORE): cv.boolean,
|
||||||
|
vol.Optional(CONF_FORCE_UPDATE): cv.boolean,
|
||||||
|
vol.Optional(ATTR_FRIENDLY_NAME): cv.string,
|
||||||
|
vol.Optional(CONF_FRIENDLY_NAME_TEMPLATE): cv.template,
|
||||||
|
vol.Optional(CONF_ICON): cv.icon,
|
||||||
|
vol.Optional(CONF_ICON_TEMPLATE): cv.template,
|
||||||
|
vol.Optional(ATTR_ENTITY_PICTURE): cv.string,
|
||||||
|
vol.Optional(CONF_ENTITY_PICTURE_TEMPLATE): cv.template,
|
||||||
|
vol.Optional(CONF_TRACKED_ENTITY_ID): cv.entity_ids,
|
||||||
|
vol.Optional(CONF_TRACKED_EVENT_TYPE): validate_event_types,
|
||||||
|
}, None)
|
||||||
|
})
|
||||||
|
}, extra=vol.ALLOW_EXTRA)
|
||||||
|
|
||||||
|
async def async_setup(hass, config):
|
||||||
|
"""Set up variables from config."""
|
||||||
|
component = EntityComponent(_LOGGER, DOMAIN, hass)
|
||||||
|
await component.async_setup(config)
|
||||||
|
|
||||||
|
entities = []
|
||||||
|
|
||||||
|
for object_id, cfg in config[DOMAIN].items():
|
||||||
|
if not cfg:
|
||||||
|
cfg = {}
|
||||||
|
|
||||||
|
initial_value = cfg.get(CONF_INITIAL_VALUE)
|
||||||
|
unit = cfg.get(ATTR_UNIT_OF_MEASUREMENT)
|
||||||
|
restore = cfg.get(CONF_RESTORE, True)
|
||||||
|
force_update = cfg.get(CONF_FORCE_UPDATE, False)
|
||||||
|
friendly_name = cfg.get(ATTR_FRIENDLY_NAME, object_id)
|
||||||
|
icon = cfg.get(CONF_ICON)
|
||||||
|
entity_picture = cfg.get(ATTR_ENTITY_PICTURE)
|
||||||
|
|
||||||
|
value_template = cfg.get(CONF_VALUE_TEMPLATE)
|
||||||
|
friendly_name_template = cfg.get(CONF_FRIENDLY_NAME_TEMPLATE)
|
||||||
|
icon_template = cfg.get(CONF_ICON_TEMPLATE)
|
||||||
|
entity_picture_template = cfg.get(CONF_ENTITY_PICTURE_TEMPLATE)
|
||||||
|
for template in (value_template,
|
||||||
|
icon_template,
|
||||||
|
entity_picture_template,
|
||||||
|
friendly_name_template,
|
||||||
|
):
|
||||||
|
if template is not None:
|
||||||
|
template.hass = hass
|
||||||
|
|
||||||
|
manual_entity_ids = cfg.get(CONF_TRACKED_ENTITY_ID)
|
||||||
|
|
||||||
|
tracked_entity_ids = list()
|
||||||
|
if manual_entity_ids is not None:
|
||||||
|
tracked_entity_ids = list(set(manual_entity_ids))
|
||||||
|
|
||||||
|
tracked_event_types = cfg.get(CONF_TRACKED_EVENT_TYPE)
|
||||||
|
if tracked_event_types is not None:
|
||||||
|
tracked_event_types = list(set(tracked_event_types))
|
||||||
|
|
||||||
|
query = cfg.get(CONF_QUERY)
|
||||||
|
column = cfg.get(CONF_COLUMN)
|
||||||
|
|
||||||
|
session = hass.data[recorder.DATA_INSTANCE].get_session()
|
||||||
|
|
||||||
|
entities.append(
|
||||||
|
Variable(
|
||||||
|
hass,
|
||||||
|
object_id,
|
||||||
|
initial_value,
|
||||||
|
value_template,
|
||||||
|
session,
|
||||||
|
query,
|
||||||
|
column,
|
||||||
|
unit,
|
||||||
|
restore,
|
||||||
|
force_update,
|
||||||
|
friendly_name,
|
||||||
|
friendly_name_template,
|
||||||
|
icon,
|
||||||
|
icon_template,
|
||||||
|
entity_picture,
|
||||||
|
entity_picture_template,
|
||||||
|
tracked_entity_ids,
|
||||||
|
tracked_event_types)
|
||||||
|
)
|
||||||
|
|
||||||
|
if not entities:
|
||||||
|
return False
|
||||||
|
|
||||||
|
component.async_register_entity_service(
|
||||||
|
SERVICE_SET, SERVICE_SET_SCHEMA,
|
||||||
|
'async_set'
|
||||||
|
)
|
||||||
|
|
||||||
|
component.async_register_entity_service(
|
||||||
|
SERVICE_UPDATE, SERVICE_UPDATE_SCHEMA,
|
||||||
|
'async_force_update'
|
||||||
|
)
|
||||||
|
|
||||||
|
await component.async_add_entities(entities)
|
||||||
|
return True
|
||||||
|
|
||||||
|
class Variable(RestoreEntity):
|
||||||
|
"""Representation of a variable."""
|
||||||
|
|
||||||
|
def __init__(self, hass, object_id, initial_value, value_template,
|
||||||
|
session, query, column, unit, restore, force_update,
|
||||||
|
friendly_name, friendly_name_template, icon,
|
||||||
|
icon_template, entity_picture, entity_picture_template,
|
||||||
|
tracked_entity_ids, tracked_event_types):
|
||||||
|
"""Initialize a variable."""
|
||||||
|
self.hass = hass
|
||||||
|
self.entity_id = ENTITY_ID_FORMAT.format(object_id)
|
||||||
|
self._value = initial_value
|
||||||
|
self._initial_value = initial_value
|
||||||
|
self._value_template = value_template
|
||||||
|
self._session = session
|
||||||
|
if query is not None and 'LIMIT' not in query:
|
||||||
|
self._query = query.replace(";", " LIMIT 1;")
|
||||||
|
else:
|
||||||
|
self._query = query
|
||||||
|
self._column = column
|
||||||
|
self._unit = unit
|
||||||
|
self._restore = restore
|
||||||
|
self._force_update = force_update
|
||||||
|
self._friendly_name = friendly_name
|
||||||
|
self._friendly_name_template = friendly_name_template
|
||||||
|
self._icon = icon
|
||||||
|
self._icon_template = icon_template
|
||||||
|
self._entity_picture = entity_picture
|
||||||
|
self._entity_picture_template = entity_picture_template
|
||||||
|
self._tracked_entity_ids = tracked_entity_ids
|
||||||
|
self._stop_track_state_change = None
|
||||||
|
self._tracked_event_types = tracked_event_types
|
||||||
|
self._stop_track_events = []
|
||||||
|
|
||||||
|
def _is_event_in_db(self, event):
|
||||||
|
"""Query the database to see if the event has been written."""
|
||||||
|
event_id = self._session.query(Events.event_id).filter_by(
|
||||||
|
event_type=event.event_type, time_fired=event.time_fired,
|
||||||
|
event_data=json.dumps(event.data, cls=JSONEncoder)).scalar()
|
||||||
|
return event_id is not None
|
||||||
|
|
||||||
|
def _get_variable_event_listener(self):
|
||||||
|
@callback
|
||||||
|
def listener(event):
|
||||||
|
"""Update variable once monitored event fires and is recorded to the database."""
|
||||||
|
# Skip untracked state changes
|
||||||
|
if (event.event_type == EVENT_STATE_CHANGED and
|
||||||
|
self._tracked_entity_ids is not None and
|
||||||
|
event.data['entity_id'] not in self._tracked_entity_ids):
|
||||||
|
return
|
||||||
|
|
||||||
|
# Schedule update immediately if there is no query
|
||||||
|
if self._query is None:
|
||||||
|
self.async_schedule_update_ha_state(True)
|
||||||
|
return
|
||||||
|
|
||||||
|
# Otherwise poll the database scheduling update once event has been committed
|
||||||
|
async def update_var():
|
||||||
|
"""Poll the database until the event shows up."""
|
||||||
|
while not self._is_event_in_db(event):
|
||||||
|
await asyncio.sleep(1)
|
||||||
|
self.async_schedule_update_ha_state(True)
|
||||||
|
|
||||||
|
self.hass.add_job(update_var)
|
||||||
|
return listener
|
||||||
|
|
||||||
|
async def async_added_to_hass(self):
|
||||||
|
"""Register callbacks."""
|
||||||
|
|
||||||
|
@callback
|
||||||
|
def variable_startup(event):
|
||||||
|
"""Update variable event listeners on startup."""
|
||||||
|
if self._tracked_entity_ids is not None:
|
||||||
|
listener = self._get_variable_event_listener()
|
||||||
|
stop = self.hass.bus.async_listen(EVENT_STATE_CHANGED, listener)
|
||||||
|
self._stop_track_state_change = stop
|
||||||
|
if self._tracked_event_types is not None:
|
||||||
|
listener = self._get_variable_event_listener()
|
||||||
|
for event_type in self._tracked_event_types:
|
||||||
|
stop = self.hass.bus.async_listen(event_type, listener)
|
||||||
|
self._stop_track_events.append(stop)
|
||||||
|
|
||||||
|
self.hass.bus.async_listen_once(
|
||||||
|
EVENT_HOMEASSISTANT_START, variable_startup)
|
||||||
|
|
||||||
|
# Restore previous value on startup
|
||||||
|
await super().async_added_to_hass()
|
||||||
|
if self._restore:
|
||||||
|
state = await self.async_get_last_state()
|
||||||
|
if state:
|
||||||
|
self._value = state.state
|
||||||
|
|
||||||
|
@property
|
||||||
|
def should_poll(self):
|
||||||
|
"""If entity should be polled."""
|
||||||
|
return False
|
||||||
|
|
||||||
|
@property
|
||||||
|
def force_update(self):
|
||||||
|
"""Return True if state updates should be forced.
|
||||||
|
If True, a state change will be triggered anytime the state property is
|
||||||
|
updated, not just when the value changes.
|
||||||
|
"""
|
||||||
|
return self._force_update
|
||||||
|
|
||||||
|
@property
|
||||||
|
def name(self):
|
||||||
|
"""Return the name of the variable."""
|
||||||
|
return self._friendly_name
|
||||||
|
|
||||||
|
@property
|
||||||
|
def icon(self):
|
||||||
|
"""Return the icon to be used for this entity."""
|
||||||
|
return self._icon
|
||||||
|
|
||||||
|
@property
|
||||||
|
def entity_picture(self):
|
||||||
|
"""Return the entity_picture to be used for this entity."""
|
||||||
|
return self._entity_picture
|
||||||
|
|
||||||
|
@property
|
||||||
|
def state(self):
|
||||||
|
"""Return the state of the component."""
|
||||||
|
return self._value
|
||||||
|
|
||||||
|
@property
|
||||||
|
def unit_of_measurement(self):
|
||||||
|
"""Return the unit the value is expressed in."""
|
||||||
|
return self._unit
|
||||||
|
|
||||||
|
@property
|
||||||
|
def _templates_dict(self):
|
||||||
|
return {'_value': self._value_template,
|
||||||
|
'_name': self._friendly_name_template,
|
||||||
|
'_icon': self._icon_template,
|
||||||
|
'_entity_picture': self._entity_picture_template}
|
||||||
|
|
||||||
|
async def async_set(self,
|
||||||
|
value=None,
|
||||||
|
value_template=None,
|
||||||
|
query=None,
|
||||||
|
column=None,
|
||||||
|
unit=None,
|
||||||
|
restore=None,
|
||||||
|
force_update=None,
|
||||||
|
friendly_name=None,
|
||||||
|
friendly_name_template=None,
|
||||||
|
icon=None,
|
||||||
|
icon_template=None,
|
||||||
|
entity_picture=None,
|
||||||
|
entity_picture_template=None,
|
||||||
|
manual_tracked_entity_ids=None,
|
||||||
|
tracked_event_types=None):
|
||||||
|
"""Set new attributes for the variable."""
|
||||||
|
if value is not None:
|
||||||
|
self._value = value
|
||||||
|
if unit is not None:
|
||||||
|
self._unit = unit
|
||||||
|
if restore is not None:
|
||||||
|
self._restore = restore
|
||||||
|
if force_update is not None:
|
||||||
|
self._force_update = force_update
|
||||||
|
if friendly_name is not None:
|
||||||
|
self._friendly_name = friendly_name
|
||||||
|
if icon is not None:
|
||||||
|
self._icon = icon
|
||||||
|
if entity_picture is not None:
|
||||||
|
self._entity_picture = entity_picture
|
||||||
|
templates_dict = {
|
||||||
|
'_value': value_template,
|
||||||
|
'_name': friendly_name_template,
|
||||||
|
'_icon': icon_template,
|
||||||
|
'_entity_picture': entity_picture_template}
|
||||||
|
for property_name, template in templates_dict.items():
|
||||||
|
if template is not None:
|
||||||
|
template.hass = self.hass
|
||||||
|
setattr(self, property_name, template.async_render())
|
||||||
|
if query is not None:
|
||||||
|
self._query = query
|
||||||
|
if column is not None:
|
||||||
|
self._column = column
|
||||||
|
|
||||||
|
tracked_entity_ids = None
|
||||||
|
if manual_tracked_entity_ids is not None:
|
||||||
|
tracked_entity_ids = manual_tracked_entity_ids
|
||||||
|
|
||||||
|
if tracked_entity_ids is not None:
|
||||||
|
if self._stop_track_state_change:
|
||||||
|
self._stop_track_state_change()
|
||||||
|
self._tracked_entity_ids = tracked_entity_ids
|
||||||
|
listener = self._get_variable_event_listener()
|
||||||
|
stop = self.hass.bus.async_listen(EVENT_STATE_CHANGED, listener)
|
||||||
|
self._stop_track_state_change = stop
|
||||||
|
|
||||||
|
if tracked_event_types is not None:
|
||||||
|
if self._stop_track_events:
|
||||||
|
for stop in self._stop_track_events:
|
||||||
|
stop()
|
||||||
|
self._tracked_event_types = tracked_event_types
|
||||||
|
listener = self._get_variable_event_listener()
|
||||||
|
for event_type in self._tracked_event_types:
|
||||||
|
stop = self.hass.bus.async_listen(event_type, listener)
|
||||||
|
self._stop_track_events.append(stop)
|
||||||
|
|
||||||
|
await self.async_update_ha_state()
|
||||||
|
|
||||||
|
async def async_force_update(self):
|
||||||
|
await self.async_update_ha_state(True)
|
||||||
|
|
||||||
|
async def async_update(self):
|
||||||
|
"""Update the state and attributes from the templates."""
|
||||||
|
|
||||||
|
# Run the db query
|
||||||
|
db_value = None
|
||||||
|
if self._query is not None:
|
||||||
|
import sqlalchemy
|
||||||
|
try:
|
||||||
|
result = self._session.execute(self._query)
|
||||||
|
|
||||||
|
if not result.returns_rows or result.rowcount == 0:
|
||||||
|
_LOGGER.warning("%s returned no results", self._query)
|
||||||
|
self._state = None
|
||||||
|
return
|
||||||
|
|
||||||
|
for res in result:
|
||||||
|
_LOGGER.debug("result = %s", res.items())
|
||||||
|
db_value = res[self._column]
|
||||||
|
except sqlalchemy.exc.SQLAlchemyError as err:
|
||||||
|
_LOGGER.error("Error executing query %s: %s", self._query, err)
|
||||||
|
return
|
||||||
|
finally:
|
||||||
|
self._session.close()
|
||||||
|
|
||||||
|
# Update the state and attributes from their templates
|
||||||
|
for property_name, template in self._templates_dict.items():
|
||||||
|
if property_name != '_value' and template is None:
|
||||||
|
continue
|
||||||
|
elif property_name == '_value' and template is None and db_value is None:
|
||||||
|
continue
|
||||||
|
|
||||||
|
try:
|
||||||
|
rendered_template = None
|
||||||
|
if template is not None:
|
||||||
|
if db_value is not None:
|
||||||
|
rendered_template = template.async_render_with_possible_json_value(db_value, None)
|
||||||
|
else:
|
||||||
|
rendered_template = template.async_render()
|
||||||
|
|
||||||
|
if rendered_template is not None:
|
||||||
|
setattr(self, property_name, rendered_template)
|
||||||
|
elif property_name == '_value' and db_value is not None:
|
||||||
|
setattr(self, property_name, db_value)
|
||||||
|
except TemplateError as ex:
|
||||||
|
friendly_property_name = property_name[1:].replace('_', ' ')
|
||||||
|
if ex.args and ex.args[0].startswith(
|
||||||
|
"UndefinedError: 'None' has no attribute"):
|
||||||
|
# Common during HA startup - so just a warning
|
||||||
|
_LOGGER.warning('Could not render %s template %s,'
|
||||||
|
' the state is unknown.',
|
||||||
|
friendly_property_name, self._friendly_name)
|
||||||
|
continue
|
||||||
|
|
||||||
|
try:
|
||||||
|
setattr(self, property_name, getattr(super(), property_name))
|
||||||
|
except AttributeError:
|
||||||
|
_LOGGER.error('Could not render %s template %s: %s',
|
||||||
|
friendly_property_name, self._friendly_name, ex)
|
||||||
|
|
|
@ -0,0 +1,8 @@
{
  "domain": "var",
  "name": "Variable",
  "documentation": "https://github.com/snarky-snark/home-assistant-variables/blob/master/README.md",
  "dependencies": ["recorder"],
  "codeowners": ["@snarky-snark"],
  "requirements": []
}
@ -0,0 +1,49 @@
|
||||||
|
set:
|
||||||
|
description: Set attributes of a variable entity.
|
||||||
|
fields:
|
||||||
|
entity_id:
|
||||||
|
description: Entity id of the variable whose value will be set.
|
||||||
|
example: var.x
|
||||||
|
value:
|
||||||
|
description: The new value for the variable entity.
|
||||||
|
example: 13
|
||||||
|
value_template:
|
||||||
|
description: The new value template for the variable entity.
|
||||||
|
example: "{{ input_boolean.foo }}"
|
||||||
|
tracked_entity_id:
|
||||||
|
description: The new list of entities for the variable entity to track.
|
||||||
|
example: input_boolean.bar
|
||||||
|
tracked_event_type:
|
||||||
|
description: The new list of event types for the variable entity to track.
|
||||||
|
example: sunset
|
||||||
|
query:
|
||||||
|
description: An SQL SELECT query; it should return at most one result.
|
||||||
|
example: "SELECT COUNT(*) AS todays_diaper_count FROM events WHERE event_type = 'diaper_event' AND time_fired BETWEEN DATETIME('now', 'start of day') and DATETIME('now');"
|
||||||
|
column:
|
||||||
|
description: The SQL COLUMN to select from the result of the SQL QUERY.
|
||||||
|
example: 'todays_diaper_count'
|
||||||
|
restore:
|
||||||
|
description: The new restore value for the variable entity.
|
||||||
|
example: false
|
||||||
|
force_update:
|
||||||
|
description: The new force_update value for the variable entity.
|
||||||
|
example: true
|
||||||
|
unit_of_measurement:
|
||||||
|
description: The new unit of measurement for the variable entity.
|
||||||
|
example: 'ounces'
|
||||||
|
icon:
|
||||||
|
description: The new icon for the variable entity.
|
||||||
|
example: 'mdi:baby-bottle-outline'
|
||||||
|
icon_template:
|
||||||
|
description: The new icon template for the variable entity.
|
||||||
|
entity_picture:
|
||||||
|
description: The new picture for the variable entity.
|
||||||
|
example: 'local/pic.png'
|
||||||
|
entity_picture_template:
|
||||||
|
description: The new picture template for the variable entity.
|
||||||
|
update:
|
||||||
|
description: Force a variable to update its state and attributes.
|
||||||
|
fields:
|
||||||
|
entity_id:
|
||||||
|
description: Entity id of the variable that will be updated.
|
||||||
|
example: var.x
|
|
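A short, hedged example of the var set service used in an automation (the alias, trigger, and value are invented; var.x is the example entity used in the service descriptions above):

automation:
  - alias: Reset x at midnight
    trigger:
      platform: time
      at: '00:00:00'
    action:
      service: var.set
      data:
        entity_id: var.x
        value: 0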
@ -0,0 +1,207 @@
|
||||||
|
import asyncio
|
||||||
|
import logging
|
||||||
|
import json
|
||||||
|
|
||||||
|
import voluptuous as vol
|
||||||
|
|
||||||
|
from homeassistant.const import (CONF_NAME, ATTR_ICON)
|
||||||
|
from homeassistant.helpers import config_validation as cv
|
||||||
|
from homeassistant.helpers import template
|
||||||
|
from homeassistant.exceptions import TemplateError
|
||||||
|
from homeassistant.loader import bind_hass
|
||||||
|
from homeassistant.helpers.entity import Entity
|
||||||
|
from homeassistant.helpers.entity_component import EntityComponent
|
||||||
|
from homeassistant.helpers.restore_state import RestoreEntity
|
||||||
|
|
||||||
|
_LOGGER = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
DOMAIN = 'variable'
|
||||||
|
ENTITY_ID_FORMAT = DOMAIN + '.{}'
|
||||||
|
|
||||||
|
CONF_ATTRIBUTES = "attributes"
|
||||||
|
CONF_VALUE = "value"
|
||||||
|
CONF_RESTORE = "restore"
|
||||||
|
|
||||||
|
ATTR_VARIABLE = "variable"
|
||||||
|
ATTR_VALUE = 'value'
|
||||||
|
ATTR_VALUE_TEMPLATE = 'value_template'
|
||||||
|
ATTR_ATTRIBUTES = "attributes"
|
||||||
|
ATTR_ATTRIBUTES_TEMPLATE = "attributes_template"
|
||||||
|
ATTR_REPLACE_ATTRIBUTES = "replace_attributes"
|
||||||
|
|
||||||
|
SERVICE_SET_VARIABLE = "set_variable"
|
||||||
|
SERVICE_SET_VARIABLE_SCHEMA = vol.Schema({
|
||||||
|
vol.Required(ATTR_VARIABLE): cv.string,
|
||||||
|
vol.Optional(ATTR_VALUE): cv.match_all,
|
||||||
|
vol.Optional(ATTR_VALUE_TEMPLATE): cv.template,
|
||||||
|
vol.Optional(ATTR_ATTRIBUTES): dict,
|
||||||
|
vol.Optional(ATTR_ATTRIBUTES_TEMPLATE): cv.template,
|
||||||
|
vol.Optional(ATTR_REPLACE_ATTRIBUTES): cv.boolean
|
||||||
|
})
|
||||||
|
|
||||||
|
CONFIG_SCHEMA = vol.Schema({
|
||||||
|
DOMAIN: vol.Schema({
|
||||||
|
cv.slug: vol.Any({
|
||||||
|
vol.Optional(CONF_NAME): cv.string,
|
||||||
|
vol.Optional(CONF_VALUE): cv.match_all,
|
||||||
|
vol.Optional(CONF_ATTRIBUTES): dict,
|
||||||
|
vol.Optional(CONF_RESTORE): cv.boolean,
|
||||||
|
}, None)
|
||||||
|
})
|
||||||
|
}, extra=vol.ALLOW_EXTRA)
|
||||||
|
|
||||||
|
@bind_hass
|
||||||
|
def set_variable(hass, variable, value, value_template, attributes, attributes_template, replace_attributes):
|
||||||
|
"""Set input_boolean to True."""
|
||||||
|
hass.services.call(DOMAIN, SERVICE_SET_VARIABLE, {
|
||||||
|
ATTR_VARIABLE: variable,
|
||||||
|
ATTR_VALUE: value,
|
||||||
|
ATTR_VALUE_TEMPLATE: value_template,
|
||||||
|
ATTR_ATTRIBUTES: attributes,
|
||||||
|
ATTR_ATTRIBUTES_TEMPLATE: attributes_template,
|
||||||
|
ATTR_REPLACE_ATTRIBUTES: replace_attributes,
|
||||||
|
})
|
||||||
|
|
||||||
|
async def async_setup(hass, config):
|
||||||
|
"""Set up variables."""
|
||||||
|
component = EntityComponent(_LOGGER, DOMAIN, hass)
|
||||||
|
|
||||||
|
entities = []
|
||||||
|
|
||||||
|
for variable_id, variable_config in config[DOMAIN].items():
|
||||||
|
if not variable_config:
|
||||||
|
variable_config = {}
|
||||||
|
|
||||||
|
name = variable_config.get(CONF_NAME)
|
||||||
|
value = variable_config.get(CONF_VALUE)
|
||||||
|
attributes = variable_config.get(CONF_ATTRIBUTES)
|
||||||
|
restore = variable_config.get(CONF_RESTORE, False)
|
||||||
|
|
||||||
|
entities.append(Variable(variable_id, name, value, attributes, restore))
|
||||||
|
|
||||||
|
@asyncio.coroutine
|
||||||
|
def async_set_variable_service(call):
|
||||||
|
"""Handle calls to the set_variable service."""
|
||||||
|
|
||||||
|
entity_id = ENTITY_ID_FORMAT.format(call.data.get(ATTR_VARIABLE))
|
||||||
|
entity = component.get_entity(entity_id)
|
||||||
|
|
||||||
|
if entity:
|
||||||
|
target_variables = [ entity ]
|
||||||
|
tasks = [variable.async_set_variable(
|
||||||
|
call.data.get(ATTR_VALUE),
|
||||||
|
call.data.get(ATTR_VALUE_TEMPLATE),
|
||||||
|
call.data.get(ATTR_ATTRIBUTES),
|
||||||
|
call.data.get(ATTR_ATTRIBUTES_TEMPLATE),
|
||||||
|
call.data.get(ATTR_REPLACE_ATTRIBUTES, False))
|
||||||
|
for variable in target_variables]
|
||||||
|
if tasks:
|
||||||
|
yield from asyncio.wait(tasks, loop=hass.loop)
|
||||||
|
|
||||||
|
else:
|
||||||
|
_LOGGER.warning('Failed to set unknown variable: %s', entity_id)
|
||||||
|
|
||||||
|
hass.services.async_register(
|
||||||
|
DOMAIN, SERVICE_SET_VARIABLE, async_set_variable_service,
|
||||||
|
schema=SERVICE_SET_VARIABLE_SCHEMA)
|
||||||
|
|
||||||
|
await component.async_add_entities(entities)
|
||||||
|
return True
|
||||||
|
|
||||||
|
class Variable(RestoreEntity):
|
||||||
|
"""Representation of a variable."""
|
||||||
|
|
||||||
|
def __init__(self, variable_id, name, value, attributes, restore):
|
||||||
|
"""Initialize a variable."""
|
||||||
|
self.entity_id = ENTITY_ID_FORMAT.format(variable_id)
|
||||||
|
self._name = name
|
||||||
|
self._value = value
|
||||||
|
self._attributes = attributes
|
||||||
|
self._restore = restore
|
||||||
|
|
||||||
|
async def async_added_to_hass(self):
|
||||||
|
"""Run when entity about to be added."""
|
||||||
|
await super().async_added_to_hass()
|
||||||
|
if self._restore:
|
||||||
|
state = await self.async_get_last_state()
|
||||||
|
if state:
|
||||||
|
self._value = state.state
|
||||||
|
|
||||||
|
@property
|
||||||
|
def should_poll(self):
|
||||||
|
"""If entity should be polled."""
|
||||||
|
return False
|
||||||
|
|
||||||
|
@property
|
||||||
|
def name(self):
|
||||||
|
"""Return the name of the variable."""
|
||||||
|
return self._name
|
||||||
|
|
||||||
|
@property
|
||||||
|
def icon(self):
|
||||||
|
"""Return the icon to be used for this entity."""
|
||||||
|
if self._attributes is not None:
|
||||||
|
return self._attributes.get(ATTR_ICON)
|
||||||
|
else:
|
||||||
|
return None
|
||||||
|
|
||||||
|
@property
|
||||||
|
def state(self):
|
||||||
|
"""Return the state of the component."""
|
||||||
|
return self._value
|
||||||
|
|
||||||
|
@property
|
||||||
|
def state_attributes(self):
|
||||||
|
"""Return the state attributes."""
|
||||||
|
return self._attributes
|
||||||
|
|
||||||
|
@asyncio.coroutine
|
||||||
|
def async_set_variable(self, value, value_template, attributes, attributes_template, replace_attributes):
|
||||||
|
"""Update variable."""
|
||||||
|
|
||||||
|
current_state = self.hass.states.get(self.entity_id)
|
||||||
|
updated_attributes = None
|
||||||
|
updated_value = None
|
||||||
|
|
||||||
|
if not replace_attributes and self._attributes is not None:
|
||||||
|
updated_attributes = dict(self._attributes)
|
||||||
|
|
||||||
|
if attributes is not None:
|
||||||
|
if updated_attributes is not None:
|
||||||
|
updated_attributes.update(attributes)
|
||||||
|
else:
|
||||||
|
updated_attributes = attributes
|
||||||
|
|
||||||
|
elif attributes_template is not None:
|
||||||
|
attributes_template.hass = self.hass
|
||||||
|
|
||||||
|
try:
|
||||||
|
attributes = json.loads(attributes_template.async_render({ 'variable': current_state }))
|
||||||
|
|
||||||
|
if isinstance(attributes, dict):
|
||||||
|
if updated_attributes is not None:
|
||||||
|
updated_attributes.update(attributes)
|
||||||
|
else:
|
||||||
|
updated_attributes = attributes
|
||||||
|
|
||||||
|
except TemplateError as ex:
|
||||||
|
_LOGGER.error('Could not render attribute_template %s: %s',
|
||||||
|
self.entity_id, ex)
|
||||||
|
|
||||||
|
if value is not None:
|
||||||
|
updated_value = value
|
||||||
|
|
||||||
|
elif value_template is not None:
|
||||||
|
try:
|
||||||
|
value_template.hass = self.hass
|
||||||
|
updated_value = value_template.async_render({ 'variable': current_state })
|
||||||
|
except TemplateError as ex:
|
||||||
|
_LOGGER.error('Could not render value_template %s: %s',
|
||||||
|
self.entity_id, ex)
|
||||||
|
|
||||||
|
self._attributes = updated_attributes
|
||||||
|
|
||||||
|
if updated_value is not None:
|
||||||
|
self._value = updated_value
|
||||||
|
|
||||||
|
yield from self.async_update_ha_state()
|
Binary file not shown.
|
@ -0,0 +1,10 @@
{
  "domain": "variable",
  "name": "variable",
  "documentation": "https://github.com/rogro82/hass-variables",
  "requirements": [],
  "dependencies": [],
  "codeowners": [
    "@rogro82"
  ]
}
@ -0,0 +1,25 @@
# Example services.yaml entry

set_variable:
  # Description of the service
  description: Update a variable's value and/or its attributes.
  # Different fields that your service accepts
  fields:
    # Key of the field
    variable:
      description: string (required) The name of the variable to update
    value:
      description: any (optional) New value to set
    value_template:
      description: template (optional) New value to set from a template
    attributes:
      description: dictionary (optional) Attributes to set or update
    attributes_template:
      description: template (optional) Attributes to set or update from a template (should return a JSON object)
    replace_attributes:
      description: boolean (optional) Replace or merge current attributes (default false = merge)
|
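A minimal sketch of a set_variable call (the script name, variable name, and template are placeholders); per the component code above, value_template and attributes_template are rendered with the variable's current state object available as variable:

script:
  bump_counter:
    sequence:
      - service: variable.set_variable
        data:
          variable: my_counter
          value_template: "{{ (variable.state | int) + 1 }}"
          attributes:
            icon: mdi:counter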
@ -0,0 +1,10 @@
# Gitignore settings for ESPHome
# This is an example and may include too much for your use-case.
# You can modify this file to suit your needs.
/.esphome/
**/.pioenvs/
**/.piolibdeps/
**/lib/
**/src/
**/platformio.ini
/secrets.yaml
@ -0,0 +1,37 @@
esphome:
  name: outside_temp_humd
  platform: ESP8266
  board: d1_mini

wifi:
  ssid: "zoesplace"
  password: "PalDgk19591960"

  # Enable fallback hotspot (captive portal) in case wifi connection fails
  ap:
    ssid: "Outside-Temp-Humd"
    password: "1kmZsOcaCm44"

captive_portal:

# Enable logging
logger:

# Enable Home Assistant API
api:

ota:

i2c:
  sda: D2
  scl: D1
  scan: True
  id: bus_a

sensor:
  - platform: dht12
    temperature:
      name: "Outside Temperature"
    humidity:
      name: "Outside Humidity"
    update_interval: 60s
|
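Note that the Wi-Fi credentials above are committed in plain text. A hedged alternative is to move them into ESPHome's secrets.yaml (already covered by the .gitignore above) and reference them; the key names here are placeholders:

wifi:
  ssid: !secret wifi_ssid
  password: !secret wifi_password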
@ -0,0 +1,10 @@
|
||||||
|
title: Video Cameras
|
||||||
|
icon: mdi:video
|
||||||
|
cards:
|
||||||
|
- type: markdown
|
||||||
|
content: >
|
||||||
|
## Roo Cam 1
|
||||||
|
- type: picture
|
||||||
|
image: http://doggie_cam_1.kebler.net
|
||||||
|
# - type: picture-entity
|
||||||
|
# entity: camera.roo_cam_1
|
|
@ -0,0 +1,12 @@
|
||||||
|
title: Electronics Closet
|
||||||
|
name: Closet
|
||||||
|
icon: mdi:door
|
||||||
|
cards:
|
||||||
|
- type: entities
|
||||||
|
show_header_toggle: false
|
||||||
|
title: Temperature Control
|
||||||
|
entities:
|
||||||
|
- sensor.closet_temperature
|
||||||
|
- input_number.fan_on_temp
|
||||||
|
- switch.fan_automation_enabled
|
||||||
|
- fan.fan_state
|
|
@ -0,0 +1,33 @@
|
||||||
|
title: UCI Base FIO Example
|
||||||
|
cards:
|
||||||
|
- type: vertical-stack
|
||||||
|
cards:
|
||||||
|
- type: markdown
|
||||||
|
content: >
|
||||||
|
# Four In One UCI Base Example
|
||||||
|
- type: horizontal-stack
|
||||||
|
cards:
|
||||||
|
- type: entity-button
|
||||||
|
entity: switch.fio_switch_1
|
||||||
|
icon: mdi:water
|
||||||
|
name: Switch 1
|
||||||
|
tap_action:
|
||||||
|
action: toggle
|
||||||
|
- type: entity-button
|
||||||
|
entity: switch.fio_switch_2
|
||||||
|
icon: mdi:water
|
||||||
|
name: Switch 2
|
||||||
|
tap_action:
|
||||||
|
action: toggle
|
||||||
|
- type: entity-button
|
||||||
|
entity: switch.fio_switch_3
|
||||||
|
icon: mdi:water
|
||||||
|
name: Switch 3
|
||||||
|
tap_action:
|
||||||
|
action: toggle
|
||||||
|
- type: entity-button
|
||||||
|
entity: switch.fio_switch_4
|
||||||
|
icon: mdi:water
|
||||||
|
name: Switch 4
|
||||||
|
tap_action:
|
||||||
|
action: toggle
|
|
@ -0,0 +1,59 @@
|
||||||
|
title: UCI Gpio Example
|
||||||
|
cards:
|
||||||
|
- type: vertical-stack
|
||||||
|
cards:
|
||||||
|
- type: markdown
|
||||||
|
content: >
|
||||||
|
# Control GPIO Relays on Pine64
|
||||||
|
- type: horizontal-stack
|
||||||
|
cards:
|
||||||
|
- type: entity-button
|
||||||
|
entity: switch.pine64_switch_1
|
||||||
|
icon: mdi:water
|
||||||
|
name: Relay 1
|
||||||
|
tap_action:
|
||||||
|
action: toggle
|
||||||
|
- type: entity-button
|
||||||
|
entity: switch.pine64_switch_2
|
||||||
|
icon: mdi:water
|
||||||
|
name: Relay 2
|
||||||
|
tap_action:
|
||||||
|
action: toggle
|
||||||
|
- type: entity-button
|
||||||
|
entity: switch.pine64_switch_3
|
||||||
|
icon: mdi:water
|
||||||
|
name: Relay 3
|
||||||
|
tap_action:
|
||||||
|
action: toggle
|
||||||
|
- type: entity-button
|
||||||
|
entity: switch.pine64_switch_4
|
||||||
|
icon: mdi:water
|
||||||
|
name: Relay 4
|
||||||
|
tap_action:
|
||||||
|
action: toggle
|
||||||
|
- type: horizontal-stack
|
||||||
|
cards:
|
||||||
|
- type: entity-button
|
||||||
|
entity: switch.pine64_switch_5
|
||||||
|
icon: mdi:water
|
||||||
|
name: Relay 5
|
||||||
|
tap_action:
|
||||||
|
action: toggle
|
||||||
|
- type: entity-button
|
||||||
|
entity: switch.pine64_switch_6
|
||||||
|
icon: mdi:water
|
||||||
|
name: Relay 6
|
||||||
|
tap_action:
|
||||||
|
action: toggle
|
||||||
|
- type: entity-button
|
||||||
|
entity: switch.pine64_switch_7
|
||||||
|
icon: mdi:water
|
||||||
|
name: Relay 7
|
||||||
|
tap_action:
|
||||||
|
action: toggle
|
||||||
|
- type: entity-button
|
||||||
|
entity: switch.pine64_switch_8
|
||||||
|
icon: mdi:water
|
||||||
|
name: Relay 8
|
||||||
|
tap_action:
|
||||||
|
action: toggle
|
|
@ -0,0 +1,43 @@
|
||||||
|
# --------------- IRRIGATION -------------
|
||||||
|
title: Irrigation
|
||||||
|
icon: mdi:water
|
||||||
|
cards:
|
||||||
|
- type: vertical-stack
|
||||||
|
cards:
|
||||||
|
- type: markdown
|
||||||
|
content: >
|
||||||
|
# Irrigation
|
||||||
|
- type: vertical-stack
|
||||||
|
cards:
|
||||||
|
- type: vertical-stack
|
||||||
|
cards:
|
||||||
|
- type: markdown
|
||||||
|
content: >
|
||||||
|
## Front Yard North
|
||||||
|
- type: entities
|
||||||
|
entities:
|
||||||
|
- entity: sensor.zone_1_last_run
|
||||||
|
icon: mdi:clock
|
||||||
|
- entity: sensor.zone_1_repeat
|
||||||
|
icon: mdi:timer
|
||||||
|
- entity: sensor.zone_1_next_run
|
||||||
|
icon: mdi:clock
|
||||||
|
- type: horizontal-stack
|
||||||
|
cards:
|
||||||
|
- type: entity-button
|
||||||
|
entity: switch.zone_1
|
||||||
|
icon: mdi:water
|
||||||
|
name: on/off
|
||||||
|
tap_action:
|
||||||
|
action: toggle
|
||||||
|
- type: entity-button
|
||||||
|
entity: switch.zone_1_timer
|
||||||
|
icon: mdi:timer
|
||||||
|
name: timer
|
||||||
|
tap_action:
|
||||||
|
action: toggle
|
||||||
|
- type: entity-button
|
||||||
|
name: set
|
||||||
|
entity: group.zone_1_set
|
||||||
|
tap_action:
|
||||||
|
action: more-info
|
|
@ -0,0 +1,21 @@
|
||||||
|
title: Lights
|
||||||
|
cards:
|
||||||
|
- type: entities
|
||||||
|
show_header_toggle: false
|
||||||
|
title: Raw Sonoff Control
|
||||||
|
entities:
|
||||||
|
- switch.sonoff_white
|
||||||
|
- switch.sonoff_brown
|
||||||
|
- switch.all_sonoffs
|
||||||
|
- type: entities
|
||||||
|
show_header_toggle: false
|
||||||
|
title: Indoor Security Lights
|
||||||
|
entities:
|
||||||
|
- entity: switch.all_sonoffs
|
||||||
|
name: Lights State (manual override)
|
||||||
|
- entity: input_datetime.security_lights_interior_time_off
|
||||||
|
name: Time Of Day To Turn Off (24HR)
|
||||||
|
- entity: automation.security_lights_interior_time_on
|
||||||
|
name: Lights On At Sunset Automation Enabled
|
||||||
|
- entity: automation.security_lights_interior_time_off
|
||||||
|
name: Lights Off Automation Enabled
|
|
@ -0,0 +1,12 @@
|
||||||
|
title: Node Testing
|
||||||
|
cards:
|
||||||
|
- type: vertical-stack
|
||||||
|
cards:
|
||||||
|
# - type: horizontal-stack
|
||||||
|
- type: entities
|
||||||
|
entities:
|
||||||
|
- entity: input_number.node_test_first_number
|
||||||
|
- entity: input_number.node_test_second_number
|
||||||
|
- entity: variable.node_test_sum
|
||||||
|
- entity: variable.node_test_switch
|
||||||
|
name: Dummy Switch (on>100)
|
|
@ -0,0 +1,43 @@
|
||||||
|
# Example Scheduler
|
||||||
|
title: Scheduler
|
||||||
|
cards:
|
||||||
|
- type: markdown
|
||||||
|
content: >
|
||||||
|
## Scheduler Example
|
||||||
|
- type: vertical-stack
|
||||||
|
cards:
|
||||||
|
- type: entities
|
||||||
|
entities:
|
||||||
|
# - entity: input_datetime.test_schedule_base
|
||||||
|
- type: section
|
||||||
|
label: 'Base Time of Day'
|
||||||
|
- entity: input_number.test_schedule_base_hour
|
||||||
|
- entity: input_number.test_schedule_base_minute
|
||||||
|
- type: divider
|
||||||
|
- entity: input_select.test_schedule_repeatin
|
||||||
|
# - entity: variable.test_schedule_countdown
|
||||||
|
# - entity: variable.test_schedule_next_timestamp
|
||||||
|
- type: divider
|
||||||
|
- entity: variable.test_schedule_next
|
||||||
|
- entity: sensor.test_schedule_next
|
||||||
|
- entity: variable.test_schedule_countdown
|
||||||
|
- type: horizontal-stack
|
||||||
|
cards:
|
||||||
|
- type: entity-button
|
||||||
|
name: Enable Scheduler
|
||||||
|
icon: mdi:timer
|
||||||
|
tap_action:
|
||||||
|
action: call-service
|
||||||
|
service: script.turn_on
|
||||||
|
service_data:
|
||||||
|
entity_id: script.enable_test_schedule
|
||||||
|
entity: script.enable_test_schedule
|
||||||
|
- type: entity-button
|
||||||
|
name: Disable Scheduler
|
||||||
|
icon: mdi:timer
|
||||||
|
tap_action:
|
||||||
|
action: call-service
|
||||||
|
service: script.turn_on
|
||||||
|
service_data:
|
||||||
|
entity_id: script.disable_test_schedule
|
||||||
|
entity: script.disable_test_schedule
|
|
@ -0,0 +1,24 @@
|
||||||
|
title: Temps - Humidity
|
||||||
|
icon: mdi:thermometer-lines
|
||||||
|
cards:
|
||||||
|
- type: entities
|
||||||
|
show_header_toggle: false
|
||||||
|
title: Outside
|
||||||
|
entities:
|
||||||
|
- sensor.outside_temperature
|
||||||
|
- sensor.outside_humidity
|
||||||
|
- type: weather-forecast
|
||||||
|
entity: weather.praire_city_weather
|
||||||
|
# - type: custom:banner-card
|
||||||
|
# heading:
|
||||||
|
# - mdi:door
|
||||||
|
# - Closet
|
||||||
|
# background: "#B0C2ED"
|
||||||
|
# link: /lovelace/2
|
||||||
|
# entities:
|
||||||
|
# - entity: sensor.closet_temperature
|
||||||
|
# name: Temperature
|
||||||
|
# - entity: input_number.fan_on_temp
|
||||||
|
# name: Set Temperature
|
||||||
|
# - entity: fan.fan_state
|
||||||
|
# name: Fan State
|
|
@ -0,0 +1,30 @@
|
||||||
|
title: Timer
|
||||||
|
cards:
|
||||||
|
- type: vertical-stack
|
||||||
|
cards:
|
||||||
|
- type: horizontal-stack
|
||||||
|
cards:
|
||||||
|
- type: entity-button
|
||||||
|
name: start timer countdown
|
||||||
|
icon: mdi:timer
|
||||||
|
tap_action:
|
||||||
|
action: call-service
|
||||||
|
service: script.turn_on
|
||||||
|
service_data:
|
||||||
|
entity_id: script.start_test_timer
|
||||||
|
entity: script.start_test_timer
|
||||||
|
- type: entity-button
|
||||||
|
name: abort timer
|
||||||
|
icon: mdi:timer-off
|
||||||
|
tap_action:
|
||||||
|
action: call-service
|
||||||
|
service: script.turn_on
|
||||||
|
service_data:
|
||||||
|
entity_id: script.abort_test_timer
|
||||||
|
entity: script.abort_test_timer
|
||||||
|
- type: entities
|
||||||
|
entities:
|
||||||
|
- entity: variable.test_timer_countdown
|
||||||
|
- entity: variable.test_timer_device
|
||||||
|
name: dummy device
|
||||||
|
- entity: input_number.test_timer_duration
|
|
@ -0,0 +1,20 @@
|
||||||
|
title: UCI Lighting System
|
||||||
|
icon: mdi:lightbulb
|
||||||
|
cards:
|
||||||
|
- type: glance
|
||||||
|
entities:
|
||||||
|
- entity: light.panic
|
||||||
|
icon: mdi:lightbulb
|
||||||
|
name: Panic
|
||||||
|
tap_action:
|
||||||
|
action: toggle
|
||||||
|
- entity: sensor.panic_on_percent
|
||||||
|
icon: mdi:flash
|
||||||
|
name: 'circuits on'
|
||||||
|
show_state: true
|
||||||
|
- type: glance
|
||||||
|
entities:
|
||||||
|
- entity: switch.circuit_1
|
||||||
|
name: Circuit 1
|
||||||
|
tap_action:
|
||||||
|
action: toggle
|
|
@ -0,0 +1 @@
/archive/
@ -0,0 +1,30 @@
|
||||||
|
homeassistant:
|
||||||
|
name: 238 McHaley
|
||||||
|
# latitude: !secret latitude_loc_home
|
||||||
|
# longitude: !secret longitude_loc_home
|
||||||
|
# elevation: !secret evelation_loc_home
|
||||||
|
time_zone: America/Los_Angeles
|
||||||
|
# temperature_unit: F
|
||||||
|
# unit_system: metric
|
||||||
|
# customize_glob: !include config/customize_glob.yaml
|
||||||
|
# enables system health component
|
||||||
|
websocket_api:
|
||||||
|
system_health:
|
||||||
|
# Enables the frontend
|
||||||
|
frontend:
|
||||||
|
# Enables configuration UI
|
||||||
|
config:
|
||||||
|
# Show links to resources in log and frontend
|
||||||
|
# introduction:
|
||||||
|
# Discover some devices automatically
|
||||||
|
#discovery:
|
||||||
|
# Allows you to issue voice commands from the frontend in enabled browsers
|
||||||
|
#conversation:
|
||||||
|
# Enables support for tracking state changes over time
|
||||||
|
#history:
|
||||||
|
# View all events in a logbook
|
||||||
|
#logbook:
|
||||||
|
# Enables a map showing the location of tracked devices
|
||||||
|
#map:
|
||||||
|
# Track the sun
|
||||||
|
sun:
|
|
@ -0,0 +1,4 @@
camera:
  - platform: generic
    name: Roo Cam 1
    stream_source: "http://doggie_cam_1.kebler.net"
@ -0,0 +1,70 @@
|
||||||
|
homeassistant:
|
||||||
|
customize:
|
||||||
|
fan.fan_state:
|
||||||
|
friendly_name: 'Fan State (manual override)'
|
||||||
|
sensor.closet_temperature:
|
||||||
|
friendly_name: 'Closet Temperature (C)'
|
||||||
|
input_number.fan_on_temp:
|
||||||
|
friendly_name: 'Fan On Set Temperature (C)'
|
||||||
|
|
||||||
|
# CLOSET ENTITIES
|
||||||
|
fan:
|
||||||
|
- platform: mqtt
|
||||||
|
name: "fan state"
|
||||||
|
state_topic: "closet/status/fan/"
|
||||||
|
command_topic: "closet/fan"
|
||||||
|
|
||||||
|
switch:
|
||||||
|
- platform: mqtt
|
||||||
|
name: "Fan Automation Enabled"
|
||||||
|
state_topic: "closet/status/fan/automation"
|
||||||
|
command_topic: "closet/fan/automation"
|
||||||
|
state_on: "enabled"
|
||||||
|
state_off: "disabled"
|
||||||
|
payload_on: "enable"
|
||||||
|
payload_off: "disable"
|
||||||
|
icon: mdi:fan
|
||||||
|
|
||||||
|
sensor:
|
||||||
|
- platform: mqtt
|
||||||
|
name: "closet temperature"
|
||||||
|
state_topic: "closet/status/temp"
|
||||||
|
unit_of_measurement: 'Celsius'
|
||||||
|
|
||||||
|
# Example configuration.yaml entry using 'input_number' in an action in an automation
|
||||||
|
input_number:
|
||||||
|
fan_on_temp:
|
||||||
|
name: Fan On Temp
|
||||||
|
initial: 25
|
||||||
|
min: 20
|
||||||
|
max: 30
|
||||||
|
step: 0.5
|
||||||
|
unit_of_measurement: Celsius
|
||||||
|
icon: mdi:target
|
||||||
|
mode: "box"
|
||||||
|
|
||||||
|
# This automation runs when a value is received via MQTT on the retained topic closet/status/fan/automation/temp.
# It sets the value slider in the GUI. The slider also has its own automation that runs when its value is changed.
|
||||||
|
automation:
|
||||||
|
- alias: Closet Fan On Set Temperature slider
|
||||||
|
trigger:
|
||||||
|
platform: mqtt
|
||||||
|
topic: 'closet/status/fan/automation/temp'
|
||||||
|
action:
|
||||||
|
service: input_number.set_value
|
||||||
|
data_template:
|
||||||
|
entity_id: input_number.fan_on_temp
|
||||||
|
value: "{{ trigger.payload }}"
|
||||||
|
|
||||||
|
# This second automation script runs when the target temperature slider is moved.
|
||||||
|
# It publishes the new value to the closet/fan/automation/temp command topic so the device follows the slider.
|
||||||
|
- alias: Closet Fan Temp Slider Moved
|
||||||
|
trigger:
|
||||||
|
platform: state
|
||||||
|
entity_id: input_number.fan_on_temp
|
||||||
|
action:
|
||||||
|
service: mqtt.publish
|
||||||
|
data_template:
|
||||||
|
topic: 'closet/fan/automation/temp'
|
||||||
|
retain: true
|
||||||
|
payload: "{{ states('input_number.fan_on_temp') }}"
|
|
@ -0,0 +1,29 @@
|
||||||
|
input_datetime:
|
||||||
|
security_lights_interior_time_off:
|
||||||
|
has_time: true
|
||||||
|
initial: '22:30'
|
||||||
|
|
||||||
|
#
|
||||||
|
# security_lights_interior_on_offset:
|
||||||
|
# has_time: true
|
||||||
|
# initial: ':00'
|
||||||
|
|
||||||
|
automation:
|
||||||
|
- alias: security_lights_interior_time_off
|
||||||
|
trigger:
|
||||||
|
- platform: template
|
||||||
|
value_template: "{{ states('sensor.time') == states('input_datetime.security_lights_interior_time_off')[0:5] }}"
|
||||||
|
action:
|
||||||
|
- service: switch.turn_off
|
||||||
|
data:
|
||||||
|
entity_id: group.sonoffs
|
||||||
|
|
||||||
|
- alias: security_lights_interior_time_on
|
||||||
|
trigger:
|
||||||
|
platform: sun
|
||||||
|
event: sunset
|
||||||
|
offset: "-00:30:00"
|
||||||
|
action:
|
||||||
|
- service: switch.turn_on
|
||||||
|
data:
|
||||||
|
entity_id: group.sonoffs
|
|
@ -0,0 +1,23 @@
|
||||||
|
# values to store within HA
|
||||||
|
variable:
|
||||||
|
node_test_sum:
|
||||||
|
value: 0
|
||||||
|
# dummy device value to turn on and off
|
||||||
|
node_test_switch:
|
||||||
|
value: 'OFF'
|
||||||
|
attributes:
|
||||||
|
icon: mdi:flash
|
||||||
|
|
||||||
|
input_number:
|
||||||
|
node_test_first_number:
|
||||||
|
name: "First Number"
|
||||||
|
initial: 25
|
||||||
|
min: 0
|
||||||
|
max: 100
|
||||||
|
step: 1
|
||||||
|
node_test_second_number:
|
||||||
|
name: "Second Number"
|
||||||
|
initial: 75
|
||||||
|
min: 0
|
||||||
|
max: 100
|
||||||
|
step: 1
|
|
@ -0,0 +1,203 @@
|
||||||
|
|
||||||
|
# values to store within HA
|
||||||
|
variable:
|
||||||
|
test_schedule_countdown:
|
||||||
|
value: 0
|
||||||
|
attributes:
|
||||||
|
icon: mdi:clock
|
||||||
|
test_schedule_next:
|
||||||
|
value: 0
|
||||||
|
|
||||||
|
# input_boolean:
|
||||||
|
# test_schedule_enable:
|
||||||
|
|
||||||
|
input_number:
|
||||||
|
test_schedule_base_hour:
|
||||||
|
name: "Hour of Day"
|
||||||
|
unit_of_measurement: hours
|
||||||
|
initial: 6
|
||||||
|
min: 0
|
||||||
|
max: 24
|
||||||
|
step: 1
|
||||||
|
icon: mdi:camera-timer
|
||||||
|
test_schedule_base_minute:
|
||||||
|
name: "Minute of Day"
|
||||||
|
unit_of_measurement: minutes
|
||||||
|
initial: 0
|
||||||
|
min: 0
|
||||||
|
max: 45
|
||||||
|
step: 15
|
||||||
|
icon: mdi:camera-timer
|
||||||
|
|
||||||
|
# use sensor.test_schedule_repeatin to get corresponding value for option
|
||||||
|
input_select:
|
||||||
|
test_schedule_repeatin:
|
||||||
|
name: Repeat Every
|
||||||
|
icon: mdi:repeat
|
||||||
|
initial: 12 Hours
|
||||||
|
options:
|
||||||
|
- 3 Hours
|
||||||
|
- 6 Hours
|
||||||
|
- 12 Hours
|
||||||
|
- Day
|
||||||
|
- Every Other Day
|
||||||
|
- Every Third Day
|
||||||
|
- Every Week
|
||||||
|
values:
|
||||||
|
- 3
|
||||||
|
- 6
|
||||||
|
- 12
|
||||||
|
- 24
|
||||||
|
- 48
|
||||||
|
- 72
|
||||||
|
- 168
|
||||||
|
|
||||||
|
sensor:
|
||||||
|
- platform: template # Derived variables
|
||||||
|
sensors:
|
||||||
|
# format the next timestamp for humans
|
||||||
|
test_schedule_next:
|
||||||
|
friendly_name: Next run to start at
|
||||||
|
entity_id: variable.test_schedule_next_timestamp
|
||||||
|
value_template: '{{ states.variable.test_schedule_next.state | int | timestamp_custom("%A, %d %h %H:%M") }}'
|
||||||
|
test_schedule_countdown:
|
||||||
|
friendly_name: Countdown to next run
|
||||||
|
entity_id: variable.test_schedule_countdown
|
||||||
|
# value_template: '{{ states.variable.test_schedule_countdown.state | int | timestamp_custom("%A, %d %h %H:%M") }}'
|
||||||
|
value_template: >-
|
||||||
|
{% set time = states.variable.test_schedule_countdown.state | int %}
|
||||||
|
{% set minutes = ((time % 3600) / 60) | int %}
|
||||||
|
{% set hours = ((time % 86400) / 3600) | int %}
|
||||||
|
{% set days = (time / 86400) | int %}
|
||||||
|
{{time}} seconds is {{ days }}:{{ hours }}:{{minutes}} (D:H:M)
|
||||||
|
test_schedule_delta:
|
||||||
|
entity_id: input_select.test_schedule_repeatin
|
||||||
|
unit_of_measurement: 'hours'
|
||||||
|
value_template: >
|
||||||
|
{% for option in state_attr("input_select.test_schedule_repeatin", "options") -%}
|
||||||
|
{% if is_state("input_select.test_schedule_repeatin", option) -%}
|
||||||
|
{{ state_attr("input_select.test_schedule_repeatin", 'values')[loop.index - 1] }}
|
||||||
|
{%- endif %}
|
||||||
|
{%- endfor %}
|
||||||
|
|
||||||
|
|
||||||
|
switch:
|
||||||
|
- platform: mqtt
|
||||||
|
name: "Scheduler Test Solenoid"
|
||||||
|
state_topic: "status/irrigation/zone_1"
|
||||||
|
command_topic: "set/irrigation/zone_1"
|
||||||
|
payload_on: "ON"
|
||||||
|
payload_off: "OFF"
|
||||||
|
qos: 0
|
||||||
|
retain: true
|
||||||
|
|
||||||
|
|
||||||
|
script:
|
||||||
|
enable_test_schedule:
|
||||||
|
sequence:
|
||||||
|
- event: ENABLE_SCHEDULE
|
||||||
|
- service: variable.set_variable
|
||||||
|
data:
|
||||||
|
variable: test_schedule_countdown
|
||||||
|
value_template: '{{ states.variable.test_schedule_next_timestamp.state | int - as_timestamp(now()) }}'
|
||||||
|
# value_template: "{{ as_timestamp(now()) }}"
|
||||||
|
# - service: variable.set_variable
|
||||||
|
# data:
|
||||||
|
# variable: test_schedule_dummy_device
|
||||||
|
# value: 'ON'
|
||||||
|
# - service: automation.turn_on
|
||||||
|
# entity_id: automation.test_schedule_countdown
|
||||||
|
# - service: automation.turn_on
|
||||||
|
# data:
|
||||||
|
# entity_id: automation.test_timer_zero_trigger
|
||||||
|
disable_test_schedule:
|
||||||
|
sequence:
|
||||||
|
- service: automation.turn_off
|
||||||
|
data:
|
||||||
|
entity_id: automation.test_schedule_countdown
|
||||||
|
- service: variable.set_variable
|
||||||
|
data:
|
||||||
|
variable: test_schedule_countdown
|
||||||
|
value: 0
|
||||||
|
|
||||||
|
|
||||||
|
automation:
|
||||||
|
- alias: test_schedule_countdown
|
||||||
|
initial_state: false
|
||||||
|
trigger:
|
||||||
|
platform: time_pattern
|
||||||
|
seconds: '/1'
|
||||||
|
action:
|
||||||
|
- service: variable.set_variable
|
||||||
|
data:
|
||||||
|
variable: test_schedule_countdown
|
||||||
|
value_template: '{{ [((variable.state | int) - 1), 0] | max }}'
|
||||||
|
  # - alias: test_schedule_changed
  #   # initial_state: false
  #   trigger:
  #     platform: state
  #   action:
  #     - service: variable.set_variable
  #       data:
  #         variable: test_schedule_countdown
  #         value_template: '{{ states.variable.test_schedule_next_timestamp.state | int - as_timestamp(now()) }}'

  #     - service: automation.turn_off
  #       data:
  #         entity_id: automation.test_timer_zero_trigger
  #     - service: variable.set_variable
  #       data:
  #         variable: test_timer_device
  #         value: 'OFF'

  #
  #

  # - alias: test_timer_zero_trigger
  #   initial_state: false
  #   trigger:
  #     platform: numeric_state
  #     entity_id: variable.test_timer_countdown
  #     below: 1
  #   action:
  #     - service: automation.turn_off
  #       entity_id: automation.test_timer_countdown
  #     - service: variable.set_variable
  #       data:
  #         variable: test_timer_device
  #         value: 'OFF'
  # #

  # return corresponding value from input_select option of same name
  # test_schedule_delta:
  #   entity_id: input_select.test_schedule_repeatin
  #   value_template: >
  #     {% for option in state_attr("input_select.test_schedule_repeatin", "options") -%}
  #       {% if is_state("input_select.test_schedule_repeatin", option) -%}
  #         {{ state_attr("input_select.test_schedule_repeatin", 'values')[loop.index - 1] }}
  #       {%- endif %}
  #     {%- endfor %}
  # test_schedule_base_timestamp:
  #   entity_id: input_datetime.test_schedule_base
  #   value_template: >
  #     {{
  #       as_timestamp(now())
  #       - ( now().second + now().minute | int * 60 + now().hour | int * 3600 )
  #       + state_attr('input_datetime.test_schedule_base','hour')|int * 3600
  #       + state_attr('input_datetime.test_schedule_base','minute') * 60
  #     }}
  # value_template: "{{ (state_attr('input_datetime.test_schedule_base','hour')|int * 3600 + state_attr('input_datetime.test_schedule_base','minute')|int * 60) | timestamp_custom('%A, %d %h %H:%M') }}"
  # value_template: >
  #   "{{ (state_attr('input_datetime.test_schedule_base','hour')|int * 3600 +
  #   state_attr('input_datetime.test_schedule_base','minute')|int * 60 +
  #   as_timestamp(now()))
  #   | timestamp_custom("%A, %d %h %H:%M") }}"

  # value_template: "{{ states(input_datetime.test_schedule_base) }}"
  # {{ as_timestamp(now()) + as_timestamp(states.input_datetime.test_schedule_base) }}
@ -0,0 +1,85 @@
homeassistant:
  customize:
    switch.all_sonoffs:
      friendly_name: "All Sonoffs on Network"

switch:
  - platform: mqtt
    name: "Sonoff White"
    state_topic: "stat/sonoff/white/RESULT"
    value_template: "{{ value_json.POWER }}"
    command_topic: "cmnd/sonoff/white/POWER"
    # availability_topic: "tele/sonoff/STATE"
    qos: 1
    payload_on: "ON"
    payload_off: "OFF"
    # payload_available: "Online"
    # payload_not_available: "Offline"
    retain: true

  - platform: mqtt
    name: "Sonoff Brown"
    state_topic: "stat/sonoff/brown/RESULT"
    value_template: "{{ value_json.POWER }}"
    command_topic: "cmnd/sonoff/brown/POWER"
    # availability_topic: "tele/sonoff/STATE"
    qos: 1
    payload_on: "ON"
    payload_off: "OFF"
    # payload_available: "Online"
    # payload_not_available: "Offline"
    retain: true

  - platform: template
    switches:
      all_sonoffs:
        value_template: "{{ is_state('group.sonoffs','on') }}"
        turn_on:
          - service: homeassistant.turn_on
            entity_id: group.sonoffs
        turn_off:
          - service: homeassistant.turn_off
            entity_id: group.sonoffs

group:
  sonoffs:
    entities:
      - switch.sonoff_white
      - switch.sonoff_brown

automation:
  - alias: "Power state on HA start-up"
    trigger:
      platform: homeassistant
      event: start
    action:
      - service: mqtt.publish
        data:
          topic: "cmnd/sonoff/white/state"
          payload: ""
      - service: mqtt.publish
        data:
          topic: cmnd/sonoff/white/POWER

# automation:
#   - alias: "Power State Sonoffs"
#     trigger:
#       platform: homeassistant
#       event: start
#     action:
#       - service: mqtt.publish
#         data:
#           topic: "cmnd/sonoff/white/state"
#           payload: ""
#       - service: mqtt.publish
#         data:
#           topic: cmnd/tasmotas/POWER
#       - service: mqtt.publish
#         data:
#           topic: cmnd/tasmotas/POWER2
#       - service: mqtt.publish
#         data:
#           topic: cmnd/tasmotas/POWER3
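# Usage note (added): with Tasmota's default topic layout, publishing an empty payload to
# cmnd/<device>/POWER only queries the relay state, and the device answers on
# stat/<device>/RESULT, which is the state_topic the switches above subscribe to. That is what
# the start-up automation relies on; the same query can be sent manually, e.g. from
# Developer Tools:
#
#   service: mqtt.publish
#   data:
#     topic: "cmnd/sonoff/white/POWER"
#     payload: ""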
@ -0,0 +1,117 @@
# values to store within HA
variable:
  test_timer_countdown:
    value: 0
    attributes:
      icon: mdi:timer
  # dummy device value to turn on and off
  test_timer_device:
    value: 'OFF'
    attributes:
      icon: mdi:flash

input_number:
  test_timer_duration:
    name: "Duration"
    unit_of_measurement: seconds
    initial: 3
    min: 1
    max: 15
    step: 1
    icon: mdi:camera-timer

script:
  start_test_timer:
    sequence:
      - service: variable.set_variable
        data:
          variable: test_timer_countdown
          value_template: '{{ states.input_number.test_timer_duration.state | int }}'
      - service: variable.set_variable
        data:
          variable: test_timer_device
          value: 'ON'
      - service: automation.turn_on
        data:
          entity_id: automation.test_timer_countdown
      - service: automation.turn_on
        data:
          entity_id: automation.test_timer_zero_trigger

  abort_test_timer:
    sequence:
      - service: automation.turn_off
        data:
          entity_id: automation.test_timer_countdown
      - service: automation.turn_off
        data:
          entity_id: automation.test_timer_zero_trigger
      - service: variable.set_variable
        data:
          variable: test_timer_device
          value: 'OFF'
      - service: persistent_notification.create
        data:
          message: "Timer was Aborted"
          title: "ALERT"

#
automation:
  - alias: test_timer_zero_trigger
    initial_state: false
    trigger:
      platform: numeric_state
      entity_id: variable.test_timer_countdown
      below: 1
    action:
      - service: automation.turn_off
        entity_id: automation.test_timer_countdown
      - service: variable.set_variable
        data:
          variable: test_timer_device
          value: 'OFF'
  #
  - alias: test_timer_countdown
    initial_state: false
    trigger:
      platform: time_pattern
      seconds: '/1'
    action:
      - service: variable.set_variable
        data:
          variable: test_timer_countdown
          value_template: '{{ [((variable.state | int) - 1), 0] | max }}'

# lovelace ui view for timer, copy into ui-lovelace.yaml as a view (lovelace in yaml mode)
# - title: Timer
#   cards:
#     - type: vertical-stack
#       cards:
#         - type: horizontal-stack
#           cards:
#             - type: entity-button
#               name: start timer countdown
#               icon: mdi:timer
#               tap_action:
#                 action: call-service
#                 service: script.turn_on
#                 service_data:
#                   entity_id: script.start_test_timer
#               entity: script.start_test_timer
#             - type: entity-button
#               name: abort timer
#               icon: mdi:timer-off
#               tap_action:
#                 action: call-service
#                 service: script.turn_on
#                 service_data:
#                   entity_id: script.abort_test_timer
#               entity: script.abort_test_timer
#         - type: entities
#           entities:
#             - entity: variable.test_timer_countdown
#             - entity: variable.test_timer_device
#               name: dummy device
#             - entity: input_number.test_timer_duration
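# Usage note (added, assumption): the `variable:` domain and the `variable.set_variable`
# service used throughout this package come from the hass-variables custom component, not
# core Home Assistant. A minimal manual call for testing it from Developer Tools, reusing the
# entities defined above:
#
#   service: variable.set_variable
#   data:
#     variable: test_timer_countdown
#     value: 5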
@ -0,0 +1,73 @@
# package of switches to interface with the fio (four in one) socket example of @uci/base
switch:
  - platform: mqtt
    name: "FIO Switch 1"
    state_topic: "fio/switch/status/1"
    command_topic: "fio/switch/set/1"
    state_on: "on"
    state_off: "off"
    payload_on: "on"
    payload_off: "off"
    icon: mdi:lightbulb
  - platform: mqtt
    name: "FIO Switch 2"
    state_topic: "fio/switch/status/2"
    command_topic: "fio/switch/set/2"
    state_on: "on"
    state_off: "off"
    payload_on: "on"
    payload_off: "off"
    icon: mdi:lightbulb
  - platform: mqtt
    name: "FIO Switch 3"
    state_topic: "fio/switch/status/3"
    command_topic: "fio/switch/set/3"
    state_on: "on"
    state_off: "off"
    payload_on: "on"
    payload_off: "off"
    icon: mdi:lightbulb
  - platform: mqtt
    name: "FIO Switch 4"
    state_topic: "fio/switch/status/4"
    command_topic: "fio/switch/set/4"
    state_on: "on"
    state_off: "off"
    payload_on: "on"
    payload_off: "off"
    icon: mdi:lightbulb
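# (added sketch) the four switches above can be grouped the same way the sonoff package groups
# its switches, so they can be toggled together; the entity ids follow from the names above:
#
# group:
#   fio:
#     entities:
#       - switch.fio_switch_1
#       - switch.fio_switch_2
#       - switch.fio_switch_3
#       - switch.fio_switch_4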
# copy and paste and uncomment below as a view under views: in ui-lovelace.yaml
# - title: UCI Base FIO Example
#   cards:
#     - type: vertical-stack
#       cards:
#         - type: markdown
#           content: >
#             # Four In One UCI Base Example
#         - type: horizontal-stack
#           cards:
#             - type: entity-button
#               entity: switch.fio_switch_1
#               icon: mdi:water
#               name: Switch 1
#               tap_action:
#                 action: toggle
#             - type: entity-button
#               entity: switch.fio_switch_2
#               icon: mdi:water
#               name: Switch 2
#               tap_action:
#                 action: toggle
#             - type: entity-button
#               entity: switch.fio_switch_3
#               icon: mdi:water
#               name: Switch 3
#               tap_action:
#                 action: toggle
#             - type: entity-button
#               entity: switch.fio_switch_4
#               icon: mdi:water
#               name: Switch 4
#               tap_action:
#                 action: toggle
@ -0,0 +1,78 @@
# package of switches to test gpio pins/relays for @uci/gpio example
# gpio pins on pine64 in order for relays 1-8 const PINS = [80,73,69,230,229,75,74,70]
switch:
  - platform: mqtt
    name: "pine64 Switch 1"
    state_topic: "relay/status/80"
    command_topic: "relay/set/80"
    state_on: "on"
    state_off: "off"
    payload_on: "on"
    payload_off: "off"
    icon: mdi:lightbulb
  - platform: mqtt
    name: "pine64 Switch 2"
    state_topic: "relay/status/73"
    command_topic: "relay/set/73"
    state_on: "on"
    state_off: "off"
    payload_on: "on"
    payload_off: "off"
    icon: mdi:lightbulb
  - platform: mqtt
    name: "pine64 Switch 3"
    state_topic: "relay/status/69"
    command_topic: "relay/set/69"
    state_on: "on"
    state_off: "off"
    payload_on: "on"
    payload_off: "off"
    icon: mdi:lightbulb
  - platform: mqtt
    name: "pine64 Switch 4"
    state_topic: "relay/status/230"
    command_topic: "relay/set/230"
    state_on: "on"
    state_off: "off"
    payload_on: "on"
    payload_off: "off"
    icon: mdi:lightbulb
  - platform: mqtt
    name: "pine64 Switch 5"
    state_topic: "relay/status/229"
    command_topic: "relay/set/229"
    state_on: "on"
    state_off: "off"
    payload_on: "on"
    payload_off: "off"
    icon: mdi:lightbulb
  - platform: mqtt
    name: "pine64 Switch 6"
    state_topic: "relay/status/75"
    command_topic: "relay/set/75"
    state_on: "on"
    state_off: "off"
    payload_on: "on"
    payload_off: "off"
    icon: mdi:lightbulb
  - platform: mqtt
    name: "pine64 Switch 7"
    state_topic: "relay/status/74"
    command_topic: "relay/set/74"
    state_on: "on"
    state_off: "off"
    payload_on: "on"
    payload_off: "off"
    icon: mdi:lightbulb
  - platform: mqtt
    name: "pine64 Switch 8"
    state_topic: "relay/status/70"
    command_topic: "relay/set/70"
    state_on: "on"
    state_off: "off"
    payload_on: "on"
    payload_off: "off"
    icon: mdi:lightbulb

# copy and paste and uncomment below as a view under views: in ui-lovelace.yaml
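# (added sketch -- the original file refers to a view but does not include one; the entity ids
# below are assumed from the switch names above, following the FIO example earlier in this commit)
# - title: UCI GPIO pine64 Example
#   cards:
#     - type: horizontal-stack
#       cards:
#         - type: entity-button
#           entity: switch.pine64_switch_1
#           name: Relay 1
#           tap_action:
#             action: toggle
#         - type: entity-button
#           entity: switch.pine64_switch_2
#           name: Relay 2
#           tap_action:
#             action: toggle
#     - type: entities
#       entities:
#         - switch.pine64_switch_3
#         - switch.pine64_switch_4
#         - switch.pine64_switch_5
#         - switch.pine64_switch_6
#         - switch.pine64_switch_7
#         - switch.pine64_switch_8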
@ -0,0 +1,37 @@
homeassistant:
  customize:
    fan.fan_state:
      friendly_name: 'Fan State (manual override)'
    sensor.closet_temperature:
      friendly_name: 'Closet Temperature (C)'
    input_number.fan_on_temp:
      friendly_name: 'Fan On Set Temperature (C)'

switch:
  - platform: mqtt
    name: "Circuit 1"
    state_topic: "lighting/status/circuits/x1-Erof"
    command_topic: "lighting/set/circuits/x1-Erof"
    state_on: "on"
    state_off: "off"
    payload_on: "on"
    payload_off: "off"

light:
  - platform: mqtt
    name: "Panic"
    command_topic: "lighting/set/switches/panic-xr2R"
    state_topic: "lighting/status/switches/panic-xr2R"
    # brightness_state_topic: "lighting/status/switches/panic-xr2R"
    # brightness_command_topic: "bogus"
    # brightness_scale: 100
    qos: 0
    payload_on: "on"
    payload_off: "off"
    optimistic: true

sensor:
  - platform: mqtt
    name: "Panic On Percent"
    unit_of_measurement: '%'
    state_topic: "lighting/status/switches/panic-xr2R"
@ -0,0 +1,34 @@
weather:
  - platform: darksky
    api_key: 21b1870b9e52cf1ce234798ddfd0fbce
    name: Prairie City Weather
    mode: daily

sensor:
  - platform: darksky
    api_key: 21b1870b9e52cf1ce234798ddfd0fbce
    name: forecast
    forecast:
      - 0
      - 2
      - 3
    hourly_forecast:
      - 0
      - 1
      - 2
      - 3
    monitored_conditions:
      - summary
      - icon
      - temperature
      - dew_point
      - wind_speed
      - wind_bearing
      - cloud_cover
      - temperature_high
      - temperature_low
      - moon_phase
      - sunset_time
      - alerts
    scan_interval: 00:10
@ -0,0 +1,204 @@
sensor:
  - platform: template
    sensors:
      # values used for run
      zone_1_base_time:
        value_template: '{{ "{:02d}".format(states.input_number.zone_1_timer_hours.state|int) }}:{{ "{:02d}".format(states.input_number.zone_1_timer_minutes.state|int) }}'
        friendly_name: "Base TOD HH:MM"
      zone_1_duration:
        value_template: '{{states.input_number.zone_1_duration.state | int}}mins'
        friendly_name: "Duration"
      zone_1_repeat:
        value_template: '{{ "{:02d}".format(states.input_number.zone_1_repeat.state|int)}} hrs'
        friendly_name: "Repeat in"
      # run computed/state values
      zone_1_time_delta:
        friendly_name: "Next Run (secs)"
        value_template: '{{states.input_number.zone_1_repeat.state|int * 3600}}'
      zone_1_next_run:
        friendly_name: "Next Run"
        value_template: '{{states.sensor.zone_1_timer_next.state}}'
      zone_1_last_run:
        friendly_name: "Last Run"
        value_template: '{{ (as_timestamp(states.switch.zone_1.last_changed)) | timestamp_custom("%A, %d %h %H:%M") }}'

      # zone_1_rain_sensor:
      #   friendly_name: "Rainfall Threshold Sensitivity"
      #   value_template: >-
      #     {% if states.sensor.pws_precip_1d.state <= states.input_number.pws_precip_1d_sensitivity.state and states.sensor.pws_precip_today_metric.state <= states.input_number.pws_precip_today_metric_sensitivity.state %}
      #       dry
      #     {% else %}
      #       too wet
      #     {% endif %}

input_number:
  zone_1_timer_minutes:
    name: "Minutes"
    initial: 0
    min: 0
    max: 55
    step: 5
    icon: mdi:timer
  zone_1_timer_hours:
    name: "Hour"
    initial: 6
    min: 0
    max: 23
    step: 1
    icon: mdi:timer
  zone_1_duration:
    name: "Duration"
    initial: 3
    min: 1
    max: 15
    step: 1
    icon: mdi:camera-timer
  zone_1_repeat:
    name: "Repeat Every - HH"
    initial: 24
    min: 0
    max: 48
    step: 6
    icon: mdi:repeat

# #
# #
group:
  zone_1_status:
    icon: mdi:clock
    entities:
      - sensor.zone_1_last_run
      - sensor.zone_1_next_run
      # - sensor.zone_1_duration
      # - sensor.zone_1_repeat
  zone_1_set:
    icon: mdi:settings
    entities:
      - sensor.zone_1_base_time
      - input_number.zone_1_timer_hours
      - input_number.zone_1_timer_minutes
      - input_number.zone_1_duration
      - input_number.zone_1_repeat
      #- input_boolean.zone_1_reset_zone

# Zone switch, trigger, and run script

sensor mqtt:
  - platform: mqtt
    name: "Zone 1 Timer Next"
    state_topic: "run/irrigation/zone1"

input_boolean:
  zone_1_reset_zone:
    name: Reset Next Run Timer
    initial: off
    icon: mdi:lock-reset

switch mqtt:
  - platform: mqtt
    name: "Zone 1"
    state_topic: "status/irrigation/zone_1"
    command_topic: "set/irrigation/zone_1"
    payload_on: "ON"
    payload_off: "OFF"
    qos: 0
    retain: true

switch:
  - platform: mqtt
    name: "Zone 1 Timer"
    state_topic: "status/irrigation/timer/zone_1"
    command_topic: "set/irrigation/timer/zone_1"
    state_on: "enabled"
    state_off: "disabled"
    payload_on: "enable"
    payload_off: "disable"
    icon: mdi:timer

automation zone_1_trigger:
  alias: "Trigger Zone 1 Run"
  trigger:
    - platform: time_pattern
      minutes: '/1'
  condition:
    condition: and
    conditions:
      - condition: template
        value_template: '{{(as_timestamp(now()) | timestamp_custom("%A, %d %h %H:%M")) == states.sensor.zone_1_next_run.state}}'
      # - condition: state
      #   entity_id: sensor.rain_sensor
      #   state: 'dry'
  action:
    - service: script.turn_on
      entity_id: script.zone_1_run

script:
  zone_1_run:
    sequence:
      - alias: "Switch on Zone 1"
        service: switch.turn_on
        entity_id: switch.zone_1
      - delay: '00:{{ states.input_number.zone_1_duration.state | int }}:00'
      - alias: "Switch off Zone 1"
        service: switch.turn_off
        entity_id: switch.zone_1
      - alias: "Update Next Run Time"
        service: mqtt.publish
        data:
          topic: "run/irrigation/zone1"
          retain: 1
          payload_template: '{{ (as_timestamp(states.switch.zone_1.last_changed) + states.sensor.zone_1_time_delta.state | int) | timestamp_custom("%A, %d %h %H:%M") }}'
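# How the schedule fires (added note): zone_1_run publishes the next-run time as a retained
# payload on run/irrigation/zone1, formatted with timestamp_custom("%A, %d %h %H:%M"), e.g.
# "Tuesday, 14 Apr 06:30". The time_pattern automation above formats now() the same way once a
# minute and starts the zone when the two strings match, so the comparison only works while
# both sides use this exact format.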
# automation zone_1_2:
#   alias: "Zone 1 Active Notification"
#   hide_entity: False
#   trigger:
#     - platform: state
#       entity_id: switch.zone_1
#       from: 'off'
#       to: 'on'
#   action:
#     - service: notify.pushbullet
#       data:
#         title: "Irrigation Zone 1"
#         message: "Watering has started"
#
# automation zone_1_3:
#   alias: "Zone 1 Completed Notification"
#   hide_entity: False
#   trigger:
#     - platform: state
#       entity_id: switch.zone_1
#       from: 'on'
#       to: 'off'
#   action:
#     - service: notify.pushbullet
#       data:
#         title: "Irrigation Zone 1"
#         message: "Watering has completed"
#
# automation zone_1_4:
#   alias: "Zone 1 Timer Reset"
#   trigger:
#     - platform: state
#       entity_id: input_boolean.reset_zone_1
#       from: 'off'
#       to: 'on'
#   action:
#     - service: mqtt.publish
#       data:
#         topic: "cmnd/zone_1_control/TIMER"
#         retain: 1
#         payload_template: >-
#           {%if now().strftime("%H:%M") > states.sensor.zone_1_timer.state %}
#             {{(as_timestamp(now() )+24*3600 ) | timestamp_custom("%A, %d %h ")}}{{states.sensor.zone_1_timer.state}}
#           {%else%}
#             {{(as_timestamp(now() ) ) | timestamp_custom("%A, %d %h ")}}{{states.sensor.zone_1_timer.state}}
#           {%endif%}
#     - delay:
#         seconds: 1
#     - service: input_boolean.turn_off
#       data:
#         entity_id: input_boolean.reset_zone_1
#
@ -0,0 +1,13 @@
title: 238 McHaley

# custom cards registration
resources:
  - url: /hacsfiles/banner-card/banner-card.js
    type: module
  - url: /hacsfiles/button-card/button-card.js
    type: module

views:
  - !include /config/lovelace/uci-lighting.yaml
  - !include /config/lovelace/temps.yaml
  - !include /config/lovelace/closet.yaml
  - !include /config/lovelace/lights.yaml
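# (added sketch) each file included under views: holds a single view mapping; a minimal example
# of what /config/lovelace/lights.yaml might contain, modeled on the commented views in the
# packages above -- the title and card entities here are placeholders, not from the original config:
#
#   title: Lights
#   cards:
#     - type: entities
#       entities:
#         - light.panic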
@ -0,0 +1 @@
*.js.gz
File diff suppressed because one or more lines are too long
@ -0,0 +1 @@
*.js.gz
File diff suppressed because it is too large