Compare commits

...

11 Commits

37 changed files with 328 additions and 156 deletions

View File

@ -1 +1 @@
<!doctype html><html lang="en"><head><meta charset="utf-8"><meta http-equiv="X-UA-Compatible" content="IE=edge"><meta name="viewport" content="width=device-width,initial-scale=1"><link rel="icon" href="/favicon.ico"><link rel="stylesheet" href="/fonts/poppins.css"><title>platypush</title><script defer="defer" type="module" src="/static/js/chunk-vendors.95bedba1.js"></script><script defer="defer" type="module" src="/static/js/app.484f9c7c.js"></script><link href="/static/css/chunk-vendors.0fcd36f0.css" rel="stylesheet"><link href="/static/css/app.d7cb662c.css" rel="stylesheet"><script defer="defer" src="/static/js/chunk-vendors-legacy.79dede0c.js" nomodule></script><script defer="defer" src="/static/js/app-legacy.36cc00f9.js" nomodule></script></head><body><noscript><strong>We're sorry but platypush doesn't work properly without JavaScript enabled. Please enable it to continue.</strong></noscript><div id="app"></div></body></html>
<!doctype html><html lang="en"><head><meta charset="utf-8"><meta http-equiv="X-UA-Compatible" content="IE=edge"><meta name="viewport" content="width=device-width,initial-scale=1"><link rel="icon" href="/favicon.ico"><link rel="stylesheet" href="/fonts/poppins.css"><title>platypush</title><script defer="defer" type="module" src="/static/js/chunk-vendors.95bedba1.js"></script><script defer="defer" type="module" src="/static/js/app.da4780e5.js"></script><link href="/static/css/chunk-vendors.0fcd36f0.css" rel="stylesheet"><link href="/static/css/app.d7cb662c.css" rel="stylesheet"><script defer="defer" src="/static/js/chunk-vendors-legacy.79dede0c.js" nomodule></script><script defer="defer" src="/static/js/app-legacy.c91c6b3d.js" nomodule></script></head><body><noscript><strong>We're sorry but platypush doesn't work properly without JavaScript enabled. Please enable it to continue.</strong></noscript><div id="app"></div></body></html>

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@ -0,0 +1,2 @@
"use strict";(self["webpackChunkplatypush"]=self["webpackChunkplatypush"]||[]).push([[3077,3459],{6:function(e,t,n){n.d(t,{Z:function(){return f}});var o=n(6252),i=n(3577),r=n(9963),l=function(e){return(0,o.dD)("data-v-a6396ae8"),e=e(),(0,o.Cn)(),e},c=["checked"],a=l((function(){return(0,o._)("div",{class:"switch"},[(0,o._)("div",{class:"dot"})],-1)})),u={class:"label"};function s(e,t,n,l,s,d){return(0,o.wg)(),(0,o.iD)("div",{class:(0,i.C_)(["power-switch",{disabled:n.disabled}]),onClick:t[0]||(t[0]=(0,r.iM)((function(){return d.onInput&&d.onInput.apply(d,arguments)}),["stop"]))},[(0,o._)("input",{type:"checkbox",checked:n.value},null,8,c),(0,o._)("label",null,[a,(0,o._)("span",u,[(0,o.WI)(e.$slots,"default",{},void 0,!0)])])],2)}var d={name:"ToggleSwitch",emits:["input"],props:{value:{type:Boolean,default:!1},disabled:{type:Boolean,default:!1}},methods:{onInput:function(e){if(this.disabled)return!1;this.$emit("input",e)}}},p=n(3744);const v=(0,p.Z)(d,[["render",s],["__scopeId","data-v-a6396ae8"]]);var f=v},3077:function(e,t,n){n.r(t),n.d(t,{default:function(){return b}});n(8309);var o=n(6252),i=n(3577),r=n(9963),l={class:"entity bluetooth-service-container"},c={class:"head"},a={class:"col-1 icon"},u={class:"col-9 label"},s=["textContent"],d={class:"col-2 connector pull-right"};function p(e,t,n,p,v,f){var h=(0,o.up)("EntityIcon"),m=(0,o.up)("ToggleSwitch");return(0,o.wg)(),(0,o.iD)("div",l,[(0,o._)("div",c,[(0,o._)("div",a,[(0,o.Wm)(h,{entity:e.value,loading:e.loading,error:e.error},null,8,["entity","loading","error"])]),(0,o._)("div",u,[(0,o._)("div",{class:"name",textContent:(0,i.zw)(e.value.name)},null,8,s)]),(0,o._)("div",d,[(0,o.Wm)(m,{value:e.parent.connected,disabled:e.loading,onInput:f.connect,onClick:t[0]||(t[0]=(0,r.iM)((function(){}),["stop"]))},null,8,["value","disabled","onInput"])])])])}var 
v=n(8534),f=(n(5666),n(6)),h=n(3459),m=n(7909),g={name:"BluetoothService",components:{ToggleSwitch:f.Z,EntityIcon:h["default"]},mixins:[m["default"]],methods:{connect:function(e){var t=this;return(0,v.Z)(regeneratorRuntime.mark((function n(){return regeneratorRuntime.wrap((function(n){while(1)switch(n.prev=n.next){case 0:return e.stopPropagation(),t.$emit("loading",!0),n.prev=2,n.next=5,t.request("bluetooth.connect",{device:t.parent.address,service_uuid:t.uuid});case 5:return n.prev=5,t.$emit("loading",!1),n.finish(5);case 8:case"end":return n.stop()}}),n,null,[[2,,5,8]])})))()},disconnect:function(e){var t=this;return(0,v.Z)(regeneratorRuntime.mark((function n(){return regeneratorRuntime.wrap((function(n){while(1)switch(n.prev=n.next){case 0:return e.stopPropagation(),t.$emit("loading",!0),n.prev=2,n.next=5,t.request("bluetooth.disconnect",{device:t.parent.address});case 5:return n.prev=5,t.$emit("loading",!1),n.finish(5);case 8:case"end":return n.stop()}}),n,null,[[2,,5,8]])})))()}}},y=n(3744);const w=(0,y.Z)(g,[["render",p],["__scopeId","data-v-a94a2cfa"]]);var b=w},3459:function(e,t,n){n.r(t),n.d(t,{default:function(){return f}});var o=n(6252),i=n(3577),r=n(3540),l={key:0,src:r,class:"loading"},c={key:1,class:"fas fa-circle-exclamation error"};function a(e,t,n,r,a,u){var s=(0,o.up)("Icon");return(0,o.wg)(),(0,o.iD)("div",{class:(0,i.C_)(["entity-icon-container",{"with-color-fill":!!u.colorFill}]),style:(0,i.j5)(u.colorFillStyle)},[n.loading?((0,o.wg)(),(0,o.iD)("img",l)):n.error?((0,o.wg)(),(0,o.iD)("i",c)):((0,o.wg)(),(0,o.j4)(s,(0,i.vs)((0,o.dG)({key:2},u.computedIconNormalized)),null,16))],6)}var u=n(4648),s=(n(7941),n(7042),n(1478)),d={name:"EntityIcon",components:{Icon:s.Z},props:{loading:{type:Boolean,default:!1},error:{type:Boolean,default:!1},entity:{type:Object,required:!0},icon:{type:Object,default:function(){}},hasColorFill:{type:Boolean,default:!1}},data:function(){return{component:null,modalVisible:!1}},computed:{computedIcon:function(){var 
e,t,n=(0,u.Z)({},(null===(e=this.entity)||void 0===e||null===(t=e.meta)||void 0===t?void 0:t.icon)||{});return Object.keys(this.icon||{}).length&&(n=this.icon),(0,u.Z)({},n)},colorFill:function(){return this.hasColorFill&&this.computedIcon.color},colorFillStyle:function(){return this.colorFill&&!this.error?{background:this.colorFill}:{}},computedIconNormalized:function(){var e=(0,u.Z)({},this.computedIcon);return this.colorFill&&delete e.color,e},type:function(){var e=this.entity.type||"";return e.charAt(0).toUpperCase()+e.slice(1)}}},p=n(3744);const v=(0,p.Z)(d,[["render",a],["__scopeId","data-v-4fad24e6"]]);var f=v},3540:function(e,t,n){e.exports=n.p+"static/img/spinner.c0bee445.gif"}}]);
//# sourceMappingURL=3077-legacy.f26a945c.js.map

File diff suppressed because one or more lines are too long

View File

@ -0,0 +1,2 @@
"use strict";(self["webpackChunkplatypush"]=self["webpackChunkplatypush"]||[]).push([[3077,3459],{6:function(t,e,n){n.d(e,{Z:function(){return v}});var o=n(6252),i=n(3577),l=n(9963);const a=t=>((0,o.dD)("data-v-a6396ae8"),t=t(),(0,o.Cn)(),t),c=["checked"],s=a((()=>(0,o._)("div",{class:"switch"},[(0,o._)("div",{class:"dot"})],-1))),r={class:"label"};function d(t,e,n,a,d,u){return(0,o.wg)(),(0,o.iD)("div",{class:(0,i.C_)(["power-switch",{disabled:n.disabled}]),onClick:e[0]||(e[0]=(0,l.iM)(((...t)=>u.onInput&&u.onInput(...t)),["stop"]))},[(0,o._)("input",{type:"checkbox",checked:n.value},null,8,c),(0,o._)("label",null,[s,(0,o._)("span",r,[(0,o.WI)(t.$slots,"default",{},void 0,!0)])])],2)}var u={name:"ToggleSwitch",emits:["input"],props:{value:{type:Boolean,default:!1},disabled:{type:Boolean,default:!1}},methods:{onInput(t){if(this.disabled)return!1;this.$emit("input",t)}}},p=n(3744);const h=(0,p.Z)(u,[["render",d],["__scopeId","data-v-a6396ae8"]]);var v=h},3077:function(t,e,n){n.r(e),n.d(e,{default:function(){return b}});var o=n(6252),i=n(3577),l=n(9963);const a={class:"entity bluetooth-service-container"},c={class:"head"},s={class:"col-1 icon"},r={class:"col-9 label"},d=["textContent"],u={class:"col-2 connector pull-right"};function p(t,e,n,p,h,v){const y=(0,o.up)("EntityIcon"),f=(0,o.up)("ToggleSwitch");return(0,o.wg)(),(0,o.iD)("div",a,[(0,o._)("div",c,[(0,o._)("div",s,[(0,o.Wm)(y,{entity:t.value,loading:t.loading,error:t.error},null,8,["entity","loading","error"])]),(0,o._)("div",r,[(0,o._)("div",{class:"name",textContent:(0,i.zw)(t.value.name)},null,8,d)]),(0,o._)("div",u,[(0,o.Wm)(f,{value:t.parent.connected,disabled:t.loading,onInput:v.connect,onClick:e[0]||(e[0]=(0,l.iM)((()=>{}),["stop"]))},null,8,["value","disabled","onInput"])])])])}var h=n(6),v=n(3459),y=n(7909),f={name:"BluetoothService",components:{ToggleSwitch:h.Z,EntityIcon:v["default"]},mixins:[y["default"]],methods:{async connect(t){t.stopPropagation(),this.$emit("loading",!0);try{await 
this.request("bluetooth.connect",{device:this.parent.address,service_uuid:this.uuid})}finally{this.$emit("loading",!1)}},async disconnect(t){t.stopPropagation(),this.$emit("loading",!0);try{await this.request("bluetooth.disconnect",{device:this.parent.address})}finally{this.$emit("loading",!1)}}}},m=n(3744);const g=(0,m.Z)(f,[["render",p],["__scopeId","data-v-a94a2cfa"]]);var b=g},3459:function(t,e,n){n.r(e),n.d(e,{default:function(){return h}});var o=n(6252),i=n(3577),l=n(3540);const a={key:0,src:l,class:"loading"},c={key:1,class:"fas fa-circle-exclamation error"};function s(t,e,n,l,s,r){const d=(0,o.up)("Icon");return(0,o.wg)(),(0,o.iD)("div",{class:(0,i.C_)(["entity-icon-container",{"with-color-fill":!!r.colorFill}]),style:(0,i.j5)(r.colorFillStyle)},[n.loading?((0,o.wg)(),(0,o.iD)("img",a)):n.error?((0,o.wg)(),(0,o.iD)("i",c)):((0,o.wg)(),(0,o.j4)(d,(0,i.vs)((0,o.dG)({key:2},r.computedIconNormalized)),null,16))],6)}var r=n(1478),d={name:"EntityIcon",components:{Icon:r.Z},props:{loading:{type:Boolean,default:!1},error:{type:Boolean,default:!1},entity:{type:Object,required:!0},icon:{type:Object,default:()=>{}},hasColorFill:{type:Boolean,default:!1}},data(){return{component:null,modalVisible:!1}},computed:{computedIcon(){let t={...this.entity?.meta?.icon||{}};return Object.keys(this.icon||{}).length&&(t=this.icon),{...t}},colorFill(){return this.hasColorFill&&this.computedIcon.color},colorFillStyle(){return this.colorFill&&!this.error?{background:this.colorFill}:{}},computedIconNormalized(){const t={...this.computedIcon};return this.colorFill&&delete t.color,t},type(){let t=this.entity.type||"";return t.charAt(0).toUpperCase()+t.slice(1)}}},u=n(3744);const p=(0,u.Z)(d,[["render",s],["__scopeId","data-v-4fad24e6"]]);var h=p},3540:function(t,e,n){t.exports=n.p+"static/img/spinner.c0bee445.gif"}}]);
//# sourceMappingURL=3077.af4019ef.js.map

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@ -1,4 +1,7 @@
{
"bluetooth": {
"class": "fab fa-bluetooth"
},
"camera.android.ipcam": {
"class": "fab fa-android"
},

View File

@ -0,0 +1,75 @@
<template>
  <div class="entity bluetooth-service-container">
    <div class="head">
      <div class="col-1 icon">
        <EntityIcon
          :entity="value"
          :loading="loading"
          :error="error" />
      </div>

      <div class="col-9 label">
        <div class="name" v-text="value.name" />
      </div>

      <div class="col-2 connector pull-right">
        <!--
          The switch reflects the connection state of the parent device and
          dispatches to connect/disconnect accordingly. The previous wiring
          (`@input="connect"`) always connected, which made `disconnect`
          unreachable from the UI.
        -->
        <ToggleSwitch
          :value="parent.connected"
          :disabled="loading"
          @input="toggle"
          @click.stop />
      </div>
    </div>
  </div>
</template>

<script>
import ToggleSwitch from "@/components/elements/ToggleSwitch"
import EntityIcon from "./EntityIcon"
import EntityMixin from "./EntityMixin"

/**
 * Entity card for a Bluetooth service exposed by a parent Bluetooth device.
 *
 * `value`, `parent`, `loading` and `error` are provided by `EntityMixin`;
 * `parent` is the Bluetooth device entity that owns this service.
 */
export default {
  name: 'BluetoothService',
  components: {ToggleSwitch, EntityIcon},
  mixins: [EntityMixin],

  methods: {
    /**
     * Toggle handler for the switch: disconnect if the parent device is
     * currently connected, connect otherwise.
     */
    async toggle(event) {
      if (this.parent?.connected)
        await this.disconnect(event)
      else
        await this.connect(event)
    },

    /**
     * Connect to this service on the parent device.
     * Emits `loading` around the request so the card shows a spinner.
     */
    async connect(event) {
      event.stopPropagation()
      this.$emit('loading', true)

      try {
        await this.request('bluetooth.connect', {
          device: this.parent.address,
          // NOTE(review): assumes `uuid` is exposed by the entity/mixin —
          // confirm against EntityMixin/value schema.
          service_uuid: this.uuid,
        })
      } finally {
        // Always clear the loading state, even if the request fails.
        this.$emit('loading', false)
      }
    },

    /**
     * Close the connection to the parent device.
     * Emits `loading` around the request so the card shows a spinner.
     */
    async disconnect(event) {
      event.stopPropagation()
      this.$emit('loading', true)

      try {
        await this.request('bluetooth.disconnect', {
          device: this.parent.address,
        })
      } finally {
        // Always clear the loading state, even if the request fails.
        this.$emit('loading', false)
      }
    },
  },
}
</script>

<style lang="scss" scoped>
@import "common";

.switch-container {
  .switch {
    direction: rtl;
  }
}
</style>

View File

@ -7,6 +7,7 @@
<component
:is="component"
:value="value"
:parent="parent"
:loading="loading"
ref="instance"
:error="error || value?.reachable == false"
@ -26,6 +27,7 @@
<div class="child" v-for="entity in computedChildren" :key="entity.id">
<Entity
:value="entity"
:parent="value"
:loading="loading"
:level="level + 1"
@input="$emit('input', entity)" />

View File

@ -21,6 +21,11 @@ export default {
required: true,
},
parent: {
type: Object,
default: () => {},
},
children: {
type: Object,
default: () => {},

View File

@ -39,6 +39,14 @@
}
},
"bluetooth_service": {
"name": "Service",
"name_plural": "Services",
"icon": {
"class": "fas fa-satellite-dish"
}
},
"device": {
"name": "Device",
"name_plural": "Devices",

View File

@ -5,6 +5,7 @@ from typing import Collection, Optional
from ._base import (
Entity,
EntityKey,
EntitySavedCallback,
get_entities_registry,
init_entities_db,
@ -80,6 +81,7 @@ __all__ = (
'DimmerEntityManager',
'EntitiesEngine',
'Entity',
'EntityKey',
'EntityManager',
'EntitySavedCallback',
'EnumSwitchEntityManager',

View File

@ -27,6 +27,11 @@ from platypush.message import JSONAble
EntityRegistryType = Dict[str, Type['Entity']]
entities_registry: EntityRegistryType = {}
EntityKey = Tuple[str, str]
""" The entity's logical key, as an ``<external_id, plugin>`` tuple. """
EntityMapping = Dict[EntityKey, 'Entity']
""" Internal mapping for entities used for deduplication/merge/upsert. """
_import_error_ignored_modules: Final[Set[str]] = {'bluetooth'}
"""
ImportError exceptions will be ignored for these entity submodules when
@ -110,7 +115,7 @@ if 'entity' not in Base.metadata:
return tuple(inspector.mapper.column_attrs)
@property
def entity_key(self) -> Tuple[str, str]:
def entity_key(self) -> EntityKey:
"""
This method returns the "external" key of an entity.
"""

View File

@ -1,13 +1,13 @@
from logging import getLogger
from threading import Thread, Event
from typing import Dict, Optional, Tuple
from typing import Dict, Optional
from platypush.context import get_bus
from platypush.entities import Entity
from platypush.message.event.entities import EntityUpdateEvent
from platypush.utils import set_thread_name
from platypush.entities._base import EntitySavedCallback
from platypush.entities._base import EntityKey, EntitySavedCallback
from platypush.entities._engine.queue import EntitiesQueue
from platypush.entities._engine.repo import EntitiesRepository
@ -46,7 +46,7 @@ class EntitiesEngine(Thread):
""" Queue where all entity upsert requests are received."""
self._repo = EntitiesRepository()
""" The repository of the processed entities. """
self._callbacks: Dict[Tuple[str, str], EntitySavedCallback] = {}
self._callbacks: Dict[EntityKey, EntitySavedCallback] = {}
""" (external_id, plugin) -> callback mapping"""
def post(self, *entities: Entity, callback: Optional[EntitySavedCallback] = None):

View File

@ -1,9 +1,9 @@
import logging
from typing import Dict, Iterable, Tuple
from typing import Dict, Iterable, Optional, Tuple
from sqlalchemy.orm import Session
from platypush.entities import Entity
from platypush.entities._base import Entity, EntityMapping
# pylint: disable=no-name-in-module
from platypush.entities._engine.repo.db import EntitiesDb
@ -20,7 +20,7 @@ class EntitiesRepository:
def __init__(self):
self._db = EntitiesDb()
self._merger = EntitiesMerger(self)
self._merge = EntitiesMerger()
def get(
self, session: Session, entities: Iterable[Entity]
@ -43,7 +43,63 @@ class EntitiesRepository:
autocommit=False,
expire_on_commit=False,
) as session:
merged_entities = self._merger.merge(session, entities)
merged_entities = self._merge(
session,
entities,
existing_entities=self._fetch_all_and_flatten(session, entities),
)
merged_entities = self._db.upsert(session, merged_entities)
return merged_entities
def _fetch_all_and_flatten(
self,
session: Session,
entities: Iterable[Entity],
) -> EntityMapping:
"""
Given a collection of entities, retrieves their persisted instances
(lookup is performed by ``entity_key``), and it also recursively
expands their relationships, so the session is updated with the latest
persisted versions of all the objects in the hierarchy.
:return: An ``entity_key -> entity`` mapping.
"""
expanded_entities = {}
for entity in entities:
root_entity = self._get_root_entity(session, entity)
expanded_entities.update(self._expand_children([root_entity]))
expanded_entities.update(self._expand_children([entity]))
return self.get(session, expanded_entities.values())
@classmethod
def _expand_children(
cls,
entities: Iterable[Entity],
all_entities: Optional[EntityMapping] = None,
) -> EntityMapping:
"""
Recursively expands and flattens all the children of a set of entities
into an ``entity_key -> entity`` mapping.
"""
all_entities = all_entities or {}
for entity in entities:
all_entities[entity.entity_key] = entity
cls._expand_children(entity.children, all_entities)
return all_entities
def _get_root_entity(self, session: Session, entity: Entity) -> Entity:
"""
Retrieve the root entity (i.e. the one with a null parent) of an
entity.
"""
parent = entity
while parent:
parent = self._merge.get_parent(session, entity)
if parent:
entity = parent
return entity

View File

@ -6,7 +6,7 @@ from sqlalchemy import and_, or_
from sqlalchemy.orm import Session
from platypush.context import get_plugin
from platypush.entities import Entity
from platypush.entities._base import Entity
@dataclass

View File

@ -1,34 +1,30 @@
from typing import Dict, Iterable, List, Optional, Tuple
from typing import Iterable, List, Optional
from sqlalchemy.orm import Session, exc
from platypush.entities import Entity
from platypush.entities._base import Entity, EntityMapping
# pylint: disable=too-few-public-methods
class EntitiesMerger:
"""
This object is in charge of detecting and merging entities that already
exist on the database before flushing the session.
A stateless functor in charge of detecting and merging entities that
already exist in the database before flushing the session.
"""
def __init__(self, repository):
from . import EntitiesRepository
self._repo: EntitiesRepository = repository
def merge(
def __call__(
self,
session: Session,
entities: Iterable[Entity],
existing_entities: Optional[EntityMapping] = None,
) -> List[Entity]:
"""
Merge a set of entities with their existing representations and update
the parent/child relationships and return a tuple with
``[new_entities, updated_entities]``.
the parent/child relationships and return a list containing
``[*updated_entities, *new_entities]``.
"""
new_entities: Dict[Tuple[str, str], Entity] = {}
existing_entities: Dict[Tuple[str, str], Entity] = {}
existing_entities = existing_entities or {}
new_entities: EntityMapping = {}
self._merge(
session,
@ -37,156 +33,164 @@ class EntitiesMerger:
existing_entities=existing_entities,
)
return [*existing_entities.values(), *new_entities.values()]
return list({**existing_entities, **new_entities}.values())
def _merge(
self,
session: Session,
entities: Iterable[Entity],
new_entities: Dict[Tuple[str, str], Entity],
existing_entities: Dict[Tuple[str, str], Entity],
new_entities: EntityMapping,
existing_entities: EntityMapping,
) -> List[Entity]:
"""
(Recursive) inner implementation of the entity merge logic.
"""
processed_entities = []
existing_entities.update(self._repo.get(session, entities))
# Make sure that we have no duplicate entity keys in the current batch
entities = list(
{
**({e.entity_key: e for e in entities}),
**(
{
e.entity_key: e
for e in {str(ee.id): ee for ee in entities if ee.id}.values()
}
),
}.values()
)
# Retrieve existing records and merge them
for entity in entities:
key = entity.entity_key
existing_entity = existing_entities.get(key, new_entities.get(key))
parent_id, parent = self._update_parent(session, entity, new_entities)
# Synchronize the parent(s)
entity = self._sync_parent(session, entity, new_entities, existing_entities)
if existing_entity:
# Update the parent
if not parent_id and parent:
existing_entity.parent = parent
else:
existing_entity.parent_id = parent_id
# Merge the other columns
self._merge_columns(entity, existing_entity)
# Merge the columns with those of the existing entity
existing_entity = self._merge_columns(entity, existing_entity)
# Merge the children
self._merge(session, entity.children, new_entities, existing_entities)
# Use the updated version of the existing entity.
self._append_children(
existing_entity,
*self._merge(
session,
entity.children,
new_entities,
existing_entities,
)
)
# Use the existing entity now that it's been merged
entity = existing_entity
else:
# Add it to the map of new entities if the entity doesn't exist
# on the repo
# Add it to the map of new entities if the entity doesn't exist on the db
new_entities[key] = entity
processed_entities.append(entity)
return processed_entities
def _update_parent(
self,
@classmethod
def _sync_parent(
cls,
session: Session,
entity: Entity,
new_entities: Dict[Tuple[str, str], Entity],
) -> Tuple[Optional[int], Optional[Entity]]:
new_entities: EntityMapping,
existing_entities: EntityMapping,
) -> Entity:
"""
Recursively update the hierarchy of an entity, moving upwards towards
the parent.
Recursively refresh the parent of an entity all the way up in the
hierarchy, to make sure that all the parent/child relations are
appropriately rewired and that all the relevant objects are added to
this session.
"""
parent_id: Optional[int] = entity.parent_id
try:
parent: Optional[Entity] = entity.parent
except exc.DetachedInstanceError:
# Dirty fix for `Parent instance <...> is not bound to a Session;
# lazy load operation of attribute 'parent' cannot proceed
parent = session.query(Entity).get(parent_id) if parent_id else None
parent = cls.get_parent(session, entity)
if not parent:
# No parent -> we can terminate the recursive climbing
return entity
# If the entity has a parent with an ID, use that
if parent and parent.id:
parent_id = parent_id or parent.id
# Check if an entity with the same key as the reported parent already
# exists in the cached entities
existing_parent = existing_entities.get(
parent.entity_key, new_entities.get(parent.entity_key)
)
# If there's no parent_id but there is a parent object, try to fetch
# its stored version
if not parent_id and parent:
batch = list(self._repo.get(session, [parent]).values())
if not existing_parent:
# No existing parent -> we need to flush the one reported by this
# entity
return entity
# If the parent is already stored, use its ID
if batch:
parent = batch[0]
parent_id = parent.id
# Check if the existing parent already has a child with the same key as
# this entity
existing_entity = next(
iter(
child
for child in existing_parent.children
if child.entity_key == entity.entity_key
),
None,
)
# Otherwise, check if its key is already among those awaiting flush
# and reuse the same objects (prevents SQLAlchemy from generating
# duplicate inserts)
else:
temp_entity = new_entities.get(parent.entity_key)
if temp_entity:
self._remove_duplicate_children(entity, temp_entity)
parent = entity.parent = temp_entity
else:
new_entities[parent.entity_key] = parent
# Recursively apply any changes up in the hierarchy
self._update_parent(session, parent, new_entities=new_entities)
# If we found a parent_id, populate it on the entity (and remove the
# supporting relationship object so SQLAlchemy doesn't go nuts when
# flushing)
if parent_id:
if not existing_entity:
# If this entity isn't currently a member of the existing parent,
# temporarily reset the parent of the current entity, so we won't
# carry stale objects around. We will soon rewire it to the
# existing parent.
entity.parent = None
entity.parent_id = parent_id
else:
# Otherwise, merge the columns of the existing entity with those of
# the new entity and use the existing entity
entity = cls._merge_columns(entity, existing_entity)
return parent_id, parent
# Refresh the existing collection of children with the new/updated
# entity
cls._append_children(existing_parent, entity)
# Recursively call this function to synchronize any parent entities up
# in the taxonomy
cls._sync_parent(session, existing_parent, new_entities, existing_entities)
return entity
@staticmethod
def _remove_duplicate_children(entity: Entity, parent: Optional[Entity] = None):
if not parent:
return
# Make sure that an entity has no duplicate entity IDs among its
# children
existing_child_index_by_id = None
if entity.id:
try:
existing_child_index_by_id = [e.id for e in parent.children].index(
entity.id
)
parent.children.pop(existing_child_index_by_id)
except ValueError:
pass
# Make sure that an entity has no duplicate entity keys among its
# children
existing_child_index_by_key = None
def get_parent(session: Session, entity: Entity) -> Optional[Entity]:
"""
Get the parent of an entity, fetching it from the database if it isn't
available in the current session.
"""
try:
existing_child_index_by_key = [e.entity_key for e in parent.children].index(
entity.entity_key
return entity.parent
except exc.DetachedInstanceError:
# Dirty fix for `Parent instance <...> is not bound to a Session;
# lazy load operation of attribute 'parent' cannot proceed`
return (
session.query(Entity).get(entity.parent_id)
if entity.parent_id
else None
)
parent.children.pop(existing_child_index_by_key)
except ValueError:
pass
@classmethod
def _merge_columns(cls, entity: Entity, existing_entity: Entity) -> Entity:
@staticmethod
def _append_children(entity: Entity, *children: Entity):
"""
Update the list of children of a given entity with the given list of
entities.
Note that, in case of ``entity_key`` conflict (the key of a new entity
already exists in the entity's children), the most recent version will
be used, so any column merge logic needs to happen before this method
is called.
"""
entity.children = list(
{
**{e.entity_key: e for e in entity.children},
**{e.entity_key: e for e in children},
}.values()
)
for child in children:
child.parent = entity
if entity.id:
child.parent_id = entity.id
@staticmethod
def _merge_columns(entity: Entity, existing_entity: Entity) -> Entity:
"""
Merge two versions of an entity column by column.
"""
columns = [col.key for col in entity.columns]
for col in columns:
if col == 'meta':
existing_entity.meta = {
**(existing_entity.meta or {}),
**(entity.meta or {}),
existing_entity.meta = { # type: ignore
**(existing_entity.meta or {}), # type: ignore
**(entity.meta or {}), # type: ignore
}
elif col not in ('id', 'created_at'):
setattr(existing_entity, col, getattr(entity, col))

View File

@ -158,10 +158,6 @@ if 'bluetooth_device' not in Base.metadata:
def to_dict(self):
"""
Overwrites ``to_dict`` to transform private column names into their
public representation, and also include the exposed services and
child entities.
public representation.
"""
return {
**{k.lstrip('_'): v for k, v in super().to_dict().items()},
'children': [child.to_dict() for child in self.children],
}
return {k.lstrip('_'): v for k, v in super().to_dict().items()}

View File

@ -48,6 +48,9 @@ if 'bluetooth_service' not in Base.metadata:
is_ble = Column(Boolean, default=False)
""" Whether the service is a BLE service. """
connected = Column(Boolean, default=False)
""" Whether an active connection exists to this service. """
__mapper_args__ = {
'polymorphic_identity': __tablename__,
}

View File

@ -216,7 +216,7 @@ def _parse_services(device: BLEDevice) -> List[BluetoothService]:
BluetoothService(
id=f'{device.address}:{uuid}',
uuid=uuid,
name=str(srv_cls),
name=f'[{uuid}]' if srv_cls == ServiceClass.UNKNOWN else str(srv_cls),
protocol=Protocol.L2CAP,
is_ble=True,
)
@ -236,9 +236,7 @@ def device_to_entity(device: BLEDevice, data: AdvertisementData) -> BluetoothDev
theengs_entity = _parse_advertisement_data(data)
props = (device.details or {}).get('props', {})
manufacturer = theengs_entity.manufacturer or company.get(
list(device.metadata['manufacturer_data'].keys())[0]
if device.metadata.get('manufacturer_data', {})
else None
next(iter(key for key in device.metadata['manufacturer_data']), 0xFFFF)
)
parent_entity = BluetoothDevice(
@ -279,8 +277,8 @@ def device_to_entity(device: BLEDevice, data: AdvertisementData) -> BluetoothDev
# Skip entities that we couldn't parse.
continue
entity.id = f'{parent_entity.id}:{prop}'
entity.name = prop
entity.id = f'{parent_entity.address}::{prop}'
entity.name = prop.title()
parent_entity.children.append(entity)
entity.parent = parent_entity

View File

@ -174,15 +174,23 @@ class LegacyManager(BaseBluetoothManager):
raise AssertionError(f'Connection to {device} timed out') from e
dev.connected = True
conn.service.connected = True
self.notify(BluetoothDeviceConnectedEvent, dev)
yield conn
# Close the connection once the context is over
with self._connection_locks[conn.key]:
conn.close()
try:
conn.close()
except Exception as e:
self.logger.warning(
'Error while closing the connection to %s: %s', device, e
)
self._connections.pop(conn.key, None)
dev.connected = False
conn.service.connected = False
self.notify(BluetoothDeviceDisconnectedEvent, dev)
@override

View File

@ -86,7 +86,7 @@ class SwitchTplinkPlugin(RunnablePlugin, SwitchEntityManager):
devices: Optional[Mapping[str, SmartDevice]] = None,
publish_entities: bool = True,
):
for (addr, info) in self._static_devices.items():
for addr, info in self._static_devices.items():
try:
dev = info['type'](addr)
self._alias_to_dev[info.get('name', dev.alias)] = dev
@ -94,7 +94,7 @@ class SwitchTplinkPlugin(RunnablePlugin, SwitchEntityManager):
except SmartDeviceException as e:
self.logger.warning('Could not communicate with device %s: %s', addr, e)
for (ip, dev) in (devices or {}).items():
for ip, dev in (devices or {}).items():
self._ip_to_dev[ip] = dev
self._alias_to_dev[dev.alias] = dev
@ -225,7 +225,7 @@ class SwitchTplinkPlugin(RunnablePlugin, SwitchEntityManager):
return [self._serialize(dev) for dev in self._scan().values()]
def main(self):
devices = {ip: self._serialize(dev) for ip, dev in self._ip_to_dev}
devices = {ip: self._serialize(dev) for ip, dev in self._ip_to_dev.items()}
while not self.should_stop():
new_devices = self._scan(publish_entities=False)