@@ -8,9 +8,12 @@ import {
   EntityConfig,
   isEntityIdStateConfig,
   isEntityIdStatisticsConfig,
-  HistoryInRange,
-  EntityState,
+  CachedEntity,
+  CachedStatisticsEntity,
+  CachedStateEntity,
 } from "../types";
+import { groupBy } from "lodash";
+import { StatisticValue } from "../recorder-types";
 
 export function mapValues<T, S>(
   o: Record<string, T>,
@@ -24,7 +27,10 @@ async function fetchSingleRange(
   [startT, endT]: number[],
   significant_changes_only: boolean,
   minimal_response: boolean
-): Promise<HistoryInRange> {
+): Promise<{
+  range: [number, number];
+  history: CachedEntity[];
+}> {
   // We fetch slightly more than requested (i.e the range visible in the screen). The reason is the following:
   // When fetching data in a range `[startT,endT]`, Home Assistant adds a fictitious datapoint at
   // the start of the fetched period containing a copy of the first datapoint that occurred before
@@ -65,7 +71,7 @@ async function fetchSingleRange(
   const start = new Date(startT - 1);
   endT = Math.min(endT, Date.now());
   const end = new Date(endT);
-  let history: EntityState[];
+  let history: CachedEntity[];
   if (isEntityIdStatisticsConfig(entity)) {
     history = await fetchStatistics(hass, entity, [start, end]);
   } else {
@@ -90,9 +96,9 @@ async function fetchSingleRange(
 
 export function getEntityKey(entity: EntityConfig) {
   if (isEntityIdAttrConfig(entity)) {
-    return `${entity.entity}::${entity.attribute}`;
+    return `${entity.entity}::attribute`;
   } else if (isEntityIdStatisticsConfig(entity)) {
-    return `${entity.entity}::statistics::${entity.statistic}::${entity.period}`;
+    return `${entity.entity}::statistics::${entity.period}`;
   } else if (isEntityIdStateConfig(entity)) {
     return entity.entity;
   }
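
For illustration only, here is a small standalone sketch (with simplified, assumed config shapes rather than the card's real EntityConfig types) of what the new cache keys look like: all attribute series of an entity share one key, and statistics series share a key per period regardless of which statistic is plotted.

// Hypothetical, simplified config shapes; the real types live in ../types.
type DemoConfig =
  | { entity: string; attribute: string }
  | { entity: string; statistic: "mean" | "min" | "max"; period: "5minute" | "hour" }
  | { entity: string };

function demoKey(c: DemoConfig): string {
  if ("attribute" in c) return `${c.entity}::attribute`;
  if ("statistic" in c) return `${c.entity}::statistics::${c.period}`;
  return c.entity;
}

// Both statistics configs below map to "sensor.power::statistics::hour",
// so one fetch/cache entry serves mean and max alike.
console.log(demoKey({ entity: "sensor.power", statistic: "mean", period: "hour" }));
console.log(demoKey({ entity: "sensor.power", statistic: "max", period: "hour" }));
console.log(demoKey({ entity: "sensor.power", attribute: "voltage" })); // "sensor.power::attribute"
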
@@ -102,10 +108,10 @@ export function getEntityKey(entity: EntityConfig) {
 const MIN_SAFE_TIMESTAMP = Date.parse("0001-01-02T00:00:00.000Z");
 export default class Cache {
   ranges: Record<string, TimestampRange[]> = {};
-  histories: Record<string, EntityState[]> = {};
+  histories: Record<string, CachedEntity[]> = {};
   busy = Promise.resolve(); // mutex
 
-  add(entity: EntityConfig, states: EntityState[], range: [number, number]) {
+  add(entity: EntityConfig, states: CachedEntity[], range: [number, number]) {
     const entityKey = getEntityKey(entity);
     let h = (this.histories[entityKey] ??= []);
     h.push(...states);
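
The `busy` field acts as a promise-chain mutex: every `update` call appends itself to the chain so cache updates never run concurrently. A minimal standalone sketch of that pattern (not the card's actual class):

// Minimal sketch of a promise-chain mutex: tasks run strictly one after another.
class SerialQueue {
  private busy: Promise<void> = Promise.resolve();

  run<T>(task: () => Promise<T>): Promise<T> {
    const result = this.busy.catch(() => {}).then(task);
    // Keep the chain alive whether or not the task rejects.
    this.busy = result.then(
      () => {},
      () => {}
    );
    return result;
  }
}

const queue = new SerialQueue();
queue.run(async () => console.log("first update"));
queue.run(async () => console.log("second update")); // starts only after the first settles
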
@@ -122,13 +128,33 @@ export default class Cache {
     this.ranges = {};
     this.histories = {};
   }
-  getHistory(entity: EntityConfig) {
+  getHistory(entity: EntityConfig): CachedEntity[] {
     let key = getEntityKey(entity);
     const history = this.histories[key] || [];
-    return history.map((datum) => ({
-      ...datum,
-      timestamp: datum.timestamp + entity.offset,
-    }));
+    if (isEntityIdStatisticsConfig(entity)) {
+      return (history as CachedStatisticsEntity[]).map((entry) => ({
+        ...entry,
+        timestamp: entry.timestamp + entity.offset,
+        value: entry[entity.statistic],
+      }));
+    }
+    if (isEntityIdAttrConfig(entity)) {
+      return (history as CachedStateEntity[]).map((entry) => ({
+        ...entry,
+        timestamp: entry.timestamp + entity.offset,
+        value: entry.attributes[entity.attribute],
+      }));
+    }
+    if (isEntityIdStateConfig(entity)) {
+      return (history as CachedStateEntity[]).map((entry) => ({
+        ...entry,
+        timestamp: entry.timestamp + entity.offset,
+        value: entry.state,
+      }));
+    }
+    throw new Error(
+      `Unrecognised fetch type for ${(entity as EntityConfig).entity}`
+    );
   }
   async update(
     range: TimestampRange,
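
In other words, the cache stores the raw history entries once per key and derives a uniform `value` field at read time, depending on what the entity config asks for. A rough standalone sketch of that mapping, using an assumed entry shape rather than the real CachedStateEntity / CachedStatisticsEntity types:

// Assumed, simplified entry shape; the real cached types come from ../types.
type DemoStateEntry = {
  timestamp: number;
  state: string;
  attributes: Record<string, unknown>;
};

const cached: DemoStateEntry[] = [
  { timestamp: 1_700_000_000_000, state: "21.5", attributes: { battery: 87 } },
];

// The same cached entries yield different series depending on the config:
const stateSeries = cached.map((e) => ({ ...e, value: e.state })); // state plots
const attrSeries = cached.map((e) => ({ ...e, value: e.attributes["battery"] })); // attribute plots

console.log(stateSeries[0].value, attrSeries[0].value); // "21.5" 87
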
@@ -137,31 +163,37 @@ export default class Cache {
     significant_changes_only: boolean,
     minimal_response: boolean
   ) {
-    range = range.map((n) => Math.max(MIN_SAFE_TIMESTAMP, n)); // HA API can't handle negative years
     return (this.busy = this.busy
       .catch(() => {})
       .then(async () => {
-        const promises = entities.map(async (entity) => {
-          const entityKey = getEntityKey(entity);
-          this.ranges[entityKey] ??= [];
-          const offsetRange = [
-            range[0] - entity.offset,
-            range[1] - entity.offset,
-          ];
-          const rangesToFetch = subtractRanges(
-            [offsetRange],
-            this.ranges[entityKey]
-          );
-          for (const aRange of rangesToFetch) {
-            const fetchedHistory = await fetchSingleRange(
-              hass,
-              entity,
-              aRange,
-              significant_changes_only,
-              minimal_response
+        range = range.map((n) => Math.max(MIN_SAFE_TIMESTAMP, n)); // HA API can't handle negative years
+        const parallelFetches = Object.values(groupBy(entities, getEntityKey));
+        const promises = parallelFetches.flatMap(async (entityGroup) => {
+          // Each entity in entityGroup will result in exactly the same fetch
+          // But these may differ once the offsets PR is merged
+          // Making these fetches sequentially ensures that the already fetched ranges of each
+          // request are not fetched more than once
+          for (const entity of entityGroup) {
+            const entityKey = getEntityKey(entity);
+            this.ranges[entityKey] ??= [];
+            const offsetRange = [
+              range[0] - entity.offset,
+              range[1] - entity.offset,
+            ];
+            const rangesToFetch = subtractRanges(
+              [offsetRange],
+              this.ranges[entityKey]
             );
-            if (fetchedHistory === null) continue;
-            this.add(entity, fetchedHistory.history, fetchedHistory.range);
+            for (const aRange of rangesToFetch) {
+              const fetchedHistory = await fetchSingleRange(
+                hass,
+                entity,
+                aRange,
+                significant_changes_only,
+                minimal_response
+              );
+              this.add(entity, fetchedHistory.history, fetchedHistory.range);
+            }
           }
         });
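
The scheduling idea behind the `groupBy` change: entities that resolve to the same cache key are fetched sequentially, so the second one sees the ranges the first already covered and fetches nothing extra, while different keys still fetch in parallel. A simplified standalone sketch of that flow, with placeholder types and a fake fetch instead of the real Home Assistant calls:

import { groupBy } from "lodash";

// Placeholder config and fetch; the real code uses EntityConfig, subtractRanges and fetchSingleRange.
type DemoEntity = { entity: string };
const keyOf = (e: DemoEntity) => e.entity;

const fetchedRanges: Record<string, [number, number][]> = {};

async function demoFetch(key: string, range: [number, number]) {
  console.log(`fetching ${key} for`, range);
  fetchedRanges[key].push(range);
}

async function demoUpdate(entities: DemoEntity[], range: [number, number]) {
  const groups = Object.values(groupBy(entities, keyOf));
  const promises = groups.map(async (group) => {
    // Sequential within a group: later members see what earlier members already fetched.
    for (const entity of group) {
      const key = keyOf(entity);
      fetchedRanges[key] ??= [];
      if (fetchedRanges[key].length === 0) await demoFetch(key, range);
    }
  });
  await Promise.all(promises); // parallel across groups
}

demoUpdate(
  [{ entity: "sensor.a" }, { entity: "sensor.a" }, { entity: "sensor.b" }],
  [0, 1000]
); // logs exactly one fetch for sensor.a and one for sensor.b
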