wattsworth / lumen-api
Commit 4df50754 authored Apr 25, 2017 by John Doe
added discrete|continuous|event display type
parent df09e963
Showing 17 changed files with 232 additions and 152 deletions
app/adapters/db_adapter.rb
app/controllers/db_elements_controller.rb
app/controllers/db_streams_controller.rb
app/models/db_element.rb
app/services/data/load_element_data.rb
app/services/data/load_stream_data.rb
app/services/db/update_db.rb
app/services/db_stream/update_stream.rb
db/migrate/20170422182154_add_type_to_db_elements.rb
db/migrate/20170422184639_rename_type.rb
db/schema.rb
spec/controllers/db_elements_controller_spec.rb
spec/factories/db_element.rb
spec/factories/db_stream.rb
spec/models/db_element_spec.rb
spec/services/data/load_element_data_spec.rb
spec/services/data/load_stream_data_spec.rb
app/adapters/db_adapter.rb
@@ -44,10 +44,8 @@ class DbAdapter
     rescue
       return nil
     end
-    #if the url exists but is not a nilm...
-    unless resp.parsed_response.respond_to?(:map)
-      return nil
-    end
+    # if the url exists but is not a nilm...
+    return nil unless resp.parsed_response.respond_to?(:map)
     resp.parsed_response.map do |entry|
       metadata = if entry[0].match(UpdateStream.decimation_tag).nil?
                    __get_metadata(entry[0])
@@ -57,6 +55,18 @@ class DbAdapter
       # The streams are not pure attributes, pull them out
       elements = metadata.delete(:streams) || []
       elements.each(&:symbolize_keys!)
+      # map the legacy discrete flag to new type setting
+      # discrete == True => type = event
+      # discrete == False => type = continuous
+      elements.map! do |e|
+        next unless e[:type].nil?
+        e[:display_type] = if e[:discrete]
+                             'event'
+                           else
+                             'continuous'
+                           end
+        e
+      end
       # Create the schema:
       # 3 elements: path, attributes, elements
       {
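In effect, each legacy element hash picks up a display_type derived from its old boolean flag. A minimal sketch of the mapping (the hash keys are illustrative, and the guard for entries that already carry a type is omitted):

    legacy_elements = [{ column: 0, name: 'status', discrete: true },
                       { column: 1, name: 'power',  discrete: false }]
    legacy_elements.map! do |e|
      e[:display_type] = e[:discrete] ? 'event' : 'continuous'
      e
    end
    # => the first element gets display_type 'event', the second 'continuous'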
@@ -84,50 +94,44 @@ class DbAdapter
   end

   def get_count(path, start_time, end_time)
-    begin
-      resp = self.class.get("#{@url}/stream/extract",
-                            :query => {:path => path, :start => start_time,
-                                       :end => end_time, :count => 1})
-      return nil unless resp.success?
-      return resp.parsed_response.to_i
-    rescue
-      return nil
-    end
+    resp = self.class.get("#{@url}/stream/extract",
+                          query: {path: path, start: start_time,
+                                  end: end_time, count: 1})
+    return nil unless resp.success?
+    return resp.parsed_response.to_i
+  rescue
+    return nil
   end

   def get_data(path, start_time, end_time)
-    begin
-      resp = self.class.get("#{@url}/stream/extract",
-                            :query => {:path => path, :start => start_time,
-                                       :end => end_time, :markup => 1})
-      return nil unless resp.success?
-      return __parse_data(resp.parsed_response)
-    rescue
-      return nil
-    end
+    resp = self.class.get("#{@url}/stream/extract",
+                          query: {path: path, start: start_time,
+                                  end: end_time, markup: 1})
+    return nil unless resp.success?
+    return __parse_data(resp.parsed_response)
+  rescue
+    return nil
   end

   def get_intervals(path, start_time, end_time)
-    begin
-      resp = self.class.get("#{@url}/stream/intervals",
-                            :query => {:path => path, :start => start_time,
-                                       :end => end_time})
-      return nil unless resp.success?
-      return __parse_intervals(resp.parsed_response)
-    rescue
-      return nil
-    end
+    resp = self.class.get("#{@url}/stream/intervals",
+                          query: {path: path, start: start_time,
+                                  end: end_time})
+    return nil unless resp.success?
+    return __parse_intervals(resp.parsed_response)
+  rescue
+    return nil
  end

   def _set_path_metadata(path, data)
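The begin/end wrappers disappear because Ruby allows a rescue clause to hang directly off a method definition and cover the whole body; the same pass also switches the query hashes from hash-rocket to keyword-style syntax. A minimal sketch of the rescue idiom, unrelated to this codebase:

    def safe_divide(a, b)
      a / b
    rescue ZeroDivisionError
      nil
    end

    safe_divide(6, 2) # => 3
    safe_divide(1, 0) # => nil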
@@ -163,7 +167,9 @@ class DbAdapter
     # elements are called streams in the nilmdb metadata
     # and they don't have id or timestamp fields
     attribs[:streams] = db_stream.db_elements.map do |e|
-      e.attributes.except('id', 'created_at', 'updated_at', 'db_stream_id')
+      vals = e.attributes.except('id', 'created_at', 'updated_at', 'db_stream_id')
+      vals[:discrete] = e.display_type == 'event'
+      vals
     end
     {config_key__: attribs.to_json}.to_json
   end
@@ -200,45 +206,41 @@ class DbAdapter
   # create an array from string response
   def __parse_data(resp)
-    return [] if resp == nil # no data returned
+    return [] if resp.nil? # no data returned
     data = []
     add_break = false
     resp.split("\n").each do |row|
-      next if row.length == 0 #last row is empty (\n)
-      words = row.split(" ")
-      #check if this is an interval
-      if(words[0] == "#")
-        #this is a comment line, check if it is an interval boundary marker
-        if(words[1] == "interval-start")
-          intervalStart = words[2].to_i
-        end
-        if(words[1] == "interval-end")
-          intervalEnd = words[2].to_i
-          if(intervalEnd != intervalStart)
-            add_break = true
-          end
-        end
-        next
-      end
-      data.push(nil) if(add_break) #add a data break
-      add_break = false
-      #this is a normal row
-      data.push(words.map(&:to_f))
+      next if row.empty? # last row is empty (\n)
+      words = row.split(' ')
+      # check if this is an interval
+      if words[0] == '#'
+        # this is a comment line, check if it is an interval boundary marker
+        intervalStart = words[2].to_i if words[1] == 'interval-start'
+        if words[1] == 'interval-end'
+          intervalEnd = words[2].to_i
+          add_break = true if intervalEnd != intervalStart
+        end
+        next
+      end
+      data.push(nil) if add_break # add a data break
+      add_break = false
+      # this is a normal row
+      data.push(words.map(&:to_f))
     end
     data
   end

-  #create horizontal line segments representing
-  #the intervals
+  # create horizontal line segments representing
+  # the intervals
+  #
   def __parse_intervals(resp)
-    intervals = JSON.parse('[' + resp.chomp().gsub(/\r\n/, ',') + ']')
+    intervals = JSON.parse('[' + resp.chomp.gsub(/\r\n/, ',') + ']')
     data = []
     intervals.each do |interval|
       data.push([interval[0], 0])
       data.push([interval[1], 0])
       data.push(nil) # break up the intervals
     end
-    return data
+    data
   end
 end
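Reading __parse_data above: the extract text mixes numeric rows with comment lines that mark interval boundaries, and a gap between intervals becomes a nil entry in the output array. A rough trace on an invented response (the exact nilmdb text format is not part of this diff, so the input here is illustrative):

    raw = "# interval-start 100\n" \
          "100 1.0 2.0\n" \
          "101 1.5 2.5\n" \
          "# interval-end 101\n" \
          "# interval-start 200\n" \
          "200 3.0 4.0\n" \
          "# interval-end 200\n"
    # __parse_data(raw)
    # => [[100.0, 1.0, 2.0], [101.0, 1.5, 2.5], nil, [200.0, 3.0, 4.0]]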
app/controllers/db_elements_controller.rb
@@ -2,16 +2,17 @@
 class DbElementsController < ApplicationController
   before_action :authenticate_user!

-  def index
-    @elements = DbElement.find(JSON.decode(params[:elements]))
-    # make sure the user is allowed to view these elements
-    @elements.each do |elem|
-      unless current_user.views_nilm?(elem.db_stream.db.nilm)
-        head :unauthorized
-        return
-      end
-    end
-  end
+  #def index
+  #  @elements = DbElement.find(JSON.parse(params[:elements]))
+  #  # make sure the user is allowed to view these elements
+  #  @elements.each do |elem|
+  #    unless current_user.views_nilm?(elem.db_stream.db.nilm)
+  #      head :unauthorized
+  #      return
+  #    end
+  #  end
+  #end

   def data
     req_elements = DbElement.find(JSON.parse(params[:elements]))
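Per the controller change and the request specs later in this diff, clients now hit the data action rather than index and pass the element ids as a JSON-encoded array. A sketch of the call in the request-spec style (the ids and auth headers are illustrative):

    get '/db_elements/data.json',
        params: { elements: [1, 2].to_json, start_time: 0, end_time: 100 },
        headers: auth_headers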
app/controllers/db_streams_controller.rb
@@ -19,8 +19,8 @@ class DbStreamsController < ApplicationController
     params.permit(:name, :description, :name_abbrev, :hidden,
                   db_elements_attributes: [:id, :name, :units, :default_max,
-                                           :default_min, :scale_factor,
-                                           :offset, :plottable, :discrete])
+                                           :default_min, :scale_factor, :display_type,
+                                           :offset, :plottable])
   end

   def set_stream
app/models/db_element.rb
@@ -13,6 +13,8 @@ class DbElement < ApplicationRecord
   validates :scale_factor, presence: true, numericality: true
   validates :default_min, allow_nil: true, numericality: true
   validates :default_max, allow_nil: true, numericality: true
+  TYPES = %w(discrete continuous event)
+  validates :display_type, :inclusion => {:in => TYPES}

   # force set any validated params to acceptable
   # default values this allows us to process corrupt databases
@@ -23,6 +25,7 @@ class DbElement < ApplicationRecord
     self.default_max = nil
     self.scale_factor = 1.0
     self.offset = 0.0
+    self.display_type = 'continuous'
   end

   def name_path
@@ -30,7 +33,7 @@ class DbElement < ApplicationRecord
   end

   def self.json_keys
     [:id, :db_stream_id, :name, :units, :column, :default_max,
-     :discrete, :default_min, :scale_factor, :offset, :plottable]
+     :default_min, :scale_factor, :offset, :plottable, :display_type]
   end
 end
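With the inclusion validation in place, an element whose display_type falls outside TYPES fails validation. A quick console-style sketch (other validations on the model may also fire, since the record is not otherwise filled in):

    DbElement::TYPES            # => ["discrete", "continuous", "event"]
    e = DbElement.new(display_type: 'line')
    e.valid?                    # => false
    e.errors[:display_type]     # => ["is not included in the list"] (Rails' default message)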
app/services/data/load_element_data.rb
@@ -49,6 +49,10 @@ class LoadElementData
                     .sort { |a, b| a.end_time > b.end_time }
                     .first.end_time
     end
+    if @start_time > @end_time
+      add_error("invalid time bounds")
+      return
+    end
     #2 pull data from streams
     combined_data = []
     req_streams.each do |stream|
app/services/data/load_stream_data.rb
@@ -33,6 +33,7 @@ class LoadStreamData
     plottable_decim = findPlottableDecimationLevel(
       db_stream, valid_decim, start_time, end_time, resolution)
+    elements = db_stream.db_elements.order(:column)
     if plottable_decim.nil?
       #check if its nil becuase the nilm isn't available
       return self unless self.success?
@@ -41,7 +42,7 @@ class LoadStreamData
       path = __build_path(db_stream, valid_decim.level)
       resp = @db_adapter.get_intervals(path, start_time, end_time)
       @data_type = 'interval'
-      @data = __build_interval_data(db_stream, resp)
+      @data = __build_interval_data(elements, resp)
       return self
     end
     # request is plottable, see if we can get the raw (level 1) data
@@ -54,10 +55,13 @@ class LoadStreamData
     if plottable_decim.level == 1
       @data_type = 'raw'
-      @data = __build_raw_data(db_stream, resp)
+      @data = __build_raw_data(elements, resp)
     else
       @data_type = 'decimated'
-      @data = __build_decimated_data(db_stream, resp)
+      decimateable_elements = elements.where(display_type: ["continuous", "discrete"])
+      interval_elements = elements.where(display_type: "event")
+      @data = __build_decimated_data(decimateable_elements, resp) +
+              __build_intervals_from_decimated_data(interval_elements, resp)
     end
   end
@@ -150,8 +154,7 @@ class LoadStreamData
     "#{db_stream.path}~decim-#{level}"
   end

-  def __build_raw_data(db_stream, resp)
-    elements = db_stream.db_elements.order(:column)
+  def __build_raw_data(elements, resp)
     data = elements.map { |e| {id: e.id, type: 'raw', values: []} }
     resp.each do |row|
       if row.nil?
         # add an interval break to all the elements
@@ -166,8 +169,7 @@ class LoadStreamData
     return data
   end

-  def __build_decimated_data(db_stream, resp)
-    elements = db_stream.db_elements.order(:column)
+  def __build_decimated_data(elements, resp)
     data = elements.map { |e| {id: e.id, type: 'decimated', values: []} }
     resp.each do |row|
       if row.nil?
         # add an interval break to all the elements
@@ -176,12 +178,13 @@ class LoadStreamData
       end
       ts = row[0]
       elements.each_with_index do |elem, i|
+        ####TODO: fix offset calcs when elements is a subset
         mean_offset = 0
-        min_offset = elements.length
-        max_offset = elements.length * 2
-        mean = __scale_value(row[1 + i + mean_offset], elem)
-        min = __scale_value(row[1 + i + min_offset], elem)
-        max = __scale_value(row[1 + i + max_offset], elem)
+        min_offset = elem.db_stream.db_elements.length
+        max_offset = elem.db_stream.db_elements.length * 2
+        mean = __scale_value(row[1 + elem.column + mean_offset], elem)
+        min = __scale_value(row[1 + elem.column + min_offset], elem)
+        max = __scale_value(row[1 + elem.column + max_offset], elem)
         tmp_min = [min, max].min
         max = [min, max].max
         min = tmp_min
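The indexing assumes nilmdb's decimated row layout of [ts, mean_0..mean_{n-1}, min_0..min_{n-1}, max_0..max_{n-1}], which is why the min and max offsets are the stream's full element count and twice that count; switching from the loop index i to elem.column keeps the lookups aligned when only a subset of elements is passed in. A worked example using the 3-element test row from the specs below:

    row = [40, 0, 1, 2, -1, 0, 1, 1, 2, 3]   # ts, three means, three mins, three maxes
    n = 3                                    # elem.db_stream.db_elements.length
    column = 1
    row[1 + column]          # => 1  (mean, before scaling)
    row[1 + column + n]      # => 0  (min)
    row[1 + column + 2 * n]  # => 2  (max)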
@@ -191,11 +194,42 @@ class LoadStreamData
     return data
   end

-  def __build_interval_data(db_stream, resp)
-    elements = db_stream.db_elements.order(:column)
+  def __build_interval_data(elements, resp)
     elements.map { |e| {id: e.id, type: 'interval', values: resp} }
   end

+  #for data that cannot be represented as decimations
+  # eg: events, compute intervals from the actual decimated data
+  def __build_intervals_from_decimated_data(elements, resp)
+    #compute intervals from resp
+    start_time = resp.first[0]
+    end_time = resp.last[0]
+    interval_start = start_time
+    interval_end = start_time
+    intervals = []
+    resp.each do |row|
+      if row.nil?
+        intervals += [[interval_start, 0], [interval_end, 0], nil]
+        interval_start = nil
+        next
+      end
+      if interval_start == nil
+        interval_start = row[0]
+        next
+      end
+      interval_end = row[0]
+    end
+    if interval_start != nil
+      intervals += [[interval_start, 0], [end_time, 0]]
+    end
+    elements.map do |e|
+      {id: e.id, type: 'interval', values: intervals}
+    end
+  end
+
   def __scale_value(value, element)
     (value.to_f - element.offset) * element.scale_factor
   end
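Taken on its own, the new helper walks the decimated rows, treats each nil entry as an interval boundary, and emits zero-height [timestamp, 0] line segments separated by nil, the same shape __parse_intervals produces. A direct trace of the method on an invented two-row response:

    resp = [[40, 1.0], nil, [50, 2.0]]
    # intervals accumulates to [[40, 0], [40, 0], nil, [50, 0], [50, 0]]
    # and every event element receives {id: ..., type: 'interval', values: intervals}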
app/services/db/update_db.rb
@@ -10,7 +10,6 @@ class UpdateDb
   end

   def run(dbinfo, schema)
     # check to make sure dbinfo and schema are set
     # if either is nil, the database is not available
     if (dbinfo.nil? || schema.nil?)
app/services/db_stream/update_stream.rb
@@ -64,7 +64,8 @@ class UpdateStream
   def __build_elements(stream:, stream_data:)
     stream.column_count.times do |x|
       element = stream.db_elements.find_by_column(x)
-      element ||= DbElement.new(db_stream: stream, column: x)
+      element ||= DbElement.new(db_stream: stream, column: x,
+                                display_type: 'continuous')
       # check if there is stream metadata for column x
       entry = stream_data.select { |meta| meta[:column] == x }
       # use the metadata if present
db/migrate/20170422182154_add_type_to_db_elements.rb
0 → 100644
class AddTypeToDbElements < ActiveRecord::Migration[5.0]
  def change
    add_column :db_elements, :type, :string
    remove_column :db_elements, :discrete
  end
end
db/migrate/20170422184639_rename_type.rb
0 → 100644
class RenameType < ActiveRecord::Migration[5.0]
  def change
    rename_column :db_elements, :type, :display_type
  end
end
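Adding the column as :type and then renaming it in a follow-up migration is presumably a workaround for ActiveRecord reserving the type column name for single-table inheritance. Had the final name been chosen up front, a single hypothetical migration (not part of this commit) could have done both steps:

    class AddDisplayTypeToDbElements < ActiveRecord::Migration[5.0]
      def change
        add_column :db_elements, :display_type, :string
        remove_column :db_elements, :discrete, :boolean
      end
    end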
db/schema.rb
@@ -10,7 +10,7 @@
 #
 # It's strongly recommended that you check this file into your version control system.

-ActiveRecord::Schema.define(version: 20170214031515) do
+ActiveRecord::Schema.define(version: 20170422184639) do

   create_table "db_decimations", force: :cascade do |t|
     t.integer "start_time", limit: 8
@@ -36,7 +36,7 @@ ActiveRecord::Schema.define(version: 20170214031515) do
     t.datetime "created_at", null: false
     t.datetime "updated_at", null: false
     t.boolean "plottable"
-    t.boolean "discrete"
+    t.string "display_type"
   end

   create_table "db_folders", force: :cascade do |t|
spec/controllers/db_elements_controller_spec.rb
 # frozen_string_literal: true
 require 'rails_helper'

 RSpec.describe DbElementsController, type: :request do
-  let(:user1) { create(:confirmed_user, first_name: 'John')}
-  let(:user2) { create(:confirmed_user, first_name: 'Sam')}
-  describe 'GET #index' do
-    #retrieve data for elements listed by array of ids
+  let(:user1) { create(:confirmed_user, first_name: 'John') }
+  let(:user2) { create(:confirmed_user, first_name: 'Sam') }
+
+  describe 'GET #data' do
+    # retrieve data for elements listed by array of ids
     context 'with authenticated user' do
       before do
         nilm = create(:nilm, admins: [user1])
         stream = create(:db_stream, elements_count: 0,
-                        db: nilm.db, db_folder: nilm.db.root_folder)
+                        db: nilm.db,
+                        db_folder: nilm.db.root_folder)
         @elem1 = create(:db_element)
         @elem2 = create(:db_element)
         stream.db_elements << @elem1
         stream.db_elements << @elem2
-      end
-      it 'returns elements with data' do
         @service_data = [{id: @elem1.id, data: 'mock1'},
                          {id: @elem2.id, data: 'mock2'}]
         @mock_service = instance_double(LoadElementData,
                                         run: StubService.new,
+                                        start_time: 0, end_time: 1,
                                         success?: true,
                                         notices: [], warnings: [], errors: [],
                                         data: @service_data)
         allow(LoadElementData).to receive(:new).and_return(@mock_service)
+      end
+      it 'returns elements with data' do
         @auth_headers = user1.create_new_auth_token
-        get "/db_elements.json",
-            params: {elements: [@elem1.id, @elem2.id],
-                     start_time: 0, end_time: 100},
-            headers: @auth_headers
+        get '/db_elements/data.json',
+            params: {elements: [@elem1.id, @elem2.id].to_json,
+                     start_time: 0, end_time: 100},
+            headers: @auth_headers
         expect(response).to have_http_status(:ok)
         # check to make sure JSON renders the elements
         body = JSON.parse(response.body)
@@ -38,32 +42,31 @@ RSpec.describe DbElementsController, type: :request do
       end
       it 'returns error if time bounds are invalid' do
         @auth_headers = user1.create_new_auth_token
-        get "/db_elements.json",
-            params: {elements: [@elem1.id, @elem2.id],
-                     start_time: 100, end_time: 0},
-            headers: @auth_headers
+        get '/db_elements/data.json',
+            params: {elements: [@elem1.id, @elem2.id].to_json,
+                     start_time: 100, end_time: 0},
+            headers: @auth_headers
         expect(response).to have_http_status(:unprocessable_entity)
       end
       it 'only allows access to permitted elements' do
         nilm2 = create(:nilm, admins: [user2])
         stream2 = create(:db_stream, elements_count: 0,
-                         db: nilm2.db, db_folder: nilm2.db.root_folder)
+                         db: nilm2.db,
+                         db_folder: nilm2.db.root_folder)
         @elem3 = create(:db_element)
         stream2.db_elements << @elem3
         @auth_headers = user1.create_new_auth_token
-        get "/db_elements.json",
-            params: {elements: [@elem1.id, @elem3.id],
-                     start_time: 100, end_time: 0},
-            headers: @auth_headers
+        get '/db_elements/data.json',
+            params: {elements: [@elem1.id, @elem3.id].to_json,
+                     start_time: 100, end_time: 0},
+            headers: @auth_headers
         expect(response).to have_http_status(:unauthorized)
       end
     end
     context 'without sign-in' do
       it 'returns unauthorized' do
-        get "/db_elements.json"
+        get '/db_elements/data.json'
         expect(response).to have_http_status(:unauthorized)
       end
     end
spec/factories/db_element.rb
@@ -11,6 +11,6 @@ FactoryGirl.define do
     scale_factor 1.0
     offset 0.0
     plottable true
-    discrete false
+    display_type 'continuous'
   end
 end
spec/factories/db_stream.rb
@@ -7,9 +7,10 @@ FactoryGirl.define do
     name_abbrev { Faker::Lorem.word }
     description { Faker::Lorem.sentence }
     delete_locked false
-    start_time { Faker::Number.number(6) }
-    end_time { start_time + Faker::Number.number(5) }
-    size_on_disk { Faker::Number.number(6) }
+    start_time { Faker::Number.number(6).to_i }
+    end_time { start_time + Faker::Number.number(5).to_i }
+    total_time { end_time - start_time }
+    size_on_disk { Faker::Number.number(6).to_i }
     hidden false
     path { "/root/streams/#{Faker::Lorem.unique.word}" }
     data_type { "float32_#{elements_count}" }
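The .to_i casts and the new total_time attribute go together: end_time - start_time only works on numeric values, and in the Faker version this project appears to pin, Faker::Number.number returns its digits as a String (an assumption; the Gemfile is not part of this diff). A quick illustration of why the cast matters:

    '123456' + '12345'             # => "12345612345" (string concatenation)
    '123456'.to_i + '12345'.to_i   # => 135801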
spec/models/db_element_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe 'DbElement' do
     specify { expect(db_element).to respond_to(:scale_factor) }
     specify { expect(db_element).to respond_to(:offset) }
     specify { expect(db_element).to respond_to(:plottable) }
-    specify { expect(db_element).to respond_to(:discrete) }
+    specify { expect(db_element).to respond_to(:display_type) }
   end
   describe 'validation' do
spec/services/data/load_element_data_spec.rb
@@ -64,12 +64,14 @@ RSpec.describe 'LoadElementData' do
   describe 'when a nilm does not respond' do
     before do
       db = create(:db, url: 'http://test/nilmdb')
-      @db_stream1 = create(:db_stream, db: db, elements_count: 0)
+      @db_stream1 = create(:db_stream, db: db,
+                           db_folder: db.root_folder, elements_count: 0)
       @elem0 = create(:db_element, column: 0, db_stream: @db_stream1)
       @elem1 = create(:db_element, column: 1, db_stream: @db_stream1)
       @stream1_data = [{id: @elem0.id, values: 'mock0'},
                        {id: @elem1.id, values: 'mock1'}]
-      @db_stream2 = create(:db_stream, db: db, elements_count: 0)
+      @db_stream2 = create(:db_stream, db: db,
+                           db_folder: db.root_folder, elements_count: 0)
       @elem2 = create(:db_element, column: 2, db_stream: @db_stream2)
       @elem3 = create(:db_element, column: 3, db_stream: @db_stream2)
       @stream2_data = [{id: @elem2.id, values: 'mock2'},
spec/services/data/load_stream_data_spec.rb
@@ -7,17 +7,20 @@ RSpec.describe 'LoadStreamData' do
   describe 'with large datasets' do
     describe 'when the data is decimated' do
       before do
-        @data = [[98, 0, 1, 2, -1, 0, 1, 1, 2, 3], #decimated data (3 elements)
+        @data = [[40, 0, 1, 2, -1, 0, 1, 1, 2, 3], #decimated data (3 elements)
                  nil,
-                 [99, 0, 1, 2, -1, 0, 1, 1, 2, 3]]
+                 [50, 0, 1, 2, -1, 0, 1, 1, 2, 3]]
         @db_stream = create(:db_stream, elements_count: 0, db: db,
                             decimations_count: 3, # lvl64
                             start_time: 0, end_time: 100)
+        #create the db elements
+        types = ['discrete', 'continuous', 'event']
         3.times do |i|
           @db_stream.db_elements << create(:db_element,
-                                           column: i, offset: i + 1, scale_factor: i + 2)
+                                           column: i, offset: i + 1, scale_factor: i + 2,
+                                           display_type: types[i])
         end
         @mockAdapter = MockDataDbAdapter.new(
           start_time: @db_stream.start_time,
           end_time: @db_stream.end_time,
@@ -46,16 +49,29 @@ RSpec.describe 'LoadStreamData' do
       end
       it 'populates @data structure with decimated data' do
         @service.run(@db_stream, 10, 90)
         expect(@service.data.length).to eq 3
+        d_count = 0
+        i_count = 0
         @service.data.each_with_index do |data, i|
           elem = @db_stream.db_elements.find_by_column(i)
           expect(data[:id]).to eq elem.id
-          mean = __scale_value(i, elem)
-          min = __scale_value(i - 1, elem)
-          max = __scale_value(i + 1, elem)
-          expect(data[:values]).to eq([[98, mean, min, max],
-                                       nil,
-                                       [99, mean, min, max]])
+          if (elem.display_type == "discrete" || elem.display_type == "continuous")
+            d_count += 1
+            mean = __scale_value(i, elem)
+            min = __scale_value(i - 1, elem)
+            max = __scale_value(i + 1, elem)
+            expect(data[:type]).to eq 'decimated'
+            expect(data[:values]).to eq([[40, mean, min, max],
+                                         nil,
+                                         [50, mean, min, max]])
+          else
+            i_count += 1
+            expect(data[:type]).to eq 'interval'
+            expect(data[:values]).to eq [[10, 40], [50, 90]]
+          end
         end
+        expect(d_count).to eq 2 #2 decimated Streams
+        expect(i_count).to eq 1 #1 interval stream
       end
     end
     describe 'when the data is not decimated' do
@@ -96,7 +112,7 @@ RSpec.describe 'LoadStreamData' do
     end
     describe 'with small datasets' do
       before do
-        @data = [[98, 0, 1, 2], nil, [99, 0, 1, 2]]
+        @data = [[40, 0, 1, 2], nil, [50, 0, 1, 2]]
         @db_stream = create(:db_stream, elements_count: 0, db: db,
                             decimations_count: 3, # lvl64
                             start_time: 0, end_time: 100)
@@ -129,9 +145,9 @@ RSpec.describe 'LoadStreamData' do
         @service.data.each_with_index do |data, i|
           elem = @db_stream.db_elements.find_by_column(i)
           expect(data[:id]).to eq elem.id
-          expect(data[:values]).to eq([[98, (i - elem.offset) * elem.scale_factor],
-                                       nil,
-                                       [99, (i - elem.offset) * elem.scale_factor]])
+          expect(data[:values]).to eq([[40, (i - elem.offset) * elem.scale_factor],
+                                       nil,
+                                       [50, (i - elem.offset) * elem.scale_factor]])
         end
       end
     end
@@ -172,6 +188,8 @@ RSpec.describe 'LoadStreamData' do
       end
     end
   end
 end
+
+def __scale_value(value, element)