Commit e31a3af2 by John Doe

Refactored code to use adapters instead of a direct backend HTTP client

parent 5c9ff2b5
Showing with 1242 additions and 1115 deletions
...@@ -24,4 +24,5 @@ vendor/bundle ...@@ -24,4 +24,5 @@ vendor/bundle
spec/cassettes spec/cassettes
vagrant_boxes/*.log vagrant_boxes/*.log
vagrant_boxes/.vagrant vagrant_boxes/.vagrant
coverage coverage
\ No newline at end of file .idea
\ No newline at end of file
<component name="ProjectDictionaryState">
<dictionary name="jdonnal">
<words>
<w>dbinfo</w>
<w>nilm</w>
<w>nilmdb</w>
</words>
</dictionary>
</component>
\ No newline at end of file
<component name="InspectionProjectProfileManager">
<profile version="1.0">
<option name="myName" value="Project Default" />
<inspection_tool class="Rubocop" enabled="false" level="WARNING" enabled_by_default="false" />
<inspection_tool class="SpellCheckingInspection" enabled="false" level="TYPO" enabled_by_default="false">
<option name="processCode" value="true" />
<option name="processLiterals" value="true" />
<option name="processComments" value="true" />
</inspection_tool>
</profile>
</component>
\ No newline at end of file
# frozen_string_literal: true
module Joule
  # Adapter for Joule nodes.
  # Exposes the adapter interface the application dispatches on;
  # the NilmDB counterpart is Nilmdb::Adapter.
  class Adapter
    # @return [String] identifier for this backend type
    def node_type
      'joule'
    end
  end
end
\ No newline at end of file
# frozen_string_literal: true
module Joule
  # Wrapper around the Joule HTTP service.
  class Backend
    include HTTParty
    default_timeout 5
    open_timeout 5
    read_timeout 5
    attr_reader :url

    # url: base URL of the Joule server
    def initialize(url)
      @url = url
    end

    # Retrieve the module list from the Joule server.
    # Returns an array of module hashes with symbolized keys, or nil when
    # the server is unreachable or does not look like a Joule server.
    def module_info
      begin
        resp = self.class.get("#{@url}/modules.json")
        return nil unless resp.success?
        items = resp.parsed_response
        # if the site exists but is not a joule server the entries will be
        # missing the required keys (or not be hashes at all)
        required_keys = %w(name exec_cmd)
        items.each do |item|
          return nil unless item.respond_to?(:has_key?) &&
                            required_keys.all? { |s| item.key? s }
          item.symbolize_keys!
        end
      rescue
        # network/parse errors are reported as "no info available"
        return nil
      end
      items
    end

    # Proxy a request to a module's web interface.
    # Returns the raw HTTParty response.
    def module_interface(joule_module, req)
      self.class.get("#{@url}/interface/#{joule_module.joule_id}/#{req}")
    end
  end
end
# frozen_string_literal: true
module Nilmdb
  # Adapter for NilmDB nodes: wraps a Backend instance and exposes the
  # high-level operations (refresh, save, load) used by the application.
  class Adapter
    # url: base URL of the NilmDB HTTP service
    def initialize(url)
      @backend = Backend.new(url)
    end

    # Rebuild the local database representation from the remote schema.
    def refresh(db:)
      db_service = UpdateDb.new(db: db)
      db_service.run(@backend.dbinfo, @backend.schema)
    end

    # Refresh a single stream (its base entry plus decimation entries).
    def refresh_stream(db_stream)
      entries = @backend.stream_info(db_stream)
      service = UpdateStream.new(db_stream,
                                 entries[:base_entry],
                                 entries[:decimation_entries])
      service.run
    end

    # Push local stream metadata to the NilmDB server.
    def save_stream(db_stream)
      @backend.set_stream_metadata(db_stream)
    end

    # Push local folder metadata to the NilmDB server.
    def save_folder(db_folder)
      @backend.set_folder_metadata(db_folder)
    end

    # Load data for db_stream over [start_time, end_time].
    # elements: optional subset of elements (all elements when empty)
    # resolution: optional cap on returned resolution
    # Returns {data:, decimation_factor:} or nil when the load fails.
    def load_data(db_stream, start_time, end_time, elements=[], resolution=nil)
      data_service = LoadStreamData.new(@backend)
      data_service.run(db_stream, start_time, end_time, elements, resolution)
      return nil unless data_service.success?
      { data: data_service.data,
        decimation_factor: data_service.decimation_factor }
    end

    # @return [String] identifier for this backend type
    def node_type
      'nilmdb'
    end
  end
end
# frozen_string_literal: true
module Nilmdb
  # Wrapper around the NilmDB HTTP service.
  class Backend
    include HTTParty
    default_timeout 5
    open_timeout 5
    read_timeout 5
    attr_reader :url

    # url: base URL of the NilmDB server
    def initialize(url)
      @url = url
    end

    # Retrieve server version and database size statistics.
    # Returns {version:, size_db:, size_other:, size_total:} or nil when the
    # server is unreachable or does not look like a NilmDB server.
    def dbinfo
      begin
        resp = self.class.get("#{@url}/version")
        return nil unless resp.success?
        version = resp.parsed_response
        resp = self.class.get("#{@url}/dbinfo")
        return nil unless resp.success?
        info = resp.parsed_response
      rescue
        return nil
      end
      # if the site exists but is not a nilm the payload will be missing keys
      required_keys = %w(size other free reserved)
      unless info.respond_to?(:has_key?) &&
             required_keys.all? { |s| info.key? s }
        return nil
      end
      {
        version: version,
        size_db: info['size'],
        size_other: info['other'],
        size_total: info['size'] + info['other'] + info['free'] + info['reserved']
      }
    end

    # GET the extended stream list (optionally restricted to +path+) and
    # convert it into schema entries:
    #   [{path:, attributes: {...}, elements: [...]}, ...]
    # Returns nil when the server cannot be reached or is not a NilmDB.
    def schema(path='')
      # GET extended info stream list
      begin
        if(path.empty?)
          resp = self.class.get("#{@url}/stream/list?extended=1")
        else
          resp = self.class.get("#{@url}/stream/list?path=#{path}&extended=1")
        end
        return nil unless resp.success?
      rescue
        return nil
      end
      # if the url exists but is not a nilm...
      return nil unless resp.parsed_response.respond_to?(:map)
      resp.parsed_response.map do |entry|
        metadata = if entry[0].match(UpdateStream.decimation_tag).nil?
                     __get_metadata(entry[0])
                   else
                     {} # decimation entry, no need to pull metadata
                   end
        # The streams are not pure attributes, pull them out
        elements = metadata.delete(:streams) || []
        elements.each(&:symbolize_keys!)
        # Create the schema: 3 elements: path, attributes, elements
        {
          path: entry[0],
          attributes: {
            data_type: entry[1],
            start_time: entry[2],
            end_time: entry[3],
            total_rows: entry[4],
            total_time: entry[5]
          }.merge(metadata),
          elements: elements
        }
      end
    end

    # return latest info about the specified stream
    # TODO: the HTTP API does not support wild cards so no decimations
    # are returned
    # {
    #   base_entry: ...,
    #   decimation_entries: [...]
    # }
    # this can be fed into the UpdateStream service
    def stream_info(stream)
      entries = schema("#{stream.path}")
      base_entry = entries
                   .select { |e| e[:path].match(UpdateStream.decimation_tag).nil? }
                   .first
      {
        base_entry: base_entry,
        decimation_entries: entries - [base_entry] # whatever is left over
      }
    end

    # Write folder metadata to the folder's hidden info stream.
    def set_folder_metadata(db_folder)
      # always try to create the info stream, this fails silently if it exists
      __create_stream("#{db_folder.path}/info", "uint8_1")
      _set_path_metadata("#{db_folder.path}/info",
                         __build_folder_metadata(db_folder))
    end

    # Write stream metadata to the stream's path.
    def set_stream_metadata(db_stream)
      _set_path_metadata(db_stream.path,
                         __build_stream_metadata(db_stream))
    end

    # Number of raw rows in [start_time, end_time] for +path+,
    # or nil on any HTTP/network failure.
    def get_count(path, start_time, end_time)
      resp = self.class.get("#{@url}/stream/extract",
                            query: {
                              path: path,
                              start: start_time,
                              end: end_time,
                              count: 1
                            })
      return nil unless resp.success?
      return resp.parsed_response.to_i
    rescue
      return nil
    end

    # Extract data rows (with interval markup) over [start_time, end_time].
    # Returns the parsed array (see __parse_data) or nil on failure.
    def get_data(path, start_time, end_time)
      resp = self.class.get("#{@url}/stream/extract",
                            query: {
                              path: path,
                              start: start_time,
                              end: end_time,
                              markup: 1
                            })
      return nil unless resp.success?
      return __parse_data(resp.parsed_response)
    rescue
      return nil
    end

    # Retrieve the stream's intervals over [start_time, end_time].
    # Returns the parsed array (see __parse_intervals) or nil on failure.
    def get_intervals(path, start_time, end_time)
      resp = self.class.get("#{@url}/stream/intervals",
                            query: {
                              path: path,
                              start: start_time,
                              end: end_time
                            })
      return nil unless resp.success?
      return __parse_intervals(resp.parsed_response)
    rescue
      return nil
    end

    # POST metadata for +path+; returns {error:, msg:}.
    def _set_path_metadata(path, data)
      params = { path: path,
                 data: data }.to_json
      begin
        response = self.class.post("#{@url}/stream/update_metadata",
                                   body: params,
                                   headers: { 'Content-Type' => 'application/json' })
      rescue
        return { error: true, msg: 'cannot contact NilmDB server' }
      end
      unless response.success?
        Rails.logger.warn("#{@url}: update_metadata(#{path})"\
                          " => #{response.code}:#{response.body}")
        return { error: true, msg: "error updating #{path} metadata" }
      end
      { error: false, msg: 'success' }
    end

    # convert folder attributes to config_key__ json
    def __build_folder_metadata(db_folder)
      attribs = db_folder.attributes
                         .slice('name', 'description', 'hidden')
                         .to_json
      { config_key__: attribs }.to_json
    end

    # convert stream attributes to config_key__ json
    def __build_stream_metadata(db_stream)
      attribs = db_stream.attributes
                         .slice('name', 'name_abbrev', 'description', 'hidden')
      # elements are called streams in the nilmdb metadata
      # and they don't have id or timestamp fields
      attribs[:streams] = db_stream.db_elements.map do |e|
        vals = e.attributes.except('id', 'created_at', 'updated_at', 'db_stream_id')
        vals[:discrete] = e.display_type == 'event'
        vals
      end
      { config_key__: attribs.to_json }.to_json
    end

    # retrieve metadata for a particular stream
    def __get_metadata(path)
      dump = self.class.get("#{@url}/stream/get_metadata?path=#{path}")
      # find legacy parameters in raw metadata
      # TODO: why is parsed_response a string sometimes??
      metadata = dump.parsed_response.except('config_key__')
      # parse values from config_key entry if it exists
      config_key = JSON.parse(dump.parsed_response['config_key__'] || '{}')
      # merge legacy data with config_key values
      metadata.merge!(config_key)
      # make sure nothing bad got in (eg extraneous metadata keys)
      __sanitize_metadata(metadata)
    end

    # create a new stream on the database; returns {error:, msg:}
    def __create_stream(path, dtype)
      params = { path: path,
                 layout: dtype }.to_json
      begin
        response = self.class.post("#{@url}/stream/create",
                                   body: params,
                                   headers: { 'Content-Type' => 'application/json' })
      rescue
        return { error: true, msg: 'cannot contact NilmDB server' }
      end
      unless response.success?
        Rails.logger.warn("#{@url}: create(#{path})"\
                          " => #{response.code}:#{response.body}")
        return { error: true, msg: "error creating #{path}" }
      end
      { error: false, msg: 'success' }
    end

    # make sure all the keys are valid parameters
    # this function does not know the difference between folders and streams
    # this *should* be ok as long as nobody tinkers with the config_key__ entries
    def __sanitize_metadata(metadata)
      metadata.slice!('delete_locked', 'description', 'hidden',
                      'name', 'name_abbrev', 'streams')
      unless metadata['streams'].nil?
        # sanitize 'streams' (elements) parameters
        element_attrs = DbElement.attribute_names.map(&:to_sym)
        metadata['streams'].map! do |element|
          # map the legacy discrete flag to new type setting
          #   discrete == True  => type = event
          #   discrete == False => type = continuous
          element.symbolize_keys!
          if element[:display_type].nil?
            if element[:discrete]
              element[:display_type] = 'event'
            else
              element[:display_type] = 'continuous'
            end
          end
          element.slice(*element_attrs)
        end
      end
      metadata.symbolize_keys
    end

    # create an array from the string response: one row per line, each row an
    # array of floats; a nil entry marks an interval break.
    def __parse_data(resp)
      return [] if resp.nil? # no data returned
      data = []
      add_break = false
      # NOTE(review): the interval markers must be declared outside the block;
      # block-local variables do not persist between iterations, so declaring
      # them inside the loop would make the start/end comparison below always
      # see a nil start marker.
      interval_start = nil
      interval_end = nil
      resp.split("\n").each do |row|
        next if row.empty? # last row is empty (\n)
        words = row.split(' ')
        # check if this is an interval
        if words[0] == '#'
          # this is a comment line, check if it is an interval boundary marker
          interval_start = words[2].to_i if words[1] == 'interval-start'
          if words[1] == 'interval-end'
            interval_end = words[2].to_i
            add_break = true if interval_end != interval_start
          end
          next
        end
        data.push(nil) if add_break # add a data break
        add_break = false
        # this is a normal row
        data.push(words.map(&:to_f))
      end
      data
    end

    # create horizontal line segments representing the intervals
    def __parse_intervals(resp)
      intervals = JSON.parse('[' + resp.chomp.gsub(/\r\n/, ',') + ']')
      data = []
      intervals.each do |interval|
        data.push([interval[0], 0])
        data.push([interval[1], 0])
        data.push(nil) # break up the intervals
      end
      data
    end
  end
end
# frozen_string_literal: true # frozen_string_literal: true
module Nilmdb
# Loads stream data over specified interval
class LoadStreamData
include ServiceStatus
attr_reader :data, :data_type, :decimation_factor
# Loads stream data over specified interval def initialize(db_backend)
class LoadStreamData super()
include ServiceStatus @db_backend = db_backend
attr_reader :data, :data_type, :decimation_factor @data = []
@data_type = 'unset' # interval, raw, decimated
def initialize(db_adapter) @decimation_factor = 1
super() end
@db_adapter = db_adapter
@data = []
@data_type = 'unset' # interval, raw, decimated
@decimation_factor = 1
end
# load data at or below the resolution of the # load data at or below the resolution of the
# associated database, sets data and data_type # associated database, sets data and data_type
# specify a subset of elements as an optional array # specify a subset of elements as an optional array
# if ommitted, all elements are extracted from the stream (expensive!) # if ommitted, all elements are extracted from the stream (expensive!)
# optionally specify a resolution, if omitted, returns maximum resolution # optionally specify a resolution, if omitted, returns maximum resolution
# allowed by the nilm # allowed by the nilm
# #
# sets data and data_type # sets data and data_type
# data_type: raw # data_type: raw
# data: # data:
# [{id: element_id, type: raw values: [[ts,y],[ts,y],nil,[ts,y]]},...] # [{id: element_id, type: raw values: [[ts,y],[ts,y],nil,[ts,y]]},...]
# #
# data_type: decimated # data_type: decimated
# event data: # event data:
# [{id: element_id, type: interval, values: [[start,0],[end,0],nil,...]}] # [{id: element_id, type: interval, values: [[start,0],[end,0],nil,...]}]
# continuous or discrete data: # continuous or discrete data:
# [{id: element_id, type: decimated, values: [[ts,y,ymin,ymax],[ts,y,ymin,ymax],nil,...]}] # [{id: element_id, type: decimated, values: [[ts,y,ymin,ymax],[ts,y,ymin,ymax],nil,...]}]
# #
# data_type: interval # data_type: interval
# data: # data:
# [{id: element_id, type: interval, values: [[start,0],[end,0],nil,...]}] # [{id: element_id, type: interval, values: [[start,0],[end,0],nil,...]}]
# #
def run(db_stream, start_time, end_time, elements = [], resolution=nil) def run(db_stream, start_time, end_time, elements = [], resolution=nil)
# if elements are not explicitly passed, get all of them # if elements are not explicitly passed, get all of them
if(elements.empty?) if(elements.empty?)
elements = db_stream.db_elements.all.to_a elements = db_stream.db_elements.all.to_a
end end
elements.sort_by!(&:column) elements.sort_by!(&:column)
resolution = if resolution.nil? resolution = if resolution.nil?
db_stream.db.max_points_per_plot db_stream.db.max_points_per_plot
else else
[db_stream.db.max_points_per_plot,resolution].min [db_stream.db.max_points_per_plot,resolution].min
end end
valid_decim = findValidDecimationLevel(db_stream, start_time) valid_decim = findValidDecimationLevel(db_stream, start_time)
# valid_decim is the highest resolution, find one we can plot # valid_decim is the highest resolution, find one we can plot
plottable_decim = findPlottableDecimationLevel( plottable_decim = findPlottableDecimationLevel(
db_stream, valid_decim, start_time, end_time, resolution db_stream, valid_decim, start_time, end_time, resolution
) )
if plottable_decim.nil? if plottable_decim.nil?
# check if its nil becuase the nilm isn't available # check if its nil becuase the nilm isn't available
return self unless success? return self unless success?
# data is not sufficiently decimated, get intervals from # data is not sufficiently decimated, get intervals from
# the valid decimation level (highest resolution) # the valid decimation level (highest resolution)
path = __build_path(db_stream, valid_decim.level) path = __build_path(db_stream, valid_decim.level)
resp = @db_adapter.get_intervals(path, start_time, end_time) resp = @db_backend.get_intervals(path, start_time, end_time)
@data_type = 'interval' @data_type = 'interval'
@data = __build_interval_data(elements, resp) @data = __build_interval_data(elements, resp)
return self return self
end end
# request is plottable, see if we can get the data # request is plottable, see if we can get the data
@decimation_factor = plottable_decim.level @decimation_factor = plottable_decim.level
path = __build_path(db_stream, plottable_decim.level) path = __build_path(db_stream, plottable_decim.level)
resp = @db_adapter.get_data(path, start_time, end_time) resp = @db_backend.get_data(path, start_time, end_time)
if resp.nil? if resp.nil?
add_error("cannot get data for [#{path}] @ #{@db_adapter.url}") add_error("cannot get data for [#{path}] @ #{@db_backend.url}")
return self return self
end end
if plottable_decim.level == 1 if plottable_decim.level == 1
@data_type = 'raw' @data_type = 'raw'
@data = __build_raw_data(elements, resp) @data = __build_raw_data(elements, resp)
else else
@data_type = 'decimated' @data_type = 'decimated'
decimateable_elements = decimateable_elements =
elements.select{|e| %w(continuous discrete).include? e.display_type} elements.select{|e| %w(continuous discrete).include? e.display_type}
interval_elements = elements.select{|e| e.display_type=='event'} interval_elements = elements.select{|e| e.display_type=='event'}
@data = __build_decimated_data(decimateable_elements, resp) + @data = __build_decimated_data(decimateable_elements, resp) +
__build_intervals_from_decimated_data(interval_elements, resp) __build_intervals_from_decimated_data(interval_elements, resp)
end
self
end end
self
end
#===Description #===Description
# Given a starting decimation level and time interval # Given a starting decimation level and time interval
# find a decimation level that meets the target resolution # find a decimation level that meets the target resolution
#===Parameters #===Parameters
# * +db_stream+ - DbStream object # * +db_stream+ - DbStream object
# * +start_time+ - unix timestamp in us # * +start_time+ - unix timestamp in us
# #
# returns: +decimation_level+ - DecimationLevel object # returns: +decimation_level+ - DecimationLevel object
# *NOTE:* if the data is too high resolution to request # *NOTE:* if the data is too high resolution to request
# (data is not sufficiently decimated), # (data is not sufficiently decimated),
# the decimation level will be 0 # the decimation level will be 0
# #
def findPlottableDecimationLevel( def findPlottableDecimationLevel(
db_stream, valid_decim, start_time, end_time, resolution db_stream, valid_decim, start_time, end_time, resolution
) )
path = db_stream.path path = db_stream.path
path += "~decim-#{valid_decim.level}" if valid_decim.level > 1 path += "~decim-#{valid_decim.level}" if valid_decim.level > 1
# figure out how much data this stream has over the interval # figure out how much data this stream has over the interval
count = @db_adapter.get_count(path, start_time, end_time) count = @db_backend.get_count(path, start_time, end_time)
if count.nil? if count.nil?
add_error("cannot get count for [#{path}] @ #{@db_adapter.url}") add_error("cannot get count for [#{path}] @ #{@db_backend.url}")
return nil return nil
end
# find out how much raw data exists over the specified interval
raw_count = count * valid_decim.level
# now we can find the right decimation level for plotting
# if the valid decim can be plotted, use it
return valid_decim if raw_count <= resolution
# otherwise look for a higher decimation level
found_valid_decim = false
db_stream.db_decimations
.where('level >= ?', valid_decim.level)
.order(:level)
.each do |decim|
if raw_count / decim.level <= resolution
# the lowest decimation level is the best
return decim
end end
# find out how much raw data exists over the specified interval
raw_count = count * valid_decim.level
# now we can find the right decimation level for plotting
# if the valid decim can be plotted, use it
return valid_decim if raw_count <= resolution
# otherwise look for a higher decimation level
found_valid_decim = false
db_stream.db_decimations
.where('level >= ?', valid_decim.level)
.order(:level)
.each do |decim|
if raw_count / decim.level <= resolution
# the lowest decimation level is the best
return decim
end
end
# all of the decimations have too much data
# no plottable decimation exists
nil
end end
# all of the decimations have too much data
# no plottable decimation exists
nil
end
#===Description #===Description
# Given the plot resolution and time interval, find the decimation # Given the plot resolution and time interval, find the decimation
# level with the highest resolution data possible. This means # level with the highest resolution data possible. This means
# find highest resolution stream that has a start_time before # find highest resolution stream that has a start_time before
# the specified start_time # the specified start_time
#===Parameters #===Parameters
# * +db_stream+ - DbStream object # * +db_stream+ - DbStream object
# * +start_time+ - unix timestamp in us # * +start_time+ - unix timestamp in us
# #
# returns: +decimation_level+ - DecimationLevel object # returns: +decimation_level+ - DecimationLevel object
# #
def findValidDecimationLevel(db_stream, start_time) def findValidDecimationLevel(db_stream, start_time)
# assume raw stream is a valid level (best resolution) # assume raw stream is a valid level (best resolution)
validDecim = DbDecimation.new(level: 1) validDecim = DbDecimation.new(level: 1)
# check if raw stream has the data # check if raw stream has the data
if !db_stream.start_time.nil? && if !db_stream.start_time.nil? &&
db_stream.start_time <= start_time db_stream.start_time <= start_time
return validDecim return validDecim
end end
# keep track of the level thats missing the least data, this will be used # keep track of the level thats missing the least data, this will be used
# if no level can be found with all the data # if no level can be found with all the data
min_gap = db_stream.start_time - start_time min_gap = db_stream.start_time - start_time
db_stream.db_decimations.order(:level).each do |decim| db_stream.db_decimations.order(:level).each do |decim|
# skip empty decimation levels # skip empty decimation levels
next if decim.start_time.nil? || decim.end_time.nil? next if decim.start_time.nil? || decim.end_time.nil?
# the first (lowest) level with data over the interval is the best answer # the first (lowest) level with data over the interval is the best answer
return decim if decim.start_time <= start_time return decim if decim.start_time <= start_time
# this level doesn't contain all the requested data, see how much its missing # this level doesn't contain all the requested data, see how much its missing
gap = decim.start_time - start_time gap = decim.start_time - start_time
if min_gap.nil? || gap < min_gap if min_gap.nil? || gap < min_gap
min_gap = gap min_gap = gap
validDecim = decim validDecim = decim
end
end end
validDecim
end end
validDecim
end
def __build_path(db_stream, level) def __build_path(db_stream, level)
return db_stream.path if level == 1 return db_stream.path if level == 1
"#{db_stream.path}~decim-#{level}" "#{db_stream.path}~decim-#{level}"
end end
def __build_raw_data(elements, resp) def __build_raw_data(elements, resp)
data = elements.map { |e| { id: e.id, type: 'raw', values: [] } } data = elements.map { |e| { id: e.id, type: 'raw', values: [] } }
resp.each do |row| resp.each do |row|
if row.nil? # add an interval break to all the elements if row.nil? # add an interval break to all the elements
data.each { |d| d[:values].push(nil) } data.each { |d| d[:values].push(nil) }
next next
end end
ts = row[0] ts = row[0]
elements.each_with_index do |elem, i| elements.each_with_index do |elem, i|
data[i][:values].push([ts, __scale_value(row[1 + elem.column], elem)]) data[i][:values].push([ts, __scale_value(row[1 + elem.column], elem)])
end
end end
data
end end
data
end
def __build_decimated_data(elements, resp) def __build_decimated_data(elements, resp)
# if elements is empty we don't need to do anything # if elements is empty we don't need to do anything
return [] if elements.empty? return [] if elements.empty?
#prepare the data structure #prepare the data structure
data = elements.map { |e| { id: e.id, type: 'decimated', values: Array.new(resp.length) } } data = elements.map { |e| { id: e.id, type: 'decimated', values: Array.new(resp.length) } }
#set up constants so we compute them once #set up constants so we compute them once
mean_offset = 0 mean_offset = 0
min_offset = elements.first.db_stream.db_elements.length min_offset = elements.first.db_stream.db_elements.length
max_offset = elements.first.db_stream.db_elements.length * 2 max_offset = elements.first.db_stream.db_elements.length * 2
resp.each_with_index do |row, k| resp.each_with_index do |row, k|
if row.nil? # add an interval break to all the elements if row.nil? # add an interval break to all the elements
data.each { |d| d[:values][k]=nil } data.each { |d| d[:values][k]=nil }
next next
end end
ts = row[0] ts = row[0]
elements.each_with_index do |elem, i| elements.each_with_index do |elem, i|
#mean = __scale_value(row[1 + elem.column + mean_offset], elem) #mean = __scale_value(row[1 + elem.column + mean_offset], elem)
#min = __scale_value(row[1 + elem.column + min_offset], elem) #min = __scale_value(row[1 + elem.column + min_offset], elem)
#max = __scale_value(row[1 + elem.column + max_offset], elem) #max = __scale_value(row[1 + elem.column + max_offset], elem)
mean = (row[1 + elem.column + mean_offset] - elem.offset) * elem.scale_factor mean = (row[1 + elem.column + mean_offset] - elem.offset) * elem.scale_factor
min = (row[1 + elem.column + min_offset] - elem.offset) * elem.scale_factor min = (row[1 + elem.column + min_offset] - elem.offset) * elem.scale_factor
max = (row[1 + elem.column + max_offset] - elem.offset) * elem.scale_factor max = (row[1 + elem.column + max_offset] - elem.offset) * elem.scale_factor
tmp_min = [min, max].min tmp_min = [min, max].min
max = [min, max].max max = [min, max].max
min = tmp_min min = tmp_min
data[i][:values][k]=[ts, mean, min, max] data[i][:values][k]=[ts, mean, min, max]
end
end end
data
end end
data
end
def __build_interval_data(elements, resp) def __build_interval_data(elements, resp)
elements.map { |e| { id: e.id, type: 'interval', values: resp } } elements.map { |e| { id: e.id, type: 'interval', values: resp } }
end end
# for data that cannot be represented as decimations # for data that cannot be represented as decimations
# eg: events, compute intervals from the actual decimated data # eg: events, compute intervals from the actual decimated data
def __build_intervals_from_decimated_data(elements, resp) def __build_intervals_from_decimated_data(elements, resp)
# if elements is empty we don't need to do anything # if elements is empty we don't need to do anything
return [] if elements.empty? return [] if elements.empty?
# compute intervals from resp # compute intervals from resp
if resp.empty? if resp.empty?
elements.map do |e| elements.map do |e|
{ id: e.id, { id: e.id,
type: 'interval', type: 'interval',
values: [] } values: [] }
end
end end
end intervals = []
intervals = [] interval_start = nil
interval_start = nil interval_end = nil
interval_end = nil resp.each do |row|
resp.each do |row| if row.nil?
if row.nil? if !interval_start.nil? && !interval_end.nil?
if !interval_start.nil? && !interval_end.nil? # interval break and we know the start and end times
# interval break and we know the start and end times intervals += [[interval_start, 0], [interval_end, 0], nil]
intervals += [[interval_start, 0], [interval_end, 0], nil] interval_start = nil
interval_start = nil end
next
end end
next if interval_start.nil?
interval_start = row[0]
next
end
interval_end = row[0]
end
if !interval_start.nil? && !interval_end.nil?
intervals += [[interval_start, 0], [interval_end, 0]]
end end
if interval_start.nil? elements.map do |e|
interval_start = row[0] { id: e.id,
next type: 'interval',
values: intervals }
end end
interval_end = row[0]
end end
if !interval_start.nil? && !interval_end.nil? def __scale_value(value, element)
intervals += [[interval_start, 0], [interval_end, 0]] (value.to_f - element.offset) * element.scale_factor
end end
elements.map do |e|
{ id: e.id,
type: 'interval',
values: intervals }
end
end
def __scale_value(value, element)
(value.to_f - element.offset) * element.scale_factor
end end
end end
\ No newline at end of file
# frozen_string_literal: true # frozen_string_literal: true
module Nilmdb
# Handles construction of database objects # Handles construction of database objects
class UpdateDb class UpdateDb
include ServiceStatus include ServiceStatus
def initialize(db:) def initialize(db:)
@db = db @db = db
super() super()
end
def run(dbinfo, schema)
# check to make sure dbinfo and schema are set
# if either is nil, the database is not available
if(dbinfo.nil? || schema.nil?)
add_error("cannot contact database at #{@db.url}")
@db.update_attributes(available: false)
return self
else
@db.available = true
end end
# create the root folder if it doesn't exist def run(dbinfo, schema)
@db.root_folder ||= DbFolder.create(db: @db, name: 'root', path: '/') # check to make sure dbinfo and schema are set
@root_folder = @db.root_folder # if either is nil, the database is not available
if(dbinfo.nil? || schema.nil?)
# create the entry array from the schema add_error("cannot contact database at #{@db.url}")
entries = __create_entries(schema) @db.update_attributes(available: false)
return self
updater = UpdateFolder.new(@root_folder, entries) else
@db.available = true
# update db attributes from dbinfo end
@db.size_total = dbinfo[:size_total]
@db.size_db = dbinfo[:size_db] # create the root folder if it doesn't exist
@db.size_other = dbinfo[:size_other] @db.root_folder ||= DbFolder.create(db: @db, name: 'root', path: '/')
@db.version = dbinfo[:version] @root_folder = @db.root_folder
#@root_folder.transaction do
absorb_status(updater.run) # create the entry array from the schema
#end entries = __create_entries(schema)
@db.save
set_notice("Database refreshed") updater = UpdateFolder.new(@root_folder, entries)
self
end # update db attributes from dbinfo
@db.size_total = dbinfo[:size_total]
@db.size_db = dbinfo[:size_db]
@db.size_other = dbinfo[:size_other]
@db.version = dbinfo[:version]
#@root_folder.transaction do
absorb_status(updater.run)
#end
@db.save
set_notice("Database refreshed")
self
end
protected protected
# Adds :chunks to each schema element # Adds :chunks to each schema element
# :chunks is an array of the entry's path elements # :chunks is an array of the entry's path elements
# this makes it easier to traverse the database structure. # this makes it easier to traverse the database structure.
# The array is reversed so the chunks can be popped off in order # The array is reversed so the chunks can be popped off in order
# path: '/data/meter/prep-a' # path: '/data/meter/prep-a'
# chunks: ['prep-a','meter','data'] # chunks: ['prep-a','meter','data']
# #
def __create_entries(schema) def __create_entries(schema)
schema.map do |entry| schema.map do |entry|
entry[:chunks] = entry[:path][1..-1].split('/').reverse entry[:chunks] = entry[:path][1..-1].split('/').reverse
entry entry
end
end end
end end
end end
# frozen_string_literal: true # frozen_string_literal: true
module Nilmdb
# Handles construction of DbFolder objects # Handles construction of DbFolder objects
class UpdateFolder class UpdateFolder
include ServiceStatus include ServiceStatus
attr_reader :start_time, :end_time, :size_on_disk attr_reader :start_time, :end_time, :size_on_disk
def initialize(folder, entries) def initialize(folder, entries)
@folder = folder @folder = folder
@entries = entries @entries = entries
# initialize extents, these are updated as folders/streams are added # initialize extents, these are updated as folders/streams are added
@start_time = nil @start_time = nil
@end_time = nil @end_time = nil
@size_on_disk = 0 @size_on_disk = 0
# initialiaze array of current entries, ids are removed # initialiaze array of current entries, ids are removed
# as they are updated, so any id's left in this # as they are updated, so any id's left in this
# array are no longer present on the remote db # array are no longer present on the remote db
# and will be destroyed # and will be destroyed
@subfolder_ids = folder.subfolders.ids @subfolder_ids = folder.subfolders.ids
@stream_ids = folder.db_streams.ids @stream_ids = folder.db_streams.ids
super() super()
end
# returns the updated DbFolder object
def run
# update the folder attributes from metadata
info = __read_info_entry(@entries) || {}
# if metadata is corrupt, use default values instead
unless @folder.update_attributes(
info.slice(*DbFolder.defined_attributes))
@folder.use_default_attributes
Rails.logger.warn("corrupt metadata: #{@folder.path}")
end
# process the contents of the folder
__parse_folder_entries(@folder, @entries)
# delete any streams or folders still in the
# tracked ID arrays, they haven't been touched
# so they must have been removed from the remote
# db some other way (eg nilmtool)
unless @stream_ids.empty?
@folder.db_streams.destroy(*@stream_ids)
add_warning('Removed streams no longer in the remote database')
end end
unless @subfolder_ids.empty? # returns the updated DbFolder object
@folder.subfolders.destroy(*@subfolder_ids) def run
add_warning('Removed folders no longer in the remote database') # update the folder attributes from metadata
end info = __read_info_entry(@entries) || {}
# add the extents computed during updates # if metadata is corrupt, use default values instead
@folder.start_time = @start_time unless @folder.update_attributes(
@folder.end_time = @end_time info.slice(*DbFolder.defined_attributes))
@folder.size_on_disk = @size_on_disk @folder.use_default_attributes
# save the result Rails.logger.warn("corrupt metadata: #{@folder.path}")
unless @folder.valid? end
Rails.logger.warn("invalid folder: #{@folder.name}") # process the contents of the folder
__parse_folder_entries(@folder, @entries)
# delete any streams or folders still in the
# tracked ID arrays, they haven't been touched
# so they must have been removed from the remote
# db some other way (eg nilmtool)
unless @stream_ids.empty?
@folder.db_streams.destroy(*@stream_ids)
add_warning('Removed streams no longer in the remote database')
end
unless @subfolder_ids.empty?
@folder.subfolders.destroy(*@subfolder_ids)
add_warning('Removed folders no longer in the remote database')
end
# add the extents computed during updates
@folder.start_time = @start_time
@folder.end_time = @end_time
@folder.size_on_disk = @size_on_disk
# save the result
unless @folder.valid?
Rails.logger.warn("invalid folder: #{@folder.name}")
end
@folder.save!
set_notice("Folder updated")
self
end end
@folder.save!
set_notice("Folder updated")
self
end
protected protected
# if this folder has an info stream, find that entry and # if this folder has an info stream, find that entry and
# use its metadata to update the folder's attributes # use its metadata to update the folder's attributes
def __read_info_entry(entries) def __read_info_entry(entries)
info_entry = entries.detect do |entry| info_entry = entries.detect do |entry|
entry[:chunks] == ['info'] entry[:chunks] == ['info']
end
info_entry ||= {}
# if there is an info entry, remove it from the array
# so we don't process it as a seperate stream
entries.delete(info_entry)
# return the attributes
info_entry[:attributes]
end end
info_entry ||= {}
# if there is an info entry, remove it from the array
# so we don't process it as a seperate stream
entries.delete(info_entry)
# return the attributes
info_entry[:attributes]
end
# Creates or updates the folder defined by these entries.
# Then adds in any subfolders or streams
def __parse_folder_entries(folder, entries)
# group the folder entries
groups = __group_entries(entries)
# process the groups as subfolders or streams
__process_folder_contents(folder, groups)
# return the updated folder
folder
end
# collect the folder's entries into a set of groups # Creates or updates the folder defined by these entries.
# based off the next item in their :chunk array # Then adds in any subfolders or streams
# returns entry_groups which is a Hash with def __parse_folder_entries(folder, entries)
# :key = name of the common chunk # group the folder entries
# :value = the entry, less the common chunk groups = __group_entries(entries)
def __group_entries(entries) # process the groups as subfolders or streams
entry_groups = {} __process_folder_contents(folder, groups)
entries.map do |entry| # return the updated folder
# group streams by their base paths (ignore ~decim endings) folder
group_name = entry[:chunks].pop.gsub(UpdateStream.decimation_tag, '')
__add_to_group(entry_groups, group_name, entry)
end end
entry_groups
end
# helper function to __group_entries that handles # collect the folder's entries into a set of groups
# sorting entries into the entry_groups Hash # based off the next item in their :chunk array
def __add_to_group(entry_groups, group_name, entry) # returns entry_groups which is a Hash with
entry_groups[group_name] ||= [] # :key = name of the common chunk
if entry[:chunks] == ['info'] # put the info stream in front # :value = the entry, less the common chunk
entry_groups[group_name].prepend(entry) def __group_entries(entries)
else entry_groups = {}
entry_groups[group_name].append(entry) entries.map do |entry|
# group streams by their base paths (ignore ~decim endings)
group_name = entry[:chunks].pop.gsub(UpdateStream.decimation_tag, '')
__add_to_group(entry_groups, group_name, entry)
end
entry_groups
end end
end
# convert the groups into subfolders and streams # helper function to __group_entries that handles
def __process_folder_contents(folder, groups) # sorting entries into the entry_groups Hash
groups.each do |name, entry_group| def __add_to_group(entry_groups, group_name, entry)
if stream?(entry_group) entry_groups[group_name] ||= []
updater = __build_stream(folder, entry_group, name) if entry[:chunks] == ['info'] # put the info stream in front
next if updater.nil? # ignore orphaned decimations entry_groups[group_name].prepend(entry)
else # its a folder else
updater = __build_folder(folder, entry_group, name) entry_groups[group_name].append(entry)
end end
absorb_status(updater.run)
absorb_data_extents(updater) # update start, end & size_on_disk
end end
end
# determine if the entry groups constitute a single stream # convert the groups into subfolders and streams
def stream?(entry_group) def __process_folder_contents(folder, groups)
# if any entry_group has chunks left, this is a folder groups.each do |name, entry_group|
entry_group.select { |entry| if stream?(entry_group)
!entry[:chunks].empty? updater = __build_stream(folder, entry_group, name)
}.count.zero? next if updater.nil? # ignore orphaned decimations
end else # its a folder
updater = __build_folder(folder, entry_group, name)
end
absorb_status(updater.run)
absorb_data_extents(updater) # update start, end & size_on_disk
end
end
# create or update a DbStream object at the # determine if the entry groups constitute a single stream
# specified path. def stream?(entry_group)
def __build_stream(folder, entry_group, # if any entry_group has chunks left, this is a folder
default_name) entry_group.select { |entry|
base = __base_entry(entry_group) !entry[:chunks].empty?
unless base # corrupt stream, don't process }.count.zero?
add_warning("#{entry_group.count} orphan decimations in #{folder.name}")
return
end end
# find or create the stream
stream = folder.db_streams.find_by_path(base[:path])
stream ||= DbStream.new(db: folder.db, db_folder: folder,
path: base[:path], name: default_name)
# remove the id (if present) to mark this stream as updated
@stream_ids -= [stream.id]
# return the Updater, don't run it
UpdateStream.new(stream, base, entry_group - [base])
end
# find the base stream in this entry_group # create or update a DbStream object at the
# this is the stream that doesn't have a decimXX tag # specified path.
# adds a warning and returns nil if base entry is missing def __build_stream(folder, entry_group,
def __base_entry(entry_group) default_name)
base_entry = entry_group.select { |entry| base = __base_entry(entry_group)
entry[:path].match(UpdateStream.decimation_tag).nil? unless base # corrupt stream, don't process
}.first add_warning("#{entry_group.count} orphan decimations in #{folder.name}")
return nil unless base_entry return
base_entry end
end # find or create the stream
stream = folder.db_streams.find_by_path(base[:path])
stream ||= DbStream.new(db: folder.db, db_folder: folder,
path: base[:path], name: default_name)
# remove the id (if present) to mark this stream as updated
@stream_ids -= [stream.id]
# return the Updater, don't run it
UpdateStream.new(stream, base, entry_group - [base])
end
# create or update a DbFolder object at the # find the base stream in this entry_group
# specified path. # this is the stream that doesn't have a decimXX tag
def __build_folder(parent, entries, default_name) # adds a warning and returns nil if base entry is missing
path = __build_path(entries) def __base_entry(entry_group)
folder = parent.subfolders.find_by_path(path) base_entry = entry_group.select { |entry|
folder ||= DbFolder.new(parent: parent, db: parent.db, entry[:path].match(UpdateStream.decimation_tag).nil?
path: path, name: default_name) }.first
# remove the id (if present) to mark this folder as updated return nil unless base_entry
@subfolder_ids -= [folder.id] base_entry
# return the Updater, don't run it end
UpdateFolder.new(folder, entries)
end
# all entries agree on a common path # create or update a DbFolder object at the
# up to the point where they still have # specified path.
# chunks. Get this common path by popping def __build_folder(parent, entries, default_name)
# the chunks off the first entry's path path = __build_path(entries)
def __build_path(entries) folder = parent.subfolders.find_by_path(path)
parts = entries[0][:path].split('/') folder ||= DbFolder.new(parent: parent, db: parent.db,
parts.pop(entries[0][:chunks].length) path: path, name: default_name)
parts.join('/') # stitch parts together to form a path # remove the id (if present) to mark this folder as updated
end @subfolder_ids -= [folder.id]
# return the Updater, don't run it
UpdateFolder.new(folder, entries)
end
# update extents based on result of updater # all entries agree on a common path
# (either a stream or a subfolder) # up to the point where they still have
def absorb_data_extents(updater) # chunks. Get this common path by popping
if @start_time.nil? # the chunks off the first entry's path
@start_time = updater.start_time def __build_path(entries)
elsif !updater.start_time.nil? parts = entries[0][:path].split('/')
@start_time = [@start_time, updater.start_time].min parts.pop(entries[0][:chunks].length)
parts.join('/') # stitch parts together to form a path
end end
if @end_time.nil?
@end_time = updater.end_time # update extents based on result of updater
elsif !updater.end_time.nil? # (either a stream or a subfolder)
@end_time = [@end_time, updater.end_time].max def absorb_data_extents(updater)
if @start_time.nil?
@start_time = updater.start_time
elsif !updater.start_time.nil?
@start_time = [@start_time, updater.start_time].min
end
if @end_time.nil?
@end_time = updater.end_time
elsif !updater.end_time.nil?
@end_time = [@end_time, updater.end_time].max
end
@size_on_disk += updater.size_on_disk
end end
@size_on_disk += updater.size_on_disk
end end
end end
\ No newline at end of file
# frozen_string_literal: true # frozen_string_literal: true
module Nilmdb
# Handles construction of DbFolder objects
class UpdateStream
include ServiceStatus
attr_reader :start_time, :end_time, :size_on_disk
# Handles construction of DbFolder objects def initialize(stream, base_entry, decimation_entries)
class UpdateStream @stream = stream
include ServiceStatus @base_entry = base_entry
attr_reader :start_time, :end_time, :size_on_disk @decimation_entries = decimation_entries
# initialize extents, these set during run
@start_time = nil
@end_time = nil
@size_on_disk = 0
super()
end
def initialize(stream, base_entry, decimation_entries) def run
@stream = stream __update_stream(@stream, @base_entry, @decimation_entries)
@base_entry = base_entry set_notice("Stream updated")
@decimation_entries = decimation_entries self
# initialize extents, these set during run end
@start_time = nil
@end_time = nil
@size_on_disk = 0
super()
end
def run # regex matching the ~decimXX ending on a stream path
__update_stream(@stream, @base_entry, @decimation_entries) def self.decimation_tag
set_notice("Stream updated") /~decim-([\d]+)$/
self end
end
# regex matching the ~decimXX ending on a stream path # create or update a DbStream object at the
def self.decimation_tag # specified path.
/~decim-([\d]+)$/ def __update_stream(stream, base_entry, decimation_entries)
end # use default attributes if metadata is corrupt
unless stream.update_attributes(base_entry[:attributes])
stream.use_default_attributes
Rails.logger.warn("corrupt metadata: #{stream.path}")
# create or update a DbStream object at the end
# specified path. __compute_extents([base_entry] + decimation_entries)
def __update_stream(stream, base_entry, decimation_entries) stream.start_time = @start_time
# use default attributes if metadata is corrupt stream.end_time = @end_time
unless stream.update_attributes(base_entry[:attributes]) stream.size_on_disk = @size_on_disk
stream.use_default_attributes stream.save!
Rails.logger.warn("corrupt metadata: #{stream.path}")
__build_decimations(stream: stream,
entry_group: decimation_entries)
__build_elements(stream: stream, stream_data: base_entry[:elements])
end end
__compute_extents([base_entry] + decimation_entries)
stream.start_time = @start_time
stream.end_time = @end_time
stream.size_on_disk = @size_on_disk
stream.save!
__build_decimations(stream: stream,
entry_group: decimation_entries)
__build_elements(stream: stream, stream_data: base_entry[:elements])
end
# create or update DbDecimations for the # create or update DbDecimations for the
# specified DbStream # specified DbStream
def __build_decimations(stream:, entry_group:) def __build_decimations(stream:, entry_group:)
if !entry_group.empty? if !entry_group.empty?
Rails.logger.debug("deleting decimations for #{stream.path}") Rails.logger.debug("deleting decimations for #{stream.path}")
stream.db_decimations.destroy_all #remove existing decimations stream.db_decimations.destroy_all #remove existing decimations
end end
entry_group.each do |entry| entry_group.each do |entry|
level = entry[:path].match(UpdateStream.decimation_tag)[1].to_i level = entry[:path].match(UpdateStream.decimation_tag)[1].to_i
decim = stream.db_decimations.find_by_level(level) decim = stream.db_decimations.find_by_level(level)
decim ||= DbDecimation.new(db_stream: stream, level: level) decim ||= DbDecimation.new(db_stream: stream, level: level)
decim.update_attributes(entry[:attributes]) decim.update_attributes(entry[:attributes])
#decim.save! #decim.save!
end
end end
end
# create or update DbStreams for the # create or update DbStreams for the
# specified DbStream # specified DbStream
def __build_elements(stream:, stream_data:) def __build_elements(stream:, stream_data:)
stream.column_count.times do |x| stream.column_count.times do |x|
element = stream.db_elements.find_by_column(x) element = stream.db_elements.find_by_column(x)
element ||= DbElement.new(db_stream: stream, column: x, element ||= DbElement.new(db_stream: stream, column: x,
display_type: 'continuous') display_type: 'continuous')
# check if there is stream metadata for column x # check if there is stream metadata for column x
entry = stream_data.select { |meta| meta[:column] == x } entry = stream_data.select { |meta| meta[:column] == x }
# use the metadata if present # use the metadata if present
unless element.update_attributes(entry[0] || {}) unless element.update_attributes(entry[0] || {})
element.use_default_attributes element.use_default_attributes
element.save! element.save!
Rails.logger.warn(stream_data) Rails.logger.warn(stream_data)
Rails.logger.warn("corrupt metadata: #{stream.path}:"\ Rails.logger.warn("corrupt metadata: #{stream.path}:"\
"e#{element.column}") "e#{element.column}")
end
end end
end end
end
# compute the time range and total size of this stream # compute the time range and total size of this stream
# accepts an array of entries (include base & decim) # accepts an array of entries (include base & decim)
def __compute_extents(entries) def __compute_extents(entries)
entries.map { |x| x[:attributes] }.each do |attrs| entries.map { |x| x[:attributes] }.each do |attrs|
next if (attrs[:total_rows]).zero? next if (attrs[:total_rows]).zero?
if @start_time.nil? if @start_time.nil?
@start_time = attrs[:start_time] @start_time = attrs[:start_time]
@end_time = attrs[:end_time] @end_time = attrs[:end_time]
end
@start_time = [@start_time, attrs[:start_time]].min
@end_time = [@end_time, attrs[:end_time]].max
@size_on_disk += attrs[:total_rows] *
__bytes_per_row(attrs[:data_type])
end end
@start_time = [@start_time, attrs[:start_time]].min
@end_time = [@end_time, attrs[:end_time]].max
@size_on_disk += attrs[:total_rows] *
__bytes_per_row(attrs[:data_type])
end end
end
# compute how many bytes are required per row based # compute how many bytes are required per row based
# on the datatype (float32_8 => 4*8+8) # on the datatype (float32_8 => 4*8+8)
def __bytes_per_row(data_type) def __bytes_per_row(data_type)
regex = /[a-z]*(\d*)_(\d*)/.match(data_type) regex = /[a-z]*(\d*)_(\d*)/.match(data_type)
dtype_bytes = regex[1].to_i / 8 dtype_bytes = regex[1].to_i / 8
num_cols = regex[2].to_i num_cols = regex[2].to_i
ts_bytes = 8 ts_bytes = 8
ts_bytes + num_cols * dtype_bytes ts_bytes + num_cols * dtype_bytes
end
end end
end end
\ No newline at end of file
class NodeAdapterFactory
include HTTParty
def self.from_url(url)
begin
resp = self.class.get(url)
return nil unless resp.success?
info = resp.parsed_response
rescue
return nil
end
if info.include? 'NilmDB'
return Nilmdb::Adapter(url)
elsif info.include? 'Joule'
return Joule::Adapter(url)
else
return nil
end
end
def self.from_nilm(nilm)
if nilm.type=='nilmdb'
return Nilmdb::Adapter(nilm.url)
elsif nilm.type=='joule'
return Joule::Adapter(nilm.url)
else
# try to figure out what this nilm is
return self.from_url(nilm.url)
end
end
end
\ No newline at end of file
...@@ -6,6 +6,7 @@ class DbFoldersController < ApplicationController ...@@ -6,6 +6,7 @@ class DbFoldersController < ApplicationController
before_action :set_folder, only: [:show, :update] before_action :set_folder, only: [:show, :update]
before_action :authorize_viewer, only: [:show] before_action :authorize_viewer, only: [:show]
before_action :authorize_owner, only: [:update] before_action :authorize_owner, only: [:update]
before_action :create_adapter, only: [:update]
# GET /db_folders.json # GET /db_folders.json
def show; end def show; end
...@@ -13,8 +14,7 @@ class DbFoldersController < ApplicationController ...@@ -13,8 +14,7 @@ class DbFoldersController < ApplicationController
# PATCH/PUT /db_folders/1.json # PATCH/PUT /db_folders/1.json
# TODO: create info stream on folders on edit # TODO: create info stream on folders on edit
def update def update
adapter = DbAdapter.new(@db.url) @service = EditFolder.new(@node_adapter)
@service = EditFolder.new(adapter)
@service.run(@db_folder, folder_params) @service.run(@db_folder, folder_params)
render status: @service.success? ? :ok : :unprocessable_entity render status: @service.success? ? :ok : :unprocessable_entity
end end
...@@ -39,4 +39,13 @@ class DbFoldersController < ApplicationController ...@@ -39,4 +39,13 @@ class DbFoldersController < ApplicationController
def authorize_viewer def authorize_viewer
head :unauthorized unless current_user.views_nilm?(@nilm) head :unauthorized unless current_user.views_nilm?(@nilm)
end end
def create_adapter
@node_adapter = NodeAdapterFactory.from_nilm(@nilm)
if @node_adapter.nil?
@service = StubService.new
@service.add_error("Cannot contact installation")
render 'helpers/empty_response', status: :unprocessable_entity
end
end
end end
...@@ -6,7 +6,7 @@ class DbStreamsController < ApplicationController ...@@ -6,7 +6,7 @@ class DbStreamsController < ApplicationController
before_action :set_stream, only: [:update, :data] before_action :set_stream, only: [:update, :data]
before_action :authorize_viewer, only: [:data] before_action :authorize_viewer, only: [:data]
before_action :authorize_owner, only: [:update] before_action :authorize_owner, only: [:update]
before_action :create_adapter, only: [:update, :data]
def index def index
if params[:streams].nil? if params[:streams].nil?
head :unprocessable_entity head :unprocessable_entity
...@@ -24,14 +24,13 @@ class DbStreamsController < ApplicationController ...@@ -24,14 +24,13 @@ class DbStreamsController < ApplicationController
end end
def update def update
adapter = DbAdapter.new(@db.url) @service = EditStream.new(@node_adapter)
@service = EditStream.new(adapter)
@service.run(@db_stream, stream_params) @service.run(@db_stream, stream_params)
render status: @service.success? ? :ok : :unprocessable_entity render status: @service.success? ? :ok : :unprocessable_entity
end end
def data def data
@service = BuildDataset.new @service = BuildDataset.new(@node_adapter)
@service.run(@db_stream,params[:start_time].to_i,params[:end_time].to_i) @service.run(@db_stream,params[:start_time].to_i,params[:end_time].to_i)
unless @service.success? unless @service.success?
head :unprocessable_entity head :unprocessable_entity
...@@ -67,4 +66,13 @@ class DbStreamsController < ApplicationController ...@@ -67,4 +66,13 @@ class DbStreamsController < ApplicationController
def authorize_viewer def authorize_viewer
head :unauthorized unless current_user.views_nilm?(@nilm) head :unauthorized unless current_user.views_nilm?(@nilm)
end end
def create_adapter
@node_adapter = NodeAdapterFactory.from_nilm(@nilm)
if @node_adapter.nil?
@service = StubService.new
@service.add_error("Cannot contact installation")
render 'helpers/empty_response', status: :unprocessable_entity
end
end
end end
...@@ -24,8 +24,8 @@ class InterfacesController < ActionController::Base ...@@ -24,8 +24,8 @@ class InterfacesController < ActionController::Base
def get def get
path = params[:path] || '' path = params[:path] || ''
req = path +"?"+request.query_string req = path +"?"+request.query_string
adapter = JouleAdapter.new(@joule_module.nilm.url) backend = JouleAdapter.new(@joule_module.nilm.url)
render plain: adapter.module_interface(@joule_module,req) render plain: backend.module_interface(@joule_module,req)
end end
def put def put
......
...@@ -7,6 +7,7 @@ class NilmsController < ApplicationController ...@@ -7,6 +7,7 @@ class NilmsController < ApplicationController
before_action :authorize_viewer, only: [:show] before_action :authorize_viewer, only: [:show]
before_action :authorize_owner, only: [:update, :refresh] before_action :authorize_owner, only: [:update, :refresh]
before_action :authorize_admin, only: [:destroy] before_action :authorize_admin, only: [:destroy]
before_action :create_adapter, only: [:create]
# GET /nilms.json # GET /nilms.json
def index def index
...@@ -18,8 +19,8 @@ class NilmsController < ApplicationController ...@@ -18,8 +19,8 @@ class NilmsController < ApplicationController
#render the database and joule modules #render the database and joule modules
@role = current_user.get_nilm_permission(@nilm) @role = current_user.get_nilm_permission(@nilm)
#request new information from the NILM #request new information from the NILM
if(params[:refresh]) if params[:refresh]
@service = UpdateNilm.new() @service = UpdateNilm.new(@adapter)
@service.run(@nilm) @service.run(@nilm)
render status: @service.success? ? :ok : :unprocessable_entity render status: @service.success? ? :ok : :unprocessable_entity
else else
...@@ -29,7 +30,7 @@ class NilmsController < ApplicationController ...@@ -29,7 +30,7 @@ class NilmsController < ApplicationController
# POST /nilms.json # POST /nilms.json
def create def create
@service = CreateNilm.new @service = CreateNilm.new(@node_adapter)
@service.run(name: nilm_params[:name], @service.run(name: nilm_params[:name],
url: nilm_params[:url], url: nilm_params[:url],
description: nilm_params[:description], description: nilm_params[:description],
...@@ -74,6 +75,7 @@ class NilmsController < ApplicationController ...@@ -74,6 +75,7 @@ class NilmsController < ApplicationController
def set_nilm def set_nilm
@nilm = Nilm.find(params[:id]) @nilm = Nilm.find(params[:id])
@db = @nilm.db @db = @nilm.db
@adapter = Nilmdb::Adapter.new(@nilm.url)
end end
# Never trust parameters from the scary internet, # Never trust parameters from the scary internet,
...@@ -95,4 +97,13 @@ class NilmsController < ApplicationController ...@@ -95,4 +97,13 @@ class NilmsController < ApplicationController
def authorize_viewer def authorize_viewer
head :unauthorized unless current_user.views_nilm?(@nilm) head :unauthorized unless current_user.views_nilm?(@nilm)
end end
def create_adapter
@node_adapter = NodeAdapterFactory.from_url(nilm_params[:url])
if @node_adapter.nil?
@service = StubService.new
@service.add_error("Cannot contact installation")
render 'helpers/empty_response', status: :unprocessable_entity
end
end
end end
...@@ -14,7 +14,8 @@ class Nilm < ApplicationRecord ...@@ -14,7 +14,8 @@ class Nilm < ApplicationRecord
#---Validations----- #---Validations-----
validates :name, presence: true, uniqueness: true validates :name, presence: true, uniqueness: true
validates :url, presence: true, uniqueness: true validates :url, presence: true, uniqueness: true
validates :node_type, presence: true,
inclusion: { in: %w(nilmdb joule) }
#---Callbacks------ #---Callbacks------
before_destroy do |record| before_destroy do |record|
DataView.destroy(record.data_views.pluck(:id)) DataView.destroy(record.data_views.pluck(:id))
......
...@@ -7,8 +7,9 @@ class BuildDataset ...@@ -7,8 +7,9 @@ class BuildDataset
include ServiceStatus include ServiceStatus
attr_reader :data, :legend attr_reader :data, :legend
def initialize def initialize(node_adapter)
super() super()
@node_adapter = node_adapter
@data = [] # [[ts, val1, val2, val3, ...], @data = [] # [[ts, val1, val2, val3, ...],
# [ts, val1, val2, val3, ...]] # [ts, val1, val2, val3, ...]]
@legend = { @legend = {
...@@ -24,24 +25,20 @@ class BuildDataset ...@@ -24,24 +25,20 @@ class BuildDataset
# fill @data with values from db_stream # fill @data with values from db_stream
# and populate @legend # and populate @legend
def run(db_stream, start_time, end_time) def run(db_stream, start_time, end_time)
adapter = DbAdapter.new(db_stream.db.url) result = @node_adapter.load_data(db_stream, start_time, end_time)
data_service = LoadStreamData.new(adapter) if result.nil?
absorb_status(
data_service.run(db_stream, start_time, end_time)
)
unless data_service.success?
add_error("unable to retrieve data for #{db_stream.path}") add_error("unable to retrieve data for #{db_stream.path}")
return self return self
end end
@data = _build_dataset(data_service.data) @data = _build_dataset(result[:data])
@legend[:columns] = _build_legend_columns(data_service.data, db_stream) @legend[:columns] = _build_legend_columns(result[:data], db_stream)
@legend[:start_time] = start_time @legend[:start_time] = start_time
@legend[:end_time] = end_time @legend[:end_time] = end_time
@legend[:decimation_factor] = data_service.decimation_factor @legend[:decimation_factor] = result[:decimation_factor]
@legend[:num_rows] = @data.length @legend[:num_rows] = @data.length
if(@data.empty?) if @data.empty?
@legend[:notes] = 'there is no data available over this interval' @legend[:notes] = 'there is no data available over this interval'
elsif(@data[0].length!=db_stream.db_elements.length+1) elsif @data[0].length!=db_stream.db_elements.length+1
@legend[:notes] = 'some elements omitted due to insufficient decimation' @legend[:notes] = 'some elements omitted due to insufficient decimation'
end end
self self
......
require "benchmark" require "benchmark"
# frozen_string_literal: true # frozen_string_literal: true
# Loads data for specified elements # Loads data for specified elements
class LoadElementData class LoadElementData
include ServiceStatus include ServiceStatus
attr_reader :data, :start_time, :end_time attr_reader :data, :start_time, :end_time
def initialize() def initialize
super() super()
@data = [] @data = []
@start_time = nil @start_time = nil
...@@ -35,18 +34,10 @@ class LoadElementData ...@@ -35,18 +34,10 @@ class LoadElementData
end end
end end
#2 compute bounds by updating stream info if start/end are missing #2 compute bounds by updating stream info if start/end are missing
if(start_time==nil || end_time==nil) if start_time==nil || end_time==nil
req_streams.map do |stream| req_streams.map do |stream|
adapter = DbAdapter.new(stream.db.url) adapter = Nilmdb::Adapter.new(stream.db.url)
entries = adapter.stream_info(stream) adapter.refresh_stream(stream)
service = UpdateStream.new(
stream,
entries[:base_entry],
entries[:decimation_entries]
)
unless service.run.success?
Rails.logger.warn("Error updating #{stream.name}: #{service.errors}")
end
end end
end end
...@@ -54,7 +45,7 @@ class LoadElementData ...@@ -54,7 +45,7 @@ class LoadElementData
streams_with_data = req_streams.select{|stream| stream.total_time > 0} streams_with_data = req_streams.select{|stream| stream.total_time > 0}
if (start_time == nil || end_time == nil) && streams_with_data.empty? if (start_time == nil || end_time == nil) && streams_with_data.empty?
add_error("no time bounds for requested elements, refresh database?") add_error("no time bounds for requested elements, refresh database?")
return return self
end end
@start_time = start_time @start_time = start_time
@end_time = end_time @end_time = end_time
...@@ -70,18 +61,17 @@ class LoadElementData ...@@ -70,18 +61,17 @@ class LoadElementData
end end
if @start_time > @end_time if @start_time > @end_time
add_error("invalid time bounds") add_error("invalid time bounds")
return return self
end end
#4 pull data from streams #4 pull data from streams
combined_data = [] combined_data = []
req_streams.each do |stream| req_streams.each do |stream|
adapter = DbAdapter.new(stream.db.url)
data_service = LoadStreamData.new(adapter)
stream_elements = elements.select{|e| e.db_stream_id==stream.id}.to_a stream_elements = elements.select{|e| e.db_stream_id==stream.id}.to_a
data_service.run(stream, @start_time, @end_time,stream_elements,resolution) adapter = Nilmdb::Adapter.new(stream.db.url)
result = adapter.load_data(stream, @start_time, @end_time,stream_elements,resolution)
if data_service.success? if not result.nil?
combined_data.concat(data_service.data) combined_data.concat(result[:data])
else else
#create error entries #create error entries
error_entries = stream_elements.map do |e| error_entries = stream_elements.map do |e|
...@@ -94,6 +84,6 @@ class LoadElementData ...@@ -94,6 +84,6 @@ class LoadElementData
#5 extract requested elements from the stream datasets #5 extract requested elements from the stream datasets
req_element_ids = elements.pluck(:id) req_element_ids = elements.pluck(:id)
@data = combined_data.select{|d| req_element_ids.include? d[:id] } @data = combined_data.select{|d| req_element_ids.include? d[:id] }
return self self
end
end end
end
...@@ -4,9 +4,9 @@ ...@@ -4,9 +4,9 @@
class EditFolder class EditFolder
include ServiceStatus include ServiceStatus
def initialize(db_adapter) def initialize(node_adapter)
super() super()
@db_adapter = db_adapter @node_adapter = node_adapter
end end
def run(db_folder, attribs) def run(db_folder, attribs)
...@@ -20,7 +20,8 @@ class EditFolder ...@@ -20,7 +20,8 @@ class EditFolder
return self return self
end end
# local model checks out, update the remote NilmDB # local model checks out, update the remote NilmDB
status = @db_adapter.set_folder_metadata(db_folder) status = @node_adapter.save_folder(db_folder)
# if there was an error don't save the model # if there was an error don't save the model
if status[:error] if status[:error]
add_error(status[:msg]) add_error(status[:msg])
......
...@@ -4,9 +4,9 @@ ...@@ -4,9 +4,9 @@
class EditStream class EditStream
include ServiceStatus include ServiceStatus
def initialize(db_adapter) def initialize(node_adapter)
super() super()
@db_adapter = db_adapter @node_adapter = node_adapter
end end
def run(db_stream, attribs) def run(db_stream, attribs)
...@@ -23,7 +23,7 @@ class EditStream ...@@ -23,7 +23,7 @@ class EditStream
return self return self
end end
# local model checks out, update the remote NilmDB # local model checks out, update the remote NilmDB
status = @db_adapter.set_stream_metadata(db_stream) status = @node_adapter.save_stream(db_stream)
# if there was an error don't save the model # if there was an error don't save the model
if status[:error] if status[:error]
add_error(status[:msg]) add_error(status[:msg])
......
...@@ -5,13 +5,17 @@ class CreateNilm ...@@ -5,13 +5,17 @@ class CreateNilm
include ServiceStatus include ServiceStatus
attr_reader :nilm attr_reader :nilm
def initialize(node_adapter)
super()
@node_adapter = node_adapter
end
def run(name:, url:, owner:, description:'') def run(name:, url:, owner:, description:'')
# note: url should be NilmDB url # note: url should be NilmDB url
@nilm = Nilm.new(name: name, @nilm = Nilm.new(name: name,
description: description, description: description,
url: url) url: url,
node_type: @node_adapter.node_type)
unless @nilm.valid? unless @nilm.valid?
add_errors(@nilm.errors.full_messages) add_errors(@nilm.errors.full_messages)
return self return self
...@@ -31,12 +35,10 @@ class CreateNilm ...@@ -31,12 +35,10 @@ class CreateNilm
#give the owner 'admin' permissions on the nilm #give the owner 'admin' permissions on the nilm
Permission.create(user: owner, nilm: nilm, role: 'admin') Permission.create(user: owner, nilm: nilm, role: 'admin')
#update the database #update the database
service = UpdateDb.new(db: db) msgs = @node_adapter.refresh(db: db)
adapter = DbAdapter.new(db.url)
service.run(adapter.dbinfo, adapter.schema)
#errors on the database update are warnings on this service #errors on the database update are warnings on this service
#because we can still add the NILM, it will just be offline #because we can still add the NILM, it will just be offline
add_warnings(service.errors + service.warnings) add_warnings(msgs.errors + msgs.warnings)
add_notice('Created installation') add_notice('Created installation')
self self
end end
......
...@@ -4,21 +4,16 @@ ...@@ -4,21 +4,16 @@
class UpdateNilm class UpdateNilm
include ServiceStatus include ServiceStatus
def initialize(node_adapter)
super()
@node_adapter = node_adapter
end
def run(nilm) def run(nilm)
if nilm.db.nil? if nilm.db.nil?
add_error('no associated db object') add_error('no associated db object')
return self return self
end end
db_adapter = DbAdapter.new(nilm.url) absorb_status(@node_adapter.refresh(db: nilm.db))
db_service = UpdateDb.new(db: nilm.db)
absorb_status(
db_service.run(db_adapter.dbinfo, db_adapter.schema)
)
joule_adapter = JouleAdapter.new(nilm.url)
joule_module_service = UpdateJouleModules.new(nilm)
absorb_status(
joule_module_service.run(joule_adapter.module_info)
)
self self
end end
end end
...@@ -38,6 +38,9 @@ module ControlPanel ...@@ -38,6 +38,9 @@ module ControlPanel
%w(data nilm db db_folder db_stream permission user_group user data_view joule_modules).each do |service| %w(data nilm db db_folder db_stream permission user_group user data_view joule_modules).each do |service|
config.autoload_paths << Rails.root.join("app/services/#{service}") config.autoload_paths << Rails.root.join("app/services/#{service}")
end end
config.autoload_paths << Rails.root.join("app/adapters") config.autoload_paths << Rails.root.join("app/adapters/nilmdb")
config.autoload_paths << Rails.root.join("app/adapters/joule")
#config.autoload_paths << Rails.root.join("app/adapters/nilmdb")
end end
end end
class AddNodeTypeToNilm < ActiveRecord::Migration[5.2]
def change
add_column :nilms, :node_type, :string, default: false, null: false
end
end
...@@ -10,7 +10,7 @@ ...@@ -10,7 +10,7 @@
# #
# It's strongly recommended that you check this file into your version control system. # It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 2018_06_20_021012) do ActiveRecord::Schema.define(version: 2018_07_10_014435) do
create_table "data_views", force: :cascade do |t| create_table "data_views", force: :cascade do |t|
t.integer "user_id" t.integer "user_id"
...@@ -155,6 +155,7 @@ ActiveRecord::Schema.define(version: 2018_06_20_021012) do ...@@ -155,6 +155,7 @@ ActiveRecord::Schema.define(version: 2018_06_20_021012) do
t.string "url" t.string "url"
t.datetime "created_at", null: false t.datetime "created_at", null: false
t.datetime "updated_at", null: false t.datetime "updated_at", null: false
t.string "node_type", default: "f", null: false
end end
create_table "permissions", force: :cascade do |t| create_table "permissions", force: :cascade do |t|
......
...@@ -2,12 +2,12 @@ ...@@ -2,12 +2,12 @@
require 'rails_helper' require 'rails_helper'
describe JouleAdapter do describe Joule::Backend do
# use the benchtop server joule API # use the benchtop server joule API
let (:url) {'http://172.16.1.12/joule'} let (:url) {'http://172.16.1.12/joule'}
it 'retrieves module infos', :vcr do it 'retrieves module infos', :vcr do
adapter = JouleAdapter.new(url) backend = Joule::Backend.new(url)
adapter.module_info.each do |m| backend.module_info.each do |m|
expect(m).to include(:name, :exec_cmd, :web_interface) expect(m).to include(:name, :exec_cmd, :web_interface)
end end
end end
......
...@@ -2,12 +2,12 @@ ...@@ -2,12 +2,12 @@
require 'rails_helper' require 'rails_helper'
describe DbAdapter do describe Nilmdb::Backend do
# use the vagrant box loaded with example database # use the vagrant box loaded with example database
let (:url) {'http://localhost:8080/nilmdb'} let (:url) {'http://localhost:8080/nilmdb'}
it 'retrieves basic schema', :vcr do it 'retrieves basic schema', :vcr do
adapter = DbAdapter.new(url) backend = Nilmdb::Backend.new(url)
adapter.schema.each do |entry| backend.schema.each do |entry|
expect(entry).to include(:path, :attributes) expect(entry).to include(:path, :attributes)
expect(entry[:attributes]).to( expect(entry[:attributes]).to(
include(:data_type, :start_time, include(:data_type, :start_time,
...@@ -17,8 +17,8 @@ describe DbAdapter do ...@@ -17,8 +17,8 @@ describe DbAdapter do
end end
it 'retrieves stream specific schema', :vcr do it 'retrieves stream specific schema', :vcr do
adapter = DbAdapter.new(url) backend = Nilmdb::Backend.new(url)
entries = adapter.stream_info(create(:db_stream,path:"/tutorial/pump-prep")) entries = backend.stream_info(create(:db_stream,path:"/tutorial/pump-prep"))
expect(entries[:base_entry][:path]).to eq "/tutorial/pump-prep" expect(entries[:base_entry][:path]).to eq "/tutorial/pump-prep"
#TODO: support decimation lookup, need HTTP API to process wild cards #TODO: support decimation lookup, need HTTP API to process wild cards
expect(entries[:decimation_entries].length).to eq 0 expect(entries[:decimation_entries].length).to eq 0
...@@ -26,17 +26,17 @@ describe DbAdapter do ...@@ -26,17 +26,17 @@ describe DbAdapter do
describe 'set_stream_metadata' do describe 'set_stream_metadata' do
it 'updates config_key in metadata', :vcr do it 'updates config_key in metadata', :vcr do
adapter = DbAdapter.new(url) backend = Nilmdb::Backend.new(url)
stream = DbStream.new(path: '/tutorial/pump-events', stream = DbStream.new(path: '/tutorial/pump-events',
name: 'test', description: 'new', db_elements_attributes: [ name: 'test', description: 'new', db_elements_attributes: [
{column: 0, name: 'element1'},{column: 1, name: 'element2'}]) {column: 0, name: 'element1'},{column: 1, name: 'element2'}])
result = adapter.set_stream_metadata(stream) result = backend.set_stream_metadata(stream)
expect(result[:error]).to be false expect(result[:error]).to be false
end end
it 'returns error on server failure', :vcr do it 'returns error on server failure', :vcr do
adapter = DbAdapter.new(url) backend = Nilmdb::Backend.new(url)
stream = DbStream.new(path: '/badpath') stream = DbStream.new(path: '/badpath')
result = adapter.set_stream_metadata(stream) result = backend.set_stream_metadata(stream)
expect(result[:error]).to be true expect(result[:error]).to be true
expect(result[:msg]).to match(/badpath/) expect(result[:msg]).to match(/badpath/)
end end
...@@ -44,23 +44,23 @@ describe DbAdapter do ...@@ -44,23 +44,23 @@ describe DbAdapter do
describe 'set_folder_metadata' do describe 'set_folder_metadata' do
it 'updates config_key in metadata', :vcr do it 'updates config_key in metadata', :vcr do
adapter = DbAdapter.new(url) backend = Nilmdb::Backend.new(url)
folder = DbFolder.new(path: '/tutorial', folder = DbFolder.new(path: '/tutorial',
name: 'test', description: 'new') name: 'test', description: 'new')
result = adapter.set_folder_metadata(folder) result = backend.set_folder_metadata(folder)
expect(result[:error]).to be false expect(result[:error]).to be false
end end
it 'creates info stream if missing', :vcr do it 'creates info stream if missing', :vcr do
adapter = DbAdapter.new(url) backend = Nilmdb::Backend.new(url)
folder = DbFolder.new(path: '/v2_folder/another', folder = DbFolder.new(path: '/v2_folder/another',
name: 'another', description: 'new') name: 'another', description: 'new')
result = adapter.set_folder_metadata(folder) result = backend.set_folder_metadata(folder)
expect(result[:error]).to be false expect(result[:error]).to be false
end end
it 'returns error on server failure', :vcr do it 'returns error on server failure', :vcr do
adapter = DbAdapter.new(url) backend = Nilmdb::Backend.new(url)
folder = DbFolder.new(path: '/badpath') folder = DbFolder.new(path: '/badpath')
result = adapter.set_folder_metadata(folder) result = backend.set_folder_metadata(folder)
expect(result[:error]).to be true expect(result[:error]).to be true
expect(result[:msg]).to match(/badpath/) expect(result[:msg]).to match(/badpath/)
end end
...@@ -68,42 +68,42 @@ describe DbAdapter do ...@@ -68,42 +68,42 @@ describe DbAdapter do
describe 'get_count' do describe 'get_count' do
it 'returns number of elements in path over interval', :vcr do it 'returns number of elements in path over interval', :vcr do
adapter = DbAdapter.new(url) backend = Nilmdb::Backend.new(url)
start_time = 1361546159000000 start_time = 1361546159000000
end_time = 1361577615684742 end_time = 1361577615684742
path = '/tutorial/pump-events' path = '/tutorial/pump-events'
raw_count = adapter.get_count(path,start_time, end_time) raw_count = backend.get_count(path,start_time, end_time)
lvl4_count = adapter.get_count(path+"~decim-4",start_time, end_time) lvl4_count = backend.get_count(path+"~decim-4",start_time, end_time)
expect(raw_count>0).to be true expect(raw_count>0).to be true
expect(raw_count/4).to eq(lvl4_count) expect(raw_count/4).to eq(lvl4_count)
end end
it 'returns nil on server failure', :vcr do it 'returns nil on server failure', :vcr do
adapter = DbAdapter.new(url) backend = Nilmdb::Backend.new(url)
start_time = 1361546159000000 start_time = 1361546159000000
end_time = 1361577615684742 end_time = 1361577615684742
path = '/path/does/not/exist' path = '/path/does/not/exist'
count = adapter.get_count(path,start_time, end_time) count = backend.get_count(path,start_time, end_time)
expect(count).to be nil expect(count).to be nil
end end
end end
describe 'get_data' do describe 'get_data' do
it 'returns array of data over interval', :vcr do it 'returns array of data over interval', :vcr do
adapter = DbAdapter.new(url) backend = Nilmdb::Backend.new(url)
start_time = 1361546159000000 start_time = 1361546159000000
end_time = 1361577615684742 end_time = 1361577615684742
path = '/tutorial/pump-events' path = '/tutorial/pump-events'
raw_data = adapter.get_data(path,start_time, end_time) raw_data = backend.get_data(path,start_time, end_time)
lvl4_data = adapter.get_data(path+"~decim-4",start_time, end_time) lvl4_data = backend.get_data(path+"~decim-4",start_time, end_time)
expect(raw_data.length>0).to be true expect(raw_data.length>0).to be true
expect(raw_data.length/4).to eq(lvl4_data.length) expect(raw_data.length/4).to eq(lvl4_data.length)
end end
it 'adds nil to indicate interval breaks', :vcr do it 'adds nil to indicate interval breaks', :vcr do
adapter = DbAdapter.new(url) backend = Nilmdb::Backend.new(url)
start_time = 1361466001000000 start_time = 1361466001000000
end_time = 1361577615684742 end_time = 1361577615684742
path = '/tutorial/pump-events' path = '/tutorial/pump-events'
data = adapter.get_data(path,start_time, end_time) data = backend.get_data(path,start_time, end_time)
expect(data.length>0).to be true expect(data.length>0).to be true
num_intervals = data.select{|elem| elem==nil}.length num_intervals = data.select{|elem| elem==nil}.length
expect(num_intervals).to eq 1 expect(num_intervals).to eq 1
...@@ -112,11 +112,11 @@ describe DbAdapter do ...@@ -112,11 +112,11 @@ describe DbAdapter do
describe 'get_intervals' do describe 'get_intervals' do
it 'returns array of interval line segments', :vcr do it 'returns array of interval line segments', :vcr do
adapter = DbAdapter.new(url) backend = Nilmdb::Backend.new(url)
start_time = 1360017784000000 start_time = 1360017784000000
end_time = 1361579612066315 end_time = 1361579612066315
path = '/tutorial/pump-events' path = '/tutorial/pump-events'
intervals = adapter.get_intervals(path,start_time, end_time) intervals = backend.get_intervals(path,start_time, end_time)
expect(intervals.length).to eq(60) #20 intervals expect(intervals.length).to eq(60) #20 intervals
end end
end end
......
...@@ -27,7 +27,7 @@ RSpec.describe 'LoadStreamData' do ...@@ -27,7 +27,7 @@ RSpec.describe 'LoadStreamData' do
raw_count: 1600, data: @data raw_count: 1600, data: @data
) )
@service = LoadStreamData.new(@mockAdapter) @service = Nilmdb::LoadStreamData.new(@mockAdapter)
end end
it 'sets @type to [decimated]' do it 'sets @type to [decimated]' do
@service.run(@db_stream, 10, 90) @service.run(@db_stream, 10, 90)
...@@ -105,7 +105,7 @@ RSpec.describe 'LoadStreamData' do ...@@ -105,7 +105,7 @@ RSpec.describe 'LoadStreamData' do
end_time: @db_stream.end_time, end_time: @db_stream.end_time,
raw_count: 1000, data: @data raw_count: 1000, data: @data
) )
@service = LoadStreamData.new(@mockAdapter) @service = Nilmdb::LoadStreamData.new(@mockAdapter)
end end
it 'sets @type to [interval] if all decimations have too much data' do it 'sets @type to [interval] if all decimations have too much data' do
@service.run(@db_stream, 10, 90) @service.run(@db_stream, 10, 90)
...@@ -140,7 +140,7 @@ RSpec.describe 'LoadStreamData' do ...@@ -140,7 +140,7 @@ RSpec.describe 'LoadStreamData' do
raw_count: 100, data: @data raw_count: 100, data: @data
) )
@service = LoadStreamData.new(@mockAdapter) @service = Nilmdb::LoadStreamData.new(@mockAdapter)
end end
it 'sets @type to [raw]' do it 'sets @type to [raw]' do
@service.run(@db_stream, 10, 90) @service.run(@db_stream, 10, 90)
...@@ -150,17 +150,17 @@ RSpec.describe 'LoadStreamData' do ...@@ -150,17 +150,17 @@ RSpec.describe 'LoadStreamData' do
end end
it 'only if count <= nilm resolution over interval' do it 'only if count <= nilm resolution over interval' do
#must have decimated data ready! #must have decimated data ready!
#use custom adapter and service objects #use custom backend and service objects
data = [[40,0,1,2,3,4,5,6,7,8],nil,[50,0,1,2,3,4,5,6,7,8]] data = [[40,0,1,2,3,4,5,6,7,8],nil,[50,0,1,2,3,4,5,6,7,8]]
adapter = MockDataDbAdapter.new( backend = MockDataDbAdapter.new(
start_time: @db_stream.start_time, start_time: @db_stream.start_time,
end_time: @db_stream.end_time, end_time: @db_stream.end_time,
raw_count: 100, data: data raw_count: 100, data: data
) )
service = LoadStreamData.new(adapter) service = Nilmdb::LoadStreamData.new(backend)
db.max_points_per_plot = 90; db.save db.max_points_per_plot = 90; db.save
service.run(@db_stream, 10, 90) service.run(@db_stream, 10, 90)
expect(adapter.level_retrieved).to be > 1 expect(backend.level_retrieved).to be > 1
end end
it 'populates @data structure with raw data' do it 'populates @data structure with raw data' do
@service.run(@db_stream, 10, 90) @service.run(@db_stream, 10, 90)
...@@ -191,7 +191,7 @@ RSpec.describe 'LoadStreamData' do ...@@ -191,7 +191,7 @@ RSpec.describe 'LoadStreamData' do
end_time: @db_stream.end_time, end_time: @db_stream.end_time,
raw_count: 400, data: @data raw_count: 400, data: @data
) )
@service = LoadStreamData.new(@mockAdapter) @service = Nilmdb::LoadStreamData.new(@mockAdapter)
end end
it 'still succeeds' do it 'still succeeds' do
#requested interval is before actual data #requested interval is before actual data
......
...@@ -28,7 +28,7 @@ describe 'UpdateDb' do ...@@ -28,7 +28,7 @@ describe 'UpdateDb' do
describe '*run*' do describe '*run*' do
def update_with_schema(schema, db: nil) def update_with_schema(schema, db: nil)
@db = db || Db.new @db = db || Db.new
@service = UpdateDb.new(db: @db) @service = Nilmdb::UpdateDb.new(db: @db)
mock_info = mock_info =
@service.run(dbinfo, schema) #ignore dbinfo @service.run(dbinfo, schema) #ignore dbinfo
@root = @db.root_folder @root = @db.root_folder
......
...@@ -4,7 +4,7 @@ require 'rails_helper' ...@@ -4,7 +4,7 @@ require 'rails_helper'
describe 'UpdateFolder service' do describe 'UpdateFolder service' do
let(:db) { Db.new } let(:db) { Db.new }
let(:service) { UpdateDb.new(db: db) } let(:service) { Nilmdb::UpdateDb.new(db: db) }
let(:helper) { DbSchemaHelper.new } let(:helper) { DbSchemaHelper.new }
let(:mock_dbinfo) { {} } let(:mock_dbinfo) { {} }
...@@ -20,7 +20,7 @@ describe 'UpdateFolder service' do ...@@ -20,7 +20,7 @@ describe 'UpdateFolder service' do
folder = DbFolder.find_by_name('old_name') folder = DbFolder.find_by_name('old_name')
expect(folder).to be_present expect(folder).to be_present
# run update again with new metadata # run update again with new metadata
service = UpdateDb.new(db: db) service = Nilmdb::UpdateDb.new(db: db)
service.run(mock_dbinfo, [helper.entry('/folder1/subfolder/info', service.run(mock_dbinfo, [helper.entry('/folder1/subfolder/info',
metadata: { name: 'new_name' })]) metadata: { name: 'new_name' })])
folder.reload folder.reload
......
...@@ -4,7 +4,7 @@ require 'rails_helper' ...@@ -4,7 +4,7 @@ require 'rails_helper'
describe 'UpdateStream service' do describe 'UpdateStream service' do
let(:db) { Db.new } let(:db) { Db.new }
let(:service) { UpdateDb.new(db: db) } let(:service) { Nilmdb::UpdateDb.new(db: db) }
let(:helper) { DbSchemaHelper.new } let(:helper) { DbSchemaHelper.new }
let(:mock_dbinfo) { {} } let(:mock_dbinfo) { {} }
...@@ -21,7 +21,7 @@ describe 'UpdateStream service' do ...@@ -21,7 +21,7 @@ describe 'UpdateStream service' do
stream = DbStream.find_by_name('old_name') stream = DbStream.find_by_name('old_name')
expect(stream).to be_present expect(stream).to be_present
# run update again with new metadata # run update again with new metadata
service = UpdateDb.new(db: db) service = Nilmdb::UpdateDb.new(db: db)
service.run(mock_dbinfo, [helper.entry('/folder1/stream1', service.run(mock_dbinfo, [helper.entry('/folder1/stream1',
metadata: { name: 'new_name' })]) metadata: { name: 'new_name' })])
stream.reload stream.reload
...@@ -59,7 +59,7 @@ describe 'UpdateStream service' do ...@@ -59,7 +59,7 @@ describe 'UpdateStream service' do
expect(element).to be_present expect(element).to be_present
# run update again with new metadata # run update again with new metadata
schema[0][:elements][0][:name] = 'new_name' schema[0][:elements][0][:name] = 'new_name'
service = UpdateDb.new(db: db) service = Nilmdb::UpdateDb.new(db: db)
service.run(mock_dbinfo, schema) service.run(mock_dbinfo, schema)
element.reload element.reload
expect(element.name).to eq('new_name') expect(element.name).to eq('new_name')
......
...@@ -54,17 +54,17 @@ RSpec.describe DbFoldersController, type: :request do ...@@ -54,17 +54,17 @@ RSpec.describe DbFoldersController, type: :request do
describe 'PUT update' do describe 'PUT update' do
before do before do
@mock_adapter = double(DbAdapter) # MockDbAdapter.new #instance_double(DbAdapter) @mock_adapter = double(Nilmdb::Adapter) # MockDbAdapter.new #instance_double(DbAdapter)
@db_success = { error: false, msg: 'success' } @node_success = { error: false, msg: 'success' }
@db_failure = { error: true, msg: 'dberror' } @node_failure = { error: true, msg: 'dberror' }
allow(DbAdapter).to receive(:new).and_return(@mock_adapter) allow(NodeAdapterFactory).to receive(:from_nilm).and_return(@mock_adapter)
end end
context 'with owner permissions' do context 'with owner permissions' do
it 'updates nilmdb and local database' do it 'updates nilmdb and local database' do
@auth_headers = john.create_new_auth_token @auth_headers = john.create_new_auth_token
expect(@mock_adapter).to receive(:set_folder_metadata) expect(@mock_adapter).to receive(:save_folder)
.and_return(@db_success) .and_return(@node_success)
put "/db_folders/#{john_folder.id}.json", put "/db_folders/#{john_folder.id}.json",
params: { name: 'new name' }, params: { name: 'new name' },
headers: @auth_headers headers: @auth_headers
...@@ -75,8 +75,8 @@ RSpec.describe DbFoldersController, type: :request do ...@@ -75,8 +75,8 @@ RSpec.describe DbFoldersController, type: :request do
it 'does not update if nilmdb update fails' do it 'does not update if nilmdb update fails' do
@auth_headers = john.create_new_auth_token @auth_headers = john.create_new_auth_token
expect(@mock_adapter).to receive(:set_folder_metadata) expect(@mock_adapter).to receive(:save_folder)
.and_return(@db_failure) .and_return(@node_failure)
name = john_folder.name name = john_folder.name
put "/db_folders/#{john_folder.id}.json", put "/db_folders/#{john_folder.id}.json",
params: { name: 'new name' }, params: { name: 'new name' },
...@@ -88,7 +88,7 @@ RSpec.describe DbFoldersController, type: :request do ...@@ -88,7 +88,7 @@ RSpec.describe DbFoldersController, type: :request do
it 'returns 422 on invalid parameters' do it 'returns 422 on invalid parameters' do
# name cannot be blank # name cannot be blank
expect(@mock_adapter).to_not receive(:set_folder_metadata) expect(@mock_adapter).to_not receive(:save_folder)
@auth_headers = john.create_new_auth_token @auth_headers = john.create_new_auth_token
put "/db_folders/#{john_folder.id}.json", put "/db_folders/#{john_folder.id}.json",
params: { name: '' }, params: { name: '' },
...@@ -99,8 +99,8 @@ RSpec.describe DbFoldersController, type: :request do ...@@ -99,8 +99,8 @@ RSpec.describe DbFoldersController, type: :request do
it 'only allows configurable parameters to be changed' do it 'only allows configurable parameters to be changed' do
# should ignore start_time and accept description # should ignore start_time and accept description
expect(@mock_adapter).to receive(:set_folder_metadata) expect(@mock_adapter).to receive(:save_folder)
.and_return(@db_success) .and_return(@node_success)
@auth_headers = john.create_new_auth_token @auth_headers = john.create_new_auth_token
start_time = john_folder.start_time start_time = john_folder.start_time
put "/db_folders/#{john_folder.id}.json", put "/db_folders/#{john_folder.id}.json",
...@@ -110,6 +110,15 @@ RSpec.describe DbFoldersController, type: :request do ...@@ -110,6 +110,15 @@ RSpec.describe DbFoldersController, type: :request do
expect(john_folder.reload.start_time).to eq(start_time) expect(john_folder.reload.start_time).to eq(start_time)
expect(john_folder.description).to eq('changed') expect(john_folder.description).to eq('changed')
end end
it 'fails if an adapter cannot be created' do
allow(NodeAdapterFactory).to receive(:from_nilm).and_return(nil)
put "/db_folders/#{john_folder.id}.json",
params: { name: 'new name' },
headers: john.create_new_auth_token
expect(response).to have_http_status(:unprocessable_entity)
expect(response).to have_error_message
end
end end
......
...@@ -62,16 +62,16 @@ RSpec.describe DbStreamsController, type: :request do ...@@ -62,16 +62,16 @@ RSpec.describe DbStreamsController, type: :request do
describe 'PUT update' do describe 'PUT update' do
before do before do
@mock_adapter = double(DbAdapter) # MockDbAdapter.new #instance_double(DbAdapter) @mock_adapter = double(Nilmdb::Adapter) # MockDbAdapter.new #instance_double(DbAdapter)
@db_success = { error: false, msg: 'success' } @db_success = { error: false, msg: 'success' }
@db_failure = { error: true, msg: 'dberror' } @db_failure = { error: true, msg: 'dberror' }
allow(DbAdapter).to receive(:new).and_return(@mock_adapter) allow(NodeAdapterFactory).to receive(:from_nilm).and_return(@mock_adapter)
end end
context 'with owner permissions' do context 'with owner permissions' do
it 'updates nilmdb and local database' do it 'updates nilmdb and local database' do
@auth_headers = john.create_new_auth_token @auth_headers = john.create_new_auth_token
expect(@mock_adapter).to receive(:set_stream_metadata) expect(@mock_adapter).to receive(:save_stream)
.and_return(@db_success) .and_return(@db_success)
elem = @stream.db_elements.first elem = @stream.db_elements.first
put "/db_streams/#{@stream.id}.json", put "/db_streams/#{@stream.id}.json",
...@@ -91,7 +91,7 @@ RSpec.describe DbStreamsController, type: :request do ...@@ -91,7 +91,7 @@ RSpec.describe DbStreamsController, type: :request do
it 'does not update if nilmdb update fails' do it 'does not update if nilmdb update fails' do
@auth_headers = john.create_new_auth_token @auth_headers = john.create_new_auth_token
expect(@mock_adapter).to receive(:set_stream_metadata) expect(@mock_adapter).to receive(:save_stream)
.and_return(@db_failure) .and_return(@db_failure)
name = @stream.name name = @stream.name
put "/db_streams/#{@stream.id}.json", put "/db_streams/#{@stream.id}.json",
...@@ -104,7 +104,7 @@ RSpec.describe DbStreamsController, type: :request do ...@@ -104,7 +104,7 @@ RSpec.describe DbStreamsController, type: :request do
it 'returns 422 on invalid stream parameters' do it 'returns 422 on invalid stream parameters' do
# name cannot be blank # name cannot be blank
expect(@mock_adapter).to_not receive(:set_stream_metadata) expect(@mock_adapter).to_not receive(:save_stream)
@auth_headers = john.create_new_auth_token @auth_headers = john.create_new_auth_token
put "/db_streams/#{@stream.id}.json", put "/db_streams/#{@stream.id}.json",
params: { name: '' }, params: { name: '' },
...@@ -115,7 +115,7 @@ RSpec.describe DbStreamsController, type: :request do ...@@ -115,7 +115,7 @@ RSpec.describe DbStreamsController, type: :request do
it 'returns 422 on invalid element parameters' do it 'returns 422 on invalid element parameters' do
# elements cannot have the same name # elements cannot have the same name
expect(@mock_adapter).to_not receive(:set_stream_metadata) expect(@mock_adapter).to_not receive(:save_stream)
@auth_headers = john.create_new_auth_token @auth_headers = john.create_new_auth_token
elem1 = @stream.db_elements.first elem1 = @stream.db_elements.first
elemN = @stream.db_elements.last elemN = @stream.db_elements.last
...@@ -129,7 +129,7 @@ RSpec.describe DbStreamsController, type: :request do ...@@ -129,7 +129,7 @@ RSpec.describe DbStreamsController, type: :request do
it 'only allows configurable parameters to be changed' do it 'only allows configurable parameters to be changed' do
# should ignore start_time and accept name # should ignore start_time and accept name
expect(@mock_adapter).to receive(:set_stream_metadata) expect(@mock_adapter).to receive(:save_stream)
.and_return(@db_success) .and_return(@db_success)
@auth_headers = john.create_new_auth_token @auth_headers = john.create_new_auth_token
start_time = @stream.start_time start_time = @stream.start_time
...@@ -140,6 +140,14 @@ RSpec.describe DbStreamsController, type: :request do ...@@ -140,6 +140,14 @@ RSpec.describe DbStreamsController, type: :request do
expect(@stream.reload.start_time).to eq(start_time) expect(@stream.reload.start_time).to eq(start_time)
expect(@stream.name).to eq('changed') expect(@stream.name).to eq('changed')
end end
it 'fails if an adapter cannot be created' do
allow(NodeAdapterFactory).to receive(:from_nilm).and_return(nil)
put "/db_streams/#{@stream.id}.json",
params: { start_time: 10, name: 'changed' },
headers: john.create_new_auth_token
expect(response).to have_http_status(:unprocessable_entity)
expect(response).to have_error_message
end
end end
context 'without owner permissions' do context 'without owner permissions' do
...@@ -167,10 +175,10 @@ RSpec.describe DbStreamsController, type: :request do ...@@ -167,10 +175,10 @@ RSpec.describe DbStreamsController, type: :request do
describe 'POST data' do describe 'POST data' do
before do before do
@mock_adapter = double(DbAdapter) # MockDbAdapter.new #instance_double(DbAdapter) @mock_adapter = double(Nilmdb::Adapter) # MockDbAdapter.new #instance_double(DbAdapter)
@db_success = { error: false, msg: 'success' } @db_success = { error: false, msg: 'success' }
@db_failure = { error: true, msg: 'dberror' } @db_failure = { error: true, msg: 'dberror' }
allow(DbAdapter).to receive(:new).and_return(@mock_adapter) allow(NodeAdapterFactory).to receive(:from_nilm).and_return(@mock_adapter)
end end
context 'with viewer permissions' do context 'with viewer permissions' do
......
...@@ -137,7 +137,8 @@ RSpec.describe NilmsController, type: :request do ...@@ -137,7 +137,8 @@ RSpec.describe NilmsController, type: :request do
it 'refreshes nilm data when requested' do it 'refreshes nilm data when requested' do
@auth_headers = john.create_new_auth_token @auth_headers = john.create_new_auth_token
[john_nilm, lab_nilm].each do |nilm| [john_nilm, lab_nilm].each do |nilm|
mock_service = UpdateNilm.new mock_adapter = instance_double(Nilmdb::Adapter)
mock_service = UpdateNilm.new(mock_adapter)
expect(mock_service).to receive(:run).and_return StubService.new expect(mock_service).to receive(:run).and_return StubService.new
allow(UpdateNilm).to receive(:new) allow(UpdateNilm).to receive(:new)
.and_return(mock_service) .and_return(mock_service)
...@@ -167,6 +168,13 @@ RSpec.describe NilmsController, type: :request do ...@@ -167,6 +168,13 @@ RSpec.describe NilmsController, type: :request do
describe 'POST create' do describe 'POST create' do
context 'with authenticated user' do context 'with authenticated user' do
it 'creates a NILM' do it 'creates a NILM' do
result = StubService.new
result.add_error("cannot contact database")
@mock_adapter = instance_double(Nilmdb::Adapter,
node_type: 'nilmdb',
refresh: result)
allow(NodeAdapterFactory).to receive(:from_url).and_return(@mock_adapter)
@auth_headers = john.create_new_auth_token @auth_headers = john.create_new_auth_token
post "/nilms.json", post "/nilms.json",
params: {name: 'new', url: 'http://sampleurl/nilmdb'}, params: {name: 'new', url: 'http://sampleurl/nilmdb'},
...@@ -177,7 +185,7 @@ RSpec.describe NilmsController, type: :request do ...@@ -177,7 +185,7 @@ RSpec.describe NilmsController, type: :request do
# make sure the NILM was built # make sure the NILM was built
nilm = Nilm.find_by_name('new') nilm = Nilm.find_by_name('new')
expect(nilm).to_not be nil expect(nilm).to_not be nil
expect(nilm.db.available).to be false expect(@mock_adapter).to have_received(:refresh)
# user should be an admin # user should be an admin
expect(john.admins_nilm?(nilm)).to be true expect(john.admins_nilm?(nilm)).to be true
end end
......
...@@ -10,7 +10,7 @@ class MockDataDbAdapter ...@@ -10,7 +10,7 @@ class MockDataDbAdapter
@raw_count = raw_count @raw_count = raw_count
@data = data @data = data
@last_path = nil @last_path = nil
@url = "http://mockadapter/nilmdb" @url = "http://mockbackend/nilmdb"
end end
def get_data(path, start_time, end_time) def get_data(path, start_time, end_time)
......
...@@ -8,6 +8,7 @@ FactoryBot.define do ...@@ -8,6 +8,7 @@ FactoryBot.define do
name {Faker::Lorem.unique.words(3).join(' ')} name {Faker::Lorem.unique.words(3).join(' ')}
description { Faker::Lorem.sentence } description { Faker::Lorem.sentence }
url {Faker::Internet.unique.url} url {Faker::Internet.unique.url}
node_type 'nilmdb'
transient do transient do
admins [] admins []
owners [] owners []
......
...@@ -9,7 +9,7 @@ require 'rspec/rails' ...@@ -9,7 +9,7 @@ require 'rspec/rails'
require 'support/factory_bot' require 'support/factory_bot'
require 'support/api_messages' require 'support/api_messages'
require 'support/mock_service' require 'support/mock_service'
require 'support/mock_adapter'
# Add additional requires below this line. Rails is not loaded until this point! # Add additional requires below this line. Rails is not loaded until this point!
# Requires supporting ruby files with custom matchers and macros, etc, in # Requires supporting ruby files with custom matchers and macros, etc, in
......
...@@ -13,13 +13,10 @@ RSpec.describe 'BuildDataset' do ...@@ -13,13 +13,10 @@ RSpec.describe 'BuildDataset' do
data = [{id: elem0.id, type: 'raw', values: [[10,0],[11,1],nil,[12,2]]}, data = [{id: elem0.id, type: 'raw', values: [[10,0],[11,1],nil,[12,2]]},
{id: elem1.id, type: 'raw', values: [[10,3],[11,4],nil,[12,5]]}, {id: elem1.id, type: 'raw', values: [[10,3],[11,4],nil,[12,5]]},
{id: elem2.id, type: 'raw', values: [[10,6],[11,7],nil,[12,8]]}] {id: elem2.id, type: 'raw', values: [[10,6],[11,7],nil,[12,8]]}]
@mock_stream_service = instance_double(LoadStreamData, @mock_adapter = instance_double(Nilmdb::Adapter,
run: StubService.new, load_data: { data: data, decimation_factor: 1})
success?: true, allow(NodeAdapterFactory).to receive(:from_nilm).and_return(@mock_adapter)
data: data, @service = BuildDataset.new(@mock_adapter)
decimation_factor: 1)
allow(LoadStreamData).to receive(:new).and_return(@mock_stream_service)
@service = BuildDataset.new
@service.run(db_stream,0,100) @service.run(db_stream,0,100)
end end
it 'builds the dataset' do it 'builds the dataset' do
...@@ -46,13 +43,10 @@ RSpec.describe 'BuildDataset' do ...@@ -46,13 +43,10 @@ RSpec.describe 'BuildDataset' do
data = [{id: elem0.id, type: 'decimated', values: [[10,0,-1,1],[11,1,0,2],nil,[12,2,1,3]]}, data = [{id: elem0.id, type: 'decimated', values: [[10,0,-1,1],[11,1,0,2],nil,[12,2,1,3]]},
{id: elem1.id, type: 'decimated', values: [[10,3,2,4],[11,4,3,5],nil,[12,5,6,7]]}, {id: elem1.id, type: 'decimated', values: [[10,3,2,4],[11,4,3,5],nil,[12,5,6,7]]},
{id: elem2.id, type: 'interval', values: [[10,0],[11,0],nil,[12,0]]}] {id: elem2.id, type: 'interval', values: [[10,0],[11,0],nil,[12,0]]}]
@mock_stream_service = instance_double(LoadStreamData, @mock_adapter = instance_double(Nilmdb::Adapter,
run: StubService.new, load_data: { data: data, decimation_factor: 4})
success?: true, allow(NodeAdapterFactory).to receive(:from_nilm).and_return(@mock_adapter)
data: data, @service = BuildDataset.new(@mock_adapter)
decimation_factor: 4)
allow(LoadStreamData).to receive(:new).and_return(@mock_stream_service)
@service = BuildDataset.new
@service.run(db_stream,0,100) @service.run(db_stream,0,100)
end end
it 'omits event elements' do it 'omits event elements' do
...@@ -75,13 +69,10 @@ RSpec.describe 'BuildDataset' do ...@@ -75,13 +69,10 @@ RSpec.describe 'BuildDataset' do
data = [{id: elem0.id, type: 'interval', values: [[10,0],[11,0],nil,[12,0]]}, data = [{id: elem0.id, type: 'interval', values: [[10,0],[11,0],nil,[12,0]]},
{id: elem1.id, type: 'interval', values: [[10,0],[11,0],nil,[12,0]]}, {id: elem1.id, type: 'interval', values: [[10,0],[11,0],nil,[12,0]]},
{id: elem2.id, type: 'interval', values: [[10,0],[11,0],nil,[12,0]]}] {id: elem2.id, type: 'interval', values: [[10,0],[11,0],nil,[12,0]]}]
@mock_stream_service = instance_double(LoadStreamData, @mock_adapter = instance_double(Nilmdb::Adapter,
run: StubService.new, load_data: { data: data, decimation_factor: 1})
success?: true, #allow(LoadStreamData).to receive(:new).and_return(@mock_stream_service)
data: data, @service = BuildDataset.new(@mock_adapter)
decimation_factor: 1)
allow(LoadStreamData).to receive(:new).and_return(@mock_stream_service)
@service = BuildDataset.new
@service.run(db_stream,0,100) @service.run(db_stream,0,100)
end end
it 'returns no data' do it 'returns no data' do
...@@ -96,13 +87,10 @@ RSpec.describe 'BuildDataset' do ...@@ -96,13 +87,10 @@ RSpec.describe 'BuildDataset' do
data = [{id: elem0.id, type: 'raw', values: []}, data = [{id: elem0.id, type: 'raw', values: []},
{id: elem1.id, type: 'raw', values: []}, {id: elem1.id, type: 'raw', values: []},
{id: elem2.id, type: 'raw', values: []}] {id: elem2.id, type: 'raw', values: []}]
@mock_stream_service = instance_double(LoadStreamData, @mock_adapter = instance_double(Nilmdb::Adapter,
run: StubService.new, load_data:{data: data, decimation_factor: 1})
success?: true, #allow(LoadStreamData).to receive(:new).and_return(@mock_stream_service)
data: data, @service = BuildDataset.new(@mock_adapter)
decimation_factor: 1)
allow(LoadStreamData).to receive(:new).and_return(@mock_stream_service)
@service = BuildDataset.new
@service.run(db_stream,0,100) @service.run(db_stream,0,100)
end end
it 'returns no data' do it 'returns no data' do
...@@ -111,14 +99,9 @@ RSpec.describe 'BuildDataset' do ...@@ -111,14 +99,9 @@ RSpec.describe 'BuildDataset' do
end end
describe 'when stream service returns error' do describe 'when stream service returns error' do
before do before do
@mock_stream_service = instance_double(LoadStreamData, @mock_adapter = instance_double(Nilmdb::Adapter, load_data: nil)
run: StubService.new, #allow(LoadStreamData).to receive(:new).and_return(@mock_stream_service)
success?: false, @service = BuildDataset.new(@mock_adapter)
errors: ['generic error'],
warnings: [],
notices: [])
allow(LoadStreamData).to receive(:new).and_return(@mock_stream_service)
@service = BuildDataset.new
@service.run(db_stream,0,100) @service.run(db_stream,0,100)
end end
it 'returns error' do it 'returns error' do
......
# frozen_string_literal: true # frozen_string_literal: true
require 'rails_helper' require 'rails_helper'
RSpec.describe 'LoadElementData' do RSpec.describe 'LoadElementData' do
let(:db) { create(:db, max_points_per_plot: 100) } let(:db) { create(:db, max_points_per_plot: 100) }
...@@ -14,19 +15,16 @@ RSpec.describe 'LoadElementData' do ...@@ -14,19 +15,16 @@ RSpec.describe 'LoadElementData' do
@stream_data = [{id: @elem0.id, values: 'mock0'}, @stream_data = [{id: @elem0.id, values: 'mock0'},
{id: @elem1.id, values: 'mock1'}, {id: @elem1.id, values: 'mock1'},
{id: @elem2.id, values: 'mock2'}] {id: @elem2.id, values: 'mock2'}]
@mock_stream_service = MockLoadStreamData.new( @mock_adapter = MockAdapter.new([stream: @db_stream,
[stream: @db_stream, data: @stream_data]) data: @stream_data])
allow(LoadStreamData).to receive(:new).and_return(@mock_stream_service) allow(Nilmdb::Adapter).to receive(:new).and_return(@mock_adapter)
end end
it 'makes one request for the stream data' do it 'makes one request for the stream data' do
expect(@mock_stream_service).to receive(:data).and_return(@stream_data) #expect(@mock_adapter).to receive(:load_data)
service = LoadElementData.new service = LoadElementData.new
service.run([@elem0,@elem2],0,100) service.run([@elem0,@elem2],0,100)
expect(service.success?).to be true expect(service.success?).to be true
expect(service.data).to eq [ expect(service.data).to eq [@stream_data[0], @stream_data[2]]
{id: @elem0.id, values: 'mock0'},
{id: @elem2.id, values: 'mock2'}
]
end end
end end
...@@ -43,10 +41,10 @@ RSpec.describe 'LoadElementData' do ...@@ -43,10 +41,10 @@ RSpec.describe 'LoadElementData' do
@elem3 = create(:db_element, column: 3, db_stream: @db_stream2) @elem3 = create(:db_element, column: 3, db_stream: @db_stream2)
@stream2_data = [{id: @elem2.id, values: 'mock2'}, @stream2_data = [{id: @elem2.id, values: 'mock2'},
{id: @elem3.id, values: 'mock3'}] {id: @elem3.id, values: 'mock3'}]
@mock_stream_service = MockLoadStreamData.new( @mock_adapter = MockAdapter.new(
[{stream: @db_stream1, data: @stream1_data}, [{stream: @db_stream1, data: @stream1_data},
{stream: @db_stream2, data: @stream2_data}]) {stream: @db_stream2, data: @stream2_data}])
allow(LoadStreamData).to receive(:new).and_return(@mock_stream_service) allow(Nilmdb::Adapter).to receive(:new).and_return(@mock_adapter)
end end
it 'makes one request per stream' do it 'makes one request per stream' do
...@@ -57,7 +55,7 @@ RSpec.describe 'LoadElementData' do ...@@ -57,7 +55,7 @@ RSpec.describe 'LoadElementData' do
{id: @elem0.id, values: 'mock0'}, {id: @elem0.id, values: 'mock0'},
{id: @elem3.id, values: 'mock3'} {id: @elem3.id, values: 'mock3'}
] ]
expect(@mock_stream_service.run_count).to eq 2 expect(@mock_adapter.run_count).to eq 2
end end
end end
...@@ -76,10 +74,10 @@ RSpec.describe 'LoadElementData' do ...@@ -76,10 +74,10 @@ RSpec.describe 'LoadElementData' do
@elem3 = create(:db_element, column: 3, db_stream: @db_stream2) @elem3 = create(:db_element, column: 3, db_stream: @db_stream2)
@stream2_data = [{id: @elem2.id, values: 'mock2'}, @stream2_data = [{id: @elem2.id, values: 'mock2'},
{id: @elem3.id, values: 'mock3'}] {id: @elem3.id, values: 'mock3'}]
@mock_stream_service = MockLoadStreamData.new( @mock_adapter = MockAdapter.new(
[{stream: @db_stream1, data: @stream1_data}, [{stream: @db_stream1, data: @stream1_data},
{stream: @db_stream2, data: nil}]) {stream: @db_stream2, data: nil}])
allow(LoadStreamData).to receive(:new).and_return(@mock_stream_service) allow(Nilmdb::Adapter).to receive(:new).and_return(@mock_adapter)
end end
it 'fills in the data that is available' do it 'fills in the data that is available' do
service = LoadElementData.new service = LoadElementData.new
...@@ -89,7 +87,7 @@ RSpec.describe 'LoadElementData' do ...@@ -89,7 +87,7 @@ RSpec.describe 'LoadElementData' do
{id: @elem0.id, values: 'mock0'}, {id: @elem0.id, values: 'mock0'},
{id: @elem3.id, type: 'error', values: nil} {id: @elem3.id, type: 'error', values: nil}
] ]
expect(@mock_stream_service.run_count).to eq 2 expect(@mock_adapter.run_count).to eq 2
end end
end end
...@@ -102,10 +100,10 @@ RSpec.describe 'LoadElementData' do ...@@ -102,10 +100,10 @@ RSpec.describe 'LoadElementData' do
let(:user) {create(:user)} let(:user) {create(:user)}
it 'updates the streams', :vcr do it 'updates the streams', :vcr do
adapter = DbAdapter.new(url) @adapter = Nilmdb::Adapter.new(url)
service = CreateNilm.new service = CreateNilm.new(@adapter)
service.run(name: 'test', url: url, owner: user) service.run(name: 'test', url: url, owner: user)
db = service.nilm.db #db = service.nilm.db
#request data from ac-power (15 Jun 2015 - 27 Jun 2015) #request data from ac-power (15 Jun 2015 - 27 Jun 2015)
# pump-events (04 Feb 2013 - 23 Feb 2013) # pump-events (04 Feb 2013 - 23 Feb 2013)
elem1 = DbStream.find_by_path("/tutorial/ac-power").db_elements.first elem1 = DbStream.find_by_path("/tutorial/ac-power").db_elements.first
......
...@@ -3,11 +3,11 @@ ...@@ -3,11 +3,11 @@
require 'rails_helper' require 'rails_helper'
describe 'EditStream service' do describe 'EditStream service' do
let(:db_adapter) { instance_double(DbAdapter) } let(:mock_adapter) { instance_double(Nilmdb::Adapter) }
let(:stream) { FactoryBot.create(:db_stream, path: '/stream/path', name: 'old') } let(:stream) { FactoryBot.create(:db_stream, path: '/stream/path', name: 'old') }
let(:element) { stream.db_elements.first} let(:element) { stream.db_elements.first}
let(:service) { EditStream.new(db_adapter) } let(:service) { EditStream.new(mock_adapter) }
# db adapter return values # db backend return values
let(:success) { { error: false, msg: '' } } let(:success) { { error: false, msg: '' } }
let(:error) { { error: true, msg: 'server error' } } let(:error) { { error: true, msg: 'server error' } }
...@@ -15,10 +15,10 @@ describe 'EditStream service' do ...@@ -15,10 +15,10 @@ describe 'EditStream service' do
attribs = { id: 0, invalid_attrib: 'ignore', attribs = { id: 0, invalid_attrib: 'ignore',
name: 'new name', name_abbrev: 'nn', name: 'new name', name_abbrev: 'nn',
db_elements_attributes: [{id: element.id, name: 'new!'}] } db_elements_attributes: [{id: element.id, name: 'new!'}] }
allow(db_adapter).to receive(:set_stream_metadata).and_return(success) allow(mock_adapter).to receive(:save_stream).and_return(success)
# run the service, it should call the adapter and save the folder # run the service, it should call the backend and save the folder
service.run(stream, attribs) service.run(stream, attribs)
expect(db_adapter).to have_received(:set_stream_metadata).with(stream) expect(mock_adapter).to have_received(:save_stream).with(stream)
expect(stream.name).to eq('new name') expect(stream.name).to eq('new name')
expect(stream.name_abbrev).to eq('nn') expect(stream.name_abbrev).to eq('nn')
expect(DbElement.find(element.id).name).to eq('new!') expect(DbElement.find(element.id).name).to eq('new!')
...@@ -26,17 +26,17 @@ describe 'EditStream service' do ...@@ -26,17 +26,17 @@ describe 'EditStream service' do
it 'checks to make sure new attributes are valid' do it 'checks to make sure new attributes are valid' do
attribs = { name: '' } # cannot have empty name attribs = { name: '' } # cannot have empty name
allow(db_adapter).to receive(:set_stream_metadata).and_return(success) allow(mock_adapter).to receive(:save_stream).and_return(success)
# run the service, it shouldn't call the database adapter # run the service, it shouldn't call the database backend
service.run(stream, attribs) service.run(stream, attribs)
expect(service.errors?).to be true expect(service.errors?).to be true
expect(db_adapter).to_not have_received(:set_stream_metadata) expect(mock_adapter).to_not have_received(:save_stream)
end end
it 'does not change stream or elements on a server error' do it 'does not change stream or elements on a server error' do
attribs = { name: 'new', attribs = { name: 'new',
db_elements_attributes: [{id: element.id, name: 'new'}]} db_elements_attributes: [{id: element.id, name: 'new'}]}
allow(db_adapter).to receive(:set_stream_metadata).and_return(error) allow(mock_adapter).to receive(:save_stream).and_return(error)
allow(stream).to receive(:save!) allow(stream).to receive(:save!)
allow(element).to receive(:save!) allow(element).to receive(:save!)
# run the service, it shouldn't save the folder object # run the service, it shouldn't save the folder object
......
...@@ -3,10 +3,10 @@ ...@@ -3,10 +3,10 @@
require 'rails_helper' require 'rails_helper'
describe 'EditFolder service' do describe 'EditFolder service' do
let(:db_adapter) { instance_double(DbAdapter) } let(:mock_adapter) { instance_double(Nilmdb::Adapter) }
let(:folder) { DbFolder.new(path: '/folder/path', name: 'old') } let(:folder) { DbFolder.new(path: '/folder/path', name: 'old') }
let(:service) { EditFolder.new(db_adapter) } let(:service) { EditFolder.new(mock_adapter) }
# db adapter return values # mock_adapter return values
let(:success) { { error: false, msg: '' } } let(:success) { { error: false, msg: '' } }
let(:error) { { error: true, msg: 'server error' } } let(:error) { { error: true, msg: 'server error' } }
...@@ -14,10 +14,10 @@ describe 'EditFolder service' do ...@@ -14,10 +14,10 @@ describe 'EditFolder service' do
attribs = { id: 0, invalid_attrib: 'ignore', attribs = { id: 0, invalid_attrib: 'ignore',
name: 'new', description: 'updated' } name: 'new', description: 'updated' }
allow(folder).to receive(:save!) allow(folder).to receive(:save!)
allow(db_adapter).to receive(:set_folder_metadata).and_return(success) allow(mock_adapter).to receive(:save_folder).and_return(success)
# run the service, it should call the adapter and save the folder # run the service, it should call the backend and save the folder
service.run(folder, attribs) service.run(folder, attribs)
expect(db_adapter).to have_received(:set_folder_metadata).with(folder) expect(mock_adapter).to have_received(:save_folder).with(folder)
expect(folder.name).to eq('new') expect(folder.name).to eq('new')
expect(folder.description).to eq('updated') expect(folder.description).to eq('updated')
expect(folder).to have_received(:save!) expect(folder).to have_received(:save!)
...@@ -25,16 +25,16 @@ describe 'EditFolder service' do ...@@ -25,16 +25,16 @@ describe 'EditFolder service' do
it 'checks to make sure new attributes are valid' do it 'checks to make sure new attributes are valid' do
attribs = { name: '' } # cannot have empty name attribs = { name: '' } # cannot have empty name
allow(db_adapter).to receive(:set_folder_metadata).and_return(success) allow(mock_adapter).to receive(:save_folder).and_return(success)
# run the service, it shouldn't call the database adapter # run the service, it shouldn't call the database backend
service.run(folder, attribs) service.run(folder, attribs)
expect(service.errors?).to be true expect(service.errors?).to be true
expect(db_adapter).to_not have_received(:set_folder_metadata) expect(mock_adapter).to_not have_received(:save_folder)
end end
it 'does not change folder on a server error' do it 'does not change folder on a server error' do
attribs = { name: 'new' } attribs = { name: 'new' }
allow(db_adapter).to receive(:set_folder_metadata).and_return(error) allow(mock_adapter).to receive(:save_folder).and_return(error)
allow(folder).to receive(:save!) allow(folder).to receive(:save!)
# run the service, it shouldn't save the folder object # run the service, it shouldn't save the folder object
service.run(folder, attribs) service.run(folder, attribs)
......
...@@ -8,13 +8,13 @@ describe 'UpdateJouleModules' do ...@@ -8,13 +8,13 @@ describe 'UpdateJouleModules' do
nilm = create(:nilm) nilm = create(:nilm)
nilm.joule_modules << create(:joule_module, name: 'prev1') nilm.joule_modules << create(:joule_module, name: 'prev1')
nilm.joule_modules << create(:joule_module, name: 'prev2') nilm.joule_modules << create(:joule_module, name: 'prev2')
adapter = MockJouleAdapter.new backend = MockJouleAdapter.new
adapter.add_module("new1",inputs={i1: '/path/1'}, backend.add_module("new1",inputs={i1: '/path/1'},
outputs={o1: '/path/2'}) outputs={o1: '/path/2'})
adapter.add_module("new2",inputs={i1: '/path/3',i2: '/path/4'}, backend.add_module("new2",inputs={i1: '/path/3',i2: '/path/4'},
outputs={o1: '/path/5',o2: '/path/5'}) outputs={o1: '/path/5',o2: '/path/5'})
service = UpdateJouleModules.new(nilm) service = UpdateJouleModules.new(nilm)
service.run(adapter.module_info) service.run(backend.module_info)
expect(service.success?).to be true expect(service.success?).to be true
# new modules are in the database # new modules are in the database
expect(nilm.joule_modules.find_by_name('new1')).to be_present expect(nilm.joule_modules.find_by_name('new1')).to be_present
...@@ -31,21 +31,21 @@ describe 'UpdateJouleModules' do ...@@ -31,21 +31,21 @@ describe 'UpdateJouleModules' do
end end
it 'produces a warning if a stream is not in the database' do it 'produces a warning if a stream is not in the database' do
nilm = create(:nilm) nilm = create(:nilm)
adapter = MockJouleAdapter.new backend = MockJouleAdapter.new
adapter.add_module("module",outputs={output: '/missing/path'}) backend.add_module("module",outputs={output: '/missing/path'})
service = UpdateJouleModules.new(nilm) service = UpdateJouleModules.new(nilm)
service.run(adapter.module_info) service.run(backend.module_info)
expect(service.warnings?).to be true expect(service.warnings?).to be true
end end
it 'links db_stream to the pipe if the stream is in the database' do it 'links db_stream to the pipe if the stream is in the database' do
nilm = create(:nilm) nilm = create(:nilm)
nilm.db.db_streams << create(:db_stream, path: '/matched/path1') nilm.db.db_streams << create(:db_stream, path: '/matched/path1')
nilm.db.db_streams << create(:db_stream, path: '/matched/path2') nilm.db.db_streams << create(:db_stream, path: '/matched/path2')
adapter = MockJouleAdapter.new backend = MockJouleAdapter.new
adapter.add_module("module",inputs={input: '/matched/path1'}, backend.add_module("module",inputs={input: '/matched/path1'},
outputs={output: '/matched/path2'}) outputs={output: '/matched/path2'})
service = UpdateJouleModules.new(nilm) service = UpdateJouleModules.new(nilm)
service.run(adapter.module_info) service.run(backend.module_info)
expect(service.warnings?).to be false expect(service.warnings?).to be false
end end
it 'returns error if Joule server is unavailable' do it 'returns error if Joule server is unavailable' do
......
...@@ -5,16 +5,16 @@ test_nilm_url = 'http://localhost:8080/nilmdb' ...@@ -5,16 +5,16 @@ test_nilm_url = 'http://localhost:8080/nilmdb'
RSpec.describe 'CreateNilm' do RSpec.describe 'CreateNilm' do
describe 'build' do describe 'build' do
it 'creates and populates a Db object', :vcr do it 'creates and populates a Db object' do
result = StubService.new
result.add_error("unable to contact database")
# mock the database updater # mock the database updater
service = instance_double(UpdateDb, @mock_adapter = instance_double(Nilmdb::Adapter,
run: StubService.new, refresh: result,
errors: ['cannot contact database'], node_type: 'nilmdb')
warnings: [])
allow(UpdateDb).to receive(:new).and_return(service)
user = create(:user, first_name: "John") user = create(:user, first_name: "John")
# run the NILM creation # run the NILM creation
nilm_creator = CreateNilm.new nilm_creator = CreateNilm.new(@mock_adapter)
nilm_creator.run( nilm_creator.run(
name: 'test', name: 'test',
description: 'test description', description: 'test description',
...@@ -28,7 +28,7 @@ RSpec.describe 'CreateNilm' do ...@@ -28,7 +28,7 @@ RSpec.describe 'CreateNilm' do
expect(nilm).to_not be nil expect(nilm).to_not be nil
expect(nilm.db).to be_present expect(nilm.db).to be_present
# ...and the database has been populated # ...and the database has been populated
expect(service).to have_received(:run) expect(@mock_adapter).to have_received(:refresh)
expect(user.owns_nilm?(nilm)).to be true expect(user.owns_nilm?(nilm)).to be true
end end
end end
......
...@@ -4,41 +4,27 @@ require 'rails_helper' ...@@ -4,41 +4,27 @@ require 'rails_helper'
describe 'UpdateNilm' do describe 'UpdateNilm' do
it 'updates the db when NilmDB is accessible' do it 'updates the db when NilmDB is accessible' do
mock_service = instance_double(UpdateDb, mock_adapter = instance_double(Nilmdb::Adapter,
run: StubService.new, refresh: StubService.new)
errors: [],
warnings: [])
allow(UpdateDb).to receive(:new)
.and_return(mock_service)
mock_adapter = double(JouleAdapter)
allow(JouleAdapter).to receive(:new).and_return(mock_adapter)
expect(mock_adapter).to receive(:module_info).and_return([])
nilm = create(:nilm) nilm = create(:nilm)
service = UpdateNilm.new() service = UpdateNilm.new(mock_adapter)
service.run(nilm) service.run(nilm)
expect(service.success?).to be true expect(service.success?).to be true
end end
it 'returns error if db is nil' do it 'returns error if db is nil' do
mock_adapter = instance_double(Nilmdb::Adapter)
nilm = Nilm.create(name: 'test', url: 'invalid') nilm = Nilm.create(name: 'test', url: 'invalid')
mock_service = instance_double(UpdateDb, service = UpdateNilm.new(mock_adapter)
run: StubService.new)
allow(UpdateDb).to receive(:new)
.and_return(mock_service)
service = UpdateNilm.new()
service.run(nilm) service.run(nilm)
expect(service.success?).to be false expect(service.success?).to be false
expect(mock_service).to_not have_received(:run)
end end
it 'returns error if db is offline' do it 'returns error if db is offline' do
nilm = create(:nilm)
resp = StubService.new resp = StubService.new
resp.add_error('offline') resp.add_error('offline')
mock_service = instance_double(UpdateDb, mock_adapter = instance_double(Nilmdb::Adapter,
run: resp) refresh: resp)
allow(UpdateDb).to receive(:new)
.and_return(mock_service)
nilm = create(:nilm) nilm = create(:nilm)
service = UpdateNilm.new() service = UpdateNilm.new(mock_adapter)
service.run(nilm) service.run(nilm)
expect(service.success?).to be false expect(service.success?).to be false
end end
......
class MockLoadStreamData class MockAdapter
include ServiceStatus attr_reader :run_count
attr_reader :data, :run_count
def initialize(dataset) def initialize(dataset)
super() super()
@dataset = dataset @dataset = dataset
@data = nil
@run_count = 0 @run_count = 0
end end
def run(db_stream, start_time, end_time, elements=[], resolution=nil) def load_data(db_stream, start_time, end_time, elements=[], resolution=nil)
@data = @dataset.select{|d| d[:stream]==db_stream}.first[:data] data = @dataset.select{|d| d[:stream]==db_stream}.first[:data]
@run_count += 1 @run_count += 1
if(@data == nil) if data == nil
self.add_error('could not retrieve stream data')
return nil return nil
else
self.reset_messages
end end
return self {data: data, decimation_factor: 1}
end end
end end
\ No newline at end of file
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment