Commit 42a5ed30 by John Doe

updated to new Joule API

parent d7333fb0
@@ -40,6 +40,16 @@ module Joule
}
end
def download_instructions(db_stream, start_time, end_time)
"# --------- JOULE INSTRUCTIONS ----------
#
# raw data can be accessed using the joule cli, run:
#
# $> joule -u #{@backend.url} data read -s #{start_time} -e #{end_time} #{db_stream.path}
#
# ------------------------------------------"
end
def module_interface(joule_module, req)
@backend.module_interface(joule_module, req)
end
......
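For reference, the block this helper emits into the data download header would look something like the following; the node URL, timestamps, and stream path are made-up values for illustration, not output from the commit:

# --------- JOULE INSTRUCTIONS ----------
#
# raw data can be accessed using the joule cli, run:
#
# $> joule -u http://joule-node.example:8088 data read -s 1531248642561047 -e 1531330705273202 /demo/sensor/power
#
# ------------------------------------------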
@@ -88,12 +88,66 @@ module Joule
options = { query: query}
begin
resp = self.class.get("#{@url}/data.json", options)
if resp.code==400 and resp.body.include?('decimated data is not available')
return {success: false, result: "decimation error"}
end
return {success: false, result: resp.body} unless resp.success?
rescue
return {success: false, result: "connection error"}
end
{success: true, result: resp.parsed_response.symbolize_keys}
end
def load_intervals(joule_id, start_time, end_time)
query = {'id': joule_id}
query['start'] = start_time unless start_time.nil?
query['end'] = end_time unless end_time.nil?
options = { query: query}
begin
resp = self.class.get("#{@url}/data/intervals.json", options)
return {success: false, result: resp.body} unless resp.success?
rescue
return {success: false, result: "connection error"}
end
data = []
resp.parsed_response.each do |interval|
data.push([interval[0], 0])
data.push([interval[1], 0])
data.push(nil) # break up the intervals
end
{success: true, result: data}
end
def update_stream(db_stream)
elements = []
db_stream.db_elements.each do |elem|
elements << {name: elem.name,
plottable: elem.plottable,
units: elem.units,
default_min: elem.default_min,
default_max: elem.default_max,
scale_factor: elem.scale_factor,
offset: elem.offset,
display_type: elem.display_type}
end
attrs = { name: db_stream.name,
description: db_stream.description,
elements: elements
}.to_json
begin
response = self.class.put("#{@url}/stream.json",
body: {
id: db_stream.joule_id,
stream: attrs})
rescue
return { error: true, msg: 'cannot contact Joule server' }
end
unless response.success?
return { error: true, msg: "error updating #{db_stream.path} metadata" }
end
{ error: false, msg: 'success' }
end
end
end
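Both load_data and the new load_intervals now report failures as a {success: false, result: ...} hash instead of returning nil. A minimal caller sketch of the new contract (not part of the commit; the Backend constructor argument and the stream id are assumptions used only for illustration):

# hypothetical setup: constructor argument and stream id are invented
backend = Joule::Backend.new('http://joule-node.example:8088')
resp = backend.load_data(3, 1531248642561047, 1531330705273202, 200)
if resp[:success]
  # result carries the parsed payload, e.g. resp[:result][:data]
  puts "loaded #{resp[:result][:data].count} rows"
elsif resp[:result] == 'decimation error'
  # fall back to interval boundaries, as LoadStreamData does below
  intervals = backend.load_intervals(3, 1531248642561047, 1531330705273202)
  puts "loaded #{intervals[:result].count} interval entries" if intervals[:success]
else
  puts "request failed: #{resp[:result]}"
end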
@@ -26,26 +26,40 @@ module Joule
else
[db_stream.db.max_points_per_plot,resolution].min
end
resp = @backend.load_data(db_stream.joule_id, start_time, end_time, resolution)
unless resp[:success]
if resp[:result] == 'decimation error'
resp = @backend.load_intervals(db_stream.joule_id, start_time, end_time)
if resp[:success]
@data = DataBuilder.build_interval_data(elements, resp[:result])
@data_type = 'interval'
return self
end
end
add_error("cannot get data for [#{db_stream.name}] @ #{@backend.url}: #{resp[:result]}")
return self
end
# convert data into single array with nil's at interval boundaries
result = resp[:result]
data = []
result[:data].each do |interval|
data += interval
data.push(nil)
end
if result[:decimation_factor] > 1
@data_type = 'decimated'
decimateable_elements =
elements.select{|e| %w(continuous discrete).include? e.display_type}
interval_elements = elements.select{|e| e.display_type=='event'}
@data = DataBuilder.build_decimated_data(decimateable_elements, data) +
DataBuilder.build_intervals_from_decimated_data(interval_elements, data)
else
@data = DataBuilder.build_raw_data(elements,data)
@data_type = 'raw'
end
@decimation_factor = result[:decimation_factor]
self
end
end
end
\ No newline at end of file
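For reference, the interval fallback hands DataBuilder.build_interval_data the flat array produced by Backend#load_intervals: [timestamp, 0] pairs marking each interval's start and end, with nil entries separating intervals. A made-up example of that payload:

# shape of resp[:result] from load_intervals (timestamps invented)
[[1531248642561047, 0], [1531248650000000, 0], nil,
 [1531248700000000, 0], [1531248710000000, 0], nil]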
@@ -42,6 +42,7 @@ module Joule
size_on_disk = 0
start_time = nil
end_time = nil
locked = false
schema[:children].each do |child_schema|
child = db_folder.subfolders.find_by_joule_id(child_schema[:id])
child ||= DbFolder.new(parent: db_folder, db: db_folder.db)
@@ -62,6 +63,7 @@ module Joule
end
end
updated_ids << child_schema[:id]
locked = true if child.locked?
end
# remove any subfolders that are no longer on the folder
db_folder.subfolders.where.not(joule_id: updated_ids).destroy_all
@@ -87,6 +89,7 @@ module Joule
end_time = [stream.end_time, end_time].max
end
end
locked=true if stream.locked?
updated_ids << stream_schema[:id]
end
# remove any streams that are no longer in the folder
@@ -95,6 +98,7 @@ module Joule
db_folder.size_on_disk = size_on_disk
db_folder.start_time = start_time
db_folder.end_time = end_time
db_folder.locked = locked
db_folder.save
end
......
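The net effect of the locked bookkeeping above is that a folder is marked locked whenever any of its subfolders or streams is locked. Stated as a single expression (a sketch only; the subfolders and db_streams association names are assumptions, not code from this commit):

# equivalent intent, assuming these association names exist
db_folder.locked = db_folder.subfolders.any?(&:locked?) ||
                   db_folder.db_streams.any?(&:locked?)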
@@ -36,9 +36,19 @@ module Nilmdb
decimation_factor: data_service.decimation_factor}
end
def download_instructions(db_stream, start_time, end_time)
"# --------- NILMTOOL INSTRUCTIONS ----------
#
# raw data can be accessed using nilmtool, run:
#
# $> nilmtool -u #{@backend.url} extract -s @#{start_time} -e @#{end_time} #{db_stream.path}
#
# ------------------------------------------"
end
def node_type
'nilmdb'
end
end
end
@@ -33,7 +33,7 @@ class DbFolder < ApplicationRecord
# end
def self.defined_attributes
[:name, :description, :hidden, :locked]
end
def insert_stream(stream:)
@@ -53,6 +53,6 @@ class DbFolder < ApplicationRecord
def self.json_keys
[:id, :name, :description, :path, :hidden,
:start_time, :end_time, :size_on_disk, :locked]
end
end
@@ -29,7 +29,7 @@ class DbStream < ApplicationRecord
validates_with DbDataTypeValidator
def self.defined_attributes
[:name, :name_abbrev, :description, :hidden, :data_type, :locked]
end
# def name_path
@@ -59,7 +59,7 @@ class DbStream < ApplicationRecord
def self.json_keys
[:id, :name, :description, :path, :start_time,
:end_time, :size_on_disk, :total_rows, :total_time,
:data_type, :name_abbrev, :delete_locked, :locked, :hidden]
end
end
@@ -36,6 +36,7 @@ class BuildDataset
@legend[:end_time] = end_time
@legend[:decimation_factor] = result[:decimation_factor]
@legend[:num_rows] = @data.length
@legend[:download_instructions] = @node_adapter.download_instructions(db_stream, start_time, end_time)
if @data.empty?
@legend[:notes] = 'there is no data available over this interval'
elsif @data[0].length!=db_stream.db_elements.length+1
......
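With this change the legend passed to the download template carries the CLI instructions alongside the existing metadata. An illustrative @legend hash (values invented, keys limited to those visible in this commit):

{
  decimation_factor: 1,
  num_rows: 2,
  notes: '',
  download_instructions: "# --------- JOULE INSTRUCTIONS ----------\n# ..."
}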
@@ -24,13 +24,7 @@
# data: [1737x15 double] % the data
# textdata: {41x1 cell} % this help text
#
<%=@legend[:download_instructions].html_safe %>
#
<% @data.each do |row|%>
<%= row.join(", ")%>
......
class AddLockedToFolderAndStream < ActiveRecord::Migration[5.2]
def change
add_column :db_streams, :locked, :boolean
add_column :db_folders, :locked, :boolean
end
end
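Note that the new boolean columns are added without a default, so rows that predate the migration will read NULL rather than false. If that distinction matters, a follow-up along these lines could normalize it (a hypothetical migration, not part of this commit):

class AddDefaultsToLockedColumns < ActiveRecord::Migration[5.2]
  def change
    # give new rows an explicit default; existing NULLs would still need a backfill
    change_column_default :db_streams, :locked, from: nil, to: false
    change_column_default :db_folders, :locked, from: nil, to: false
  end
end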
@@ -10,7 +10,7 @@
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 2018_08_07_001215) do
create_table "data_views", force: :cascade do |t|
t.integer "user_id"
@@ -70,6 +70,7 @@ ActiveRecord::Schema.define(version: 2018_07_10_014435) do
t.integer "end_time", limit: 8
t.integer "size_on_disk", limit: 8
t.integer "joule_id"
t.boolean "locked"
t.index ["joule_id"], name: "index_db_folders_on_joule_id" t.index ["joule_id"], name: "index_db_folders_on_joule_id"
end end
...@@ -91,6 +92,7 @@ ActiveRecord::Schema.define(version: 2018_07_10_014435) do ...@@ -91,6 +92,7 @@ ActiveRecord::Schema.define(version: 2018_07_10_014435) do
t.integer "size_on_disk", limit: 8 t.integer "size_on_disk", limit: 8
t.integer "db_id" t.integer "db_id"
t.integer "joule_id" t.integer "joule_id"
t.boolean "locked"
t.index ["joule_id"], name: "index_db_streams_on_joule_id" t.index ["joule_id"], name: "index_db_streams_on_joule_id"
end end
......
@@ -26,9 +26,10 @@ describe Joule::Backend do
1531248642561047,
1531248642581047,
200)
expect(resp[:success]).to be true
expect(resp[:result][:decimated]).to be false
expect(resp[:result][:data].count).to be > 0
expect(resp[:result][:data].count).to be < 200
end
it 'loads decimated data', :vcr do
@@ -37,8 +38,9 @@ describe Joule::Backend do
1531248642561047,
1531330705273202,
20)
expect(resp[:success]).to be true
expect(resp[:result][:decimated]).to be true
expect(resp[:result][:data].count).to be > 0
expect(resp[:result][:data].count).to be < 200
end
end
@@ -18,6 +18,13 @@
"datatype": "INT16",
"keep_us": -1,
"decimate": true,
"data_info":{
"start":null,
"end":null,
"rows":0,
"bytes":0,
"total_time":0
},
"elements": [ "elements": [
{ {
"id": 12, "id": 12,
...@@ -62,6 +69,13 @@ ...@@ -62,6 +69,13 @@
"datatype": "UINT8", "datatype": "UINT8",
"keep_us": -1, "keep_us": -1,
"decimate": true, "decimate": true,
"data_info":{
"start":null,
"end":null,
"rows":0,
"bytes":0,
"total_time":0
},
"elements": [ "elements": [
{ {
"id": 14, "id": 14,
...@@ -108,6 +122,13 @@ ...@@ -108,6 +122,13 @@
"datatype": "FLOAT32", "datatype": "FLOAT32",
"keep_us": -1, "keep_us": -1,
"decimate": true, "decimate": true,
"data_info":{
"start":null,
"end":null,
"rows":0,
"bytes":0,
"total_time":0
},
"elements": [ "elements": [
{ {
"id": 17, "id": 17,
...@@ -169,6 +190,13 @@ ...@@ -169,6 +190,13 @@
"datatype": "INT32", "datatype": "INT32",
"keep_us": -1, "keep_us": -1,
"decimate": true, "decimate": true,
"data_info":{
"start":null,
"end":null,
"rows":0,
"bytes":0,
"total_time":0
},
"elements": [ "elements": [
{ {
"id": 20, "id": 20,
...@@ -219,6 +247,13 @@ ...@@ -219,6 +247,13 @@
"datatype": "UINT16", "datatype": "UINT16",
"keep_us": -1, "keep_us": -1,
"decimate": true, "decimate": true,
"data_info":{
"start":null,
"end":null,
"rows":0,
"bytes":0,
"total_time":0
},
"elements": [ "elements": [
{ {
"id": 23, "id": 23,
......
@@ -191,6 +191,7 @@ RSpec.describe DbStreamsController, type: :request do
num_rows: 2,
decimation_factor: 1,
notes: 'note_test_string',
download_instructions: 'how to download from CLI',
columns: [{index: 1, name: 'time', units: 'us'},
{index: 2, name: 'e1', units: 'watts'},
{index: 3, name: 'e2', units: 'joules'}]
......
@@ -14,7 +14,8 @@ RSpec.describe 'BuildDataset' do
{id: elem1.id, type: 'raw', values: [[10,3],[11,4],nil,[12,5]]},
{id: elem2.id, type: 'raw', values: [[10,6],[11,7],nil,[12,8]]}]
@mock_adapter = instance_double(Nilmdb::Adapter,
load_data: { data: data, decimation_factor: 1},
download_instructions: "stub")
allow(NodeAdapterFactory).to receive(:from_nilm).and_return(@mock_adapter)
@service = BuildDataset.new(@mock_adapter)
@service.run(db_stream,0,100)
@@ -44,7 +45,8 @@ RSpec.describe 'BuildDataset' do
{id: elem1.id, type: 'decimated', values: [[10,3,2,4],[11,4,3,5],nil,[12,5,6,7]]},
{id: elem2.id, type: 'interval', values: [[10,0],[11,0],nil,[12,0]]}]
@mock_adapter = instance_double(Nilmdb::Adapter,
load_data: { data: data, decimation_factor: 4},
download_instructions: "stub")
allow(NodeAdapterFactory).to receive(:from_nilm).and_return(@mock_adapter)
@service = BuildDataset.new(@mock_adapter)
@service.run(db_stream,0,100)
@@ -70,7 +72,8 @@ RSpec.describe 'BuildDataset' do
{id: elem1.id, type: 'interval', values: [[10,0],[11,0],nil,[12,0]]},
{id: elem2.id, type: 'interval', values: [[10,0],[11,0],nil,[12,0]]}]
@mock_adapter = instance_double(Nilmdb::Adapter,
load_data: { data: data, decimation_factor: 1},
download_instructions: "stub")
#allow(LoadStreamData).to receive(:new).and_return(@mock_stream_service)
@service = BuildDataset.new(@mock_adapter)
@service.run(db_stream,0,100)
@@ -88,7 +91,8 @@ RSpec.describe 'BuildDataset' do
{id: elem1.id, type: 'raw', values: []},
{id: elem2.id, type: 'raw', values: []}]
@mock_adapter = instance_double(Nilmdb::Adapter,
load_data:{data: data, decimation_factor: 1},
download_instructions: "stub")
#allow(LoadStreamData).to receive(:new).and_return(@mock_stream_service)
@service = BuildDataset.new(@mock_adapter)
@service.run(db_stream,0,100)
......