Commit 42a5ed30 by John Doe

Updated to the new Joule API

parent d7333fb0
......@@ -40,6 +40,16 @@ module Joule
}
end
def download_instructions(db_stream, start_time, end_time)
"# --------- JOULE INSTRUCTIONS ----------
#
# raw data can be accessed using the joule cli, run:
#
# $> joule -u #{@backend.url} data read -s #{start_time} -e #{end_time} #{db_stream.path}
#
# ------------------------------------------"
end
def module_interface(joule_module, req)
@backend.module_interface(joule_module, req)
end
......
......@@ -88,12 +88,66 @@ module Joule
options = { query: query}
begin
resp = self.class.get("#{@url}/data.json", options)
#TODO: handle interval data
return nil unless resp.success?
if resp.code==400 and resp.body.include?('decimated data is not available')
return {success: false, result: "decimation error"}
end
return {success: false, result: resp.body} unless resp.success?
rescue
return nil
return {success: false, result: "connection error"}
end
resp.parsed_response.symbolize_keys
{success: true, result: resp.parsed_response.symbolize_keys}
end
def load_intervals(joule_id, start_time, end_time)
query = {'id': joule_id}
query['start'] = start_time unless start_time.nil?
query['end'] = end_time unless end_time.nil?
options = { query: query}
begin
resp = self.class.get("#{@url}/data/intervals.json", options)
return {success: false, result: resp.body} unless resp.success?
rescue
return {success: false, result: "connection error"}
end
data = []
resp.parsed_response.each do |interval|
data.push([interval[0], 0])
data.push([interval[1], 0])
data.push(nil) # break up the intervals
end
{success: true, result: data}
end
def update_stream(db_stream)
elements = []
db_stream.db_elements.each do |elem|
elements << {name: elem.name,
plottable: elem.plottable,
units: elem.units,
default_min: elem.default_min,
default_max: elem.default_max,
scale_factor: elem.scale_factor,
offset: elem.offset,
display_type: elem.display_type}
end
attrs = { name: db_stream.name,
description: db_stream.description,
elements: elements
}.to_json
begin
response = self.class.put("#{@url}/stream.json",
body: {
id: db_stream.joule_id,
stream: attrs})
rescue
return { error: true, msg: 'cannot contact Joule server' }
end
unless response.success?
return { error: true, msg: "error updating #{db_stream.path} metadata" }
end
{ error: false, msg: 'success' }
end
end
end
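
Note: with this commit, Joule::Backend#load_data and #load_intervals wrap their responses in a {success:, result:} hash instead of returning parsed data or nil, so callers branch on :success. A minimal consumer sketch follows; the `backend` object, stream id, and timestamps are assumptions for illustration, not part of the commit:

    # Hypothetical caller of the new return contract; `backend` is an
    # already-constructed Joule::Backend and the id/timestamps are made up.
    resp = backend.load_data(42, 1531248642561047, 1531248642581047, 200)
    if resp[:success]
      payload = resp[:result]          # { data:, decimated:, decimation_factor: }
      puts "decimated: #{payload[:decimated]}"
    elsif resp[:result] == 'decimation error'
      # fall back to interval boundaries when decimated data is unavailable
      intervals = backend.load_intervals(42, 1531248642561047, 1531248642581047)
      puts intervals[:result].inspect if intervals[:success]
    else
      warn "load failed: #{resp[:result]}"
    end
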
......@@ -26,26 +26,40 @@ module Joule
else
[db_stream.db.max_points_per_plot,resolution].min
end
result = @backend.load_data(db_stream.joule_id, start_time, end_time, resolution)
if result.nil?
add_error("cannot get data for [#{db_stream.name}] @ #{@db_backend.url}")
resp = @backend.load_data(db_stream.joule_id, start_time, end_time, resolution)
unless resp[:success]
if resp[:result] == 'decimation error'
resp = @backend.load_intervals(db_stream.joule_id, start_time, end_time)
if resp[:success]
@data = DataBuilder.build_interval_data(elements, resp[:result])
@data_type = 'interval'
return self
end
end
add_error("cannot get data for [#{db_stream.name}] @ #{@backend.url}: #{resp[:result]}")
return self
end
# convert data into single array with nil's at interval boundaries
result = resp[:result]
data = []
result[:data].each do |interval|
data += interval
data.push(nil)
end
if result[:decimated]
@data = DataBuilder.build_decimated_data(elements,data)
if result[:decimation_factor] > 1
@data_type = 'decimated'
decimateable_elements =
elements.select{|e| %w(continuous discrete).include? e.display_type}
interval_elements = elements.select{|e| e.display_type=='event'}
@data = DataBuilder.build_decimated_data(decimateable_elements, data) +
DataBuilder.build_intervals_from_decimated_data(interval_elements, data)
else
@data = DataBuilder.build_raw_data(elements,data)
@data_type = 'raw'
end
#TODO: handle interval data
@decimation_factor = 1 # TODO: fix this
@decimation_factor = result[:decimation_factor]
self
end
end
end
\ No newline at end of file
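
With decimated results, elements are now routed by display_type: continuous and discrete elements go through the decimated builder, while event elements are rendered as intervals. A standalone sketch of that partition, using stand-in structs rather than the real DbElement model:

    # Stand-in elements; only display_type matters for the split.
    Element = Struct.new(:name, :display_type)
    elements = [Element.new('e1', 'continuous'),
                Element.new('e2', 'event'),
                Element.new('e3', 'discrete')]

    decimateable_elements =
      elements.select { |e| %w(continuous discrete).include? e.display_type }
    interval_elements = elements.select { |e| e.display_type == 'event' }
    # decimateable_elements => e1, e3; interval_elements => e2
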
......@@ -42,6 +42,7 @@ module Joule
size_on_disk = 0
start_time = nil
end_time = nil
locked = false
schema[:children].each do |child_schema|
child = db_folder.subfolders.find_by_joule_id(child_schema[:id])
child ||= DbFolder.new(parent: db_folder, db: db_folder.db)
......@@ -62,6 +63,7 @@ module Joule
end
end
updated_ids << child_schema[:id]
locked = true if child.locked?
end
# remove any subfolders that are no longer on the folder
db_folder.subfolders.where.not(joule_id: updated_ids).destroy_all
......@@ -87,6 +89,7 @@ module Joule
end_time = [stream.end_time, end_time].max
end
end
locked=true if stream.locked?
updated_ids << stream_schema[:id]
end
# remove any streams that are no longer in the folder
......@@ -95,6 +98,7 @@ module Joule
db_folder.size_on_disk = size_on_disk
db_folder.start_time = start_time
db_folder.end_time = end_time
db_folder.locked = locked
db_folder.save
end
......
......@@ -36,9 +36,19 @@ module Nilmdb
decimation_factor: data_service.decimation_factor}
end
def download_instructions(db_stream, start_time, end_time)
"# --------- NILMTOOL INSTRUCTIONS ----------
#
# raw data can be accessed using nilmtool, run:
#
# $> nilmtool -u #{@backend.url} extract -s @#{start_time} -e @#{end_time} #{db_stream.path}
#
# ------------------------------------------"
end
def node_type
'nilmdb'
end
end
end
......@@ -33,7 +33,7 @@ class DbFolder < ApplicationRecord
# end
def self.defined_attributes
[:name, :description, :hidden]
[:name, :description, :hidden, :locked]
end
def insert_stream(stream:)
......@@ -53,6 +53,6 @@ class DbFolder < ApplicationRecord
def self.json_keys
[:id, :name, :description, :path, :hidden,
:start_time, :end_time, :size_on_disk]
:start_time, :end_time, :size_on_disk, :locked]
end
end
......@@ -29,7 +29,7 @@ class DbStream < ApplicationRecord
validates_with DbDataTypeValidator
def self.defined_attributes
[:name, :name_abbrev, :description, :hidden, :data_type]
[:name, :name_abbrev, :description, :hidden, :data_type, :locked]
end
# def name_path
......@@ -59,7 +59,7 @@ class DbStream < ApplicationRecord
def self.json_keys
[:id, :name, :description, :path, :start_time,
:end_time, :size_on_disk, :total_rows, :total_time,
:data_type, :name_abbrev, :delete_locked, :hidden]
:data_type, :name_abbrev, :delete_locked, :locked, :hidden]
end
end
......@@ -31,11 +31,12 @@ class BuildDataset
return self
end
@data = _build_dataset(result[:data])
@legend[:columns] = _build_legend_columns(result[:data], db_stream)
@legend[:start_time] = start_time
@legend[:end_time] = end_time
@legend[:decimation_factor] = result[:decimation_factor]
@legend[:num_rows] = @data.length
@legend[:columns] = _build_legend_columns(result[:data], db_stream)
@legend[:start_time] = start_time
@legend[:end_time] = end_time
@legend[:decimation_factor] = result[:decimation_factor]
@legend[:num_rows] = @data.length
@legend[:download_instructions] = @node_adapter.download_instructions(db_stream, start_time, end_time)
if @data.empty?
@legend[:notes] = 'there is no data available over this interval'
elsif @data[0].length!=db_stream.db_elements.length+1
......
......@@ -24,13 +24,7 @@
# data: [1737x15 double] % the data
# textdata: {41x1 cell} % this help text
#
# --------- NILMTOOL INSTRUCTIONS ----------
#
# raw data can be accessed using nilmtool, run:
#
# $> nilmtool -u <%=@db.url%> extract -s @<%=@legend[:start_time]%> -e @<%=@legend[:end_time]%> <%=@db_stream.path%>
#
# ------------------------------------------
<%=@legend[:download_instructions].html_safe %>
#
<% @data.each do |row|%>
<%= row.join(", ")%>
......
class AddLockedToFolderAndStream < ActiveRecord::Migration[5.2]
def change
add_column :db_streams, :locked, :boolean
add_column :db_folders, :locked, :boolean
end
end
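
The new locked columns are nullable booleans with no default, so existing rows read NULL until a refresh against Joule populates them. If an explicit default were wanted, a follow-up backfill along these lines could be added (hypothetical, not part of this commit):

    # Hypothetical follow-up migration: mark existing rows as unlocked.
    # Assumes the DbStream and DbFolder models are available.
    class BackfillLockedColumns < ActiveRecord::Migration[5.2]
      def up
        DbStream.where(locked: nil).update_all(locked: false)
        DbFolder.where(locked: nil).update_all(locked: false)
      end

      def down
        # no-op; the backfilled values are harmless to keep
      end
    end
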
......@@ -10,7 +10,7 @@
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 2018_07_10_014435) do
ActiveRecord::Schema.define(version: 2018_08_07_001215) do
create_table "data_views", force: :cascade do |t|
t.integer "user_id"
......@@ -70,6 +70,7 @@ ActiveRecord::Schema.define(version: 2018_07_10_014435) do
t.integer "end_time", limit: 8
t.integer "size_on_disk", limit: 8
t.integer "joule_id"
t.boolean "locked"
t.index ["joule_id"], name: "index_db_folders_on_joule_id"
end
......@@ -91,6 +92,7 @@ ActiveRecord::Schema.define(version: 2018_07_10_014435) do
t.integer "size_on_disk", limit: 8
t.integer "db_id"
t.integer "joule_id"
t.boolean "locked"
t.index ["joule_id"], name: "index_db_streams_on_joule_id"
end
......
......@@ -26,9 +26,10 @@ describe Joule::Backend do
1531248642561047,
1531248642581047,
200)
expect(resp[:decimated]).to be false
expect(resp[:data].count).to be > 0
expect(resp[:data].count).to be < 200
expect(resp[:success]).to be true
expect(resp[:result][:decimated]).to be false
expect(resp[:result][:data].count).to be > 0
expect(resp[:result][:data].count).to be < 200
end
it 'loads decimated data', :vcr do
......@@ -37,8 +38,9 @@ describe Joule::Backend do
1531248642561047,
1531330705273202,
20)
expect(resp[:decimated]).to be true
expect(resp[:data].count).to be > 0
expect(resp[:data].count).to be < 200
expect(resp[:success]).to be true
expect(resp[:result][:decimated]).to be true
expect(resp[:result][:data].count).to be > 0
expect(resp[:result][:data].count).to be < 200
end
end
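
The new 'decimation error' branch of load_data is not exercised here; a hedged spec sketch for it might look like the following (the recorded response, `backend`, and stream id are assumptions following the surrounding examples):

    # Sketch only: assumes a cassette where Joule replies 400 with
    # 'decimated data is not available' for this range.
    it 'returns a decimation error when decimated data is unavailable', :vcr do
      resp = backend.load_data(joule_id,
                               1531248642561047,
                               1531330705273202,
                               20)
      expect(resp[:success]).to be false
      expect(resp[:result]).to eq 'decimation error'
    end
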
......@@ -18,6 +18,13 @@
"datatype": "INT16",
"keep_us": -1,
"decimate": true,
"data_info":{
"start":null,
"end":null,
"rows":0,
"bytes":0,
"total_time":0
},
"elements": [
{
"id": 12,
......@@ -62,6 +69,13 @@
"datatype": "UINT8",
"keep_us": -1,
"decimate": true,
"data_info":{
"start":null,
"end":null,
"rows":0,
"bytes":0,
"total_time":0
},
"elements": [
{
"id": 14,
......@@ -108,6 +122,13 @@
"datatype": "FLOAT32",
"keep_us": -1,
"decimate": true,
"data_info":{
"start":null,
"end":null,
"rows":0,
"bytes":0,
"total_time":0
},
"elements": [
{
"id": 17,
......@@ -169,6 +190,13 @@
"datatype": "INT32",
"keep_us": -1,
"decimate": true,
"data_info":{
"start":null,
"end":null,
"rows":0,
"bytes":0,
"total_time":0
},
"elements": [
{
"id": 20,
......@@ -219,6 +247,13 @@
"datatype": "UINT16",
"keep_us": -1,
"decimate": true,
"data_info":{
"start":null,
"end":null,
"rows":0,
"bytes":0,
"total_time":0
},
"elements": [
{
"id": 23,
......
......@@ -191,6 +191,7 @@ RSpec.describe DbStreamsController, type: :request do
num_rows: 2,
decimation_factor: 1,
notes: 'note_test_string',
download_instructions: 'how to download from CLI',
columns: [{index: 1, name: 'time', units: 'us'},
{index: 2, name: 'e1', units: 'watts'},
{index: 3, name: 'e2', units: 'joules'}]
......
......@@ -14,7 +14,8 @@ RSpec.describe 'BuildDataset' do
{id: elem1.id, type: 'raw', values: [[10,3],[11,4],nil,[12,5]]},
{id: elem2.id, type: 'raw', values: [[10,6],[11,7],nil,[12,8]]}]
@mock_adapter = instance_double(Nilmdb::Adapter,
load_data: { data: data, decimation_factor: 1})
load_data: { data: data, decimation_factor: 1},
download_instructions: "stub")
allow(NodeAdapterFactory).to receive(:from_nilm).and_return(@mock_adapter)
@service = BuildDataset.new(@mock_adapter)
@service.run(db_stream,0,100)
......@@ -44,7 +45,8 @@ RSpec.describe 'BuildDataset' do
{id: elem1.id, type: 'decimated', values: [[10,3,2,4],[11,4,3,5],nil,[12,5,6,7]]},
{id: elem2.id, type: 'interval', values: [[10,0],[11,0],nil,[12,0]]}]
@mock_adapter = instance_double(Nilmdb::Adapter,
load_data: { data: data, decimation_factor: 4})
load_data: { data: data, decimation_factor: 4},
download_instructions: "stub")
allow(NodeAdapterFactory).to receive(:from_nilm).and_return(@mock_adapter)
@service = BuildDataset.new(@mock_adapter)
@service.run(db_stream,0,100)
......@@ -70,7 +72,8 @@ RSpec.describe 'BuildDataset' do
{id: elem1.id, type: 'interval', values: [[10,0],[11,0],nil,[12,0]]},
{id: elem2.id, type: 'interval', values: [[10,0],[11,0],nil,[12,0]]}]
@mock_adapter = instance_double(Nilmdb::Adapter,
load_data: { data: data, decimation_factor: 1})
load_data: { data: data, decimation_factor: 1},
download_instructions: "stub")
#allow(LoadStreamData).to receive(:new).and_return(@mock_stream_service)
@service = BuildDataset.new(@mock_adapter)
@service.run(db_stream,0,100)
......@@ -88,7 +91,8 @@ RSpec.describe 'BuildDataset' do
{id: elem1.id, type: 'raw', values: []},
{id: elem2.id, type: 'raw', values: []}]
@mock_adapter = instance_double(Nilmdb::Adapter,
load_data:{data: data, decimation_factor: 1})
load_data:{data: data, decimation_factor: 1},
download_instructions: "stub")
#allow(LoadStreamData).to receive(:new).and_return(@mock_stream_service)
@service = BuildDataset.new(@mock_adapter)
@service.run(db_stream,0,100)
......