Commit e39c420e by John Doe

Added file downloads (**NOT TESTED** yet)

parent 17c1629d
...@@ -15,6 +15,14 @@ class DbStreamsController < ApplicationController ...@@ -15,6 +15,14 @@ class DbStreamsController < ApplicationController
end end
def data def data
@service = BuildDataset.new
@service.run(@db_stream,params[:start_time].to_i,params[:end_time].to_i)
unless @service.success?
head :unprocessable_entity
return
end
@data = @service.data
@legend = @service.legend
headers["Content-Disposition"] = "attachment; filename='#{@db_stream.name}.txt'" headers["Content-Disposition"] = "attachment; filename='#{@db_stream.name}.txt'"
render :layout=>false, :content_type => "text/plain" render :layout=>false, :content_type => "text/plain"
end end
......
# frozen_string_literal: true
# Creates a dataset array from the given stream
# at the highest resolution allowed by the db
#
class BuildDataset
  include ServiceStatus
  attr_reader :data, :legend

  def initialize
    super()
    # @data rows: [[ts, val1, val2, val3, ...],
    #              [ts, val1, val2, val3, ...], ...]
    @data = []
    @legend = {
      start_time: '',
      end_time: '',
      num_rows: '',
      decimation_factor: 1,
      columns: [], # [{index: 1, name: 'time', units: 'us'},...]
      notes: ''
    }
  end

  # Fill @data with values from db_stream over [start_time, end_time]
  # and populate @legend.
  #
  # db_stream  - stream record providing db url and db_elements
  # start_time - interval start (microseconds, integer)
  # end_time   - interval end (microseconds, integer)
  #
  # Returns self; errors are reported through ServiceStatus (add_error).
  def run(db_stream, start_time, end_time)
    adapter = DbAdapter.new(db_stream.db.url)
    data_service = LoadStreamData.new(adapter)
    data_service.run(db_stream, start_time, end_time)
    unless data_service.success?
      # FIX: was `stream.path` — `stream` is an undefined local, so the
      # error path itself raised NameError; the parameter is `db_stream`.
      add_error("unable to retrieve data for #{db_stream.path}")
      return self
    end
    @data = _build_dataset(data_service.data)
    @legend[:columns] = _build_legend_columns(data_service.data, db_stream)
    @legend[:start_time] = start_time
    @legend[:end_time] = end_time
    @legend[:decimation_factor] = data_service.decimation_factor
    @legend[:num_rows] = @data.length
    # Elements without data at this decimation level are dropped from the
    # dataset; record that so consumers of the file are not surprised.
    # FIX: guard against an empty dataset (`@data[0]` was nil -> NoMethodError).
    if !@data.empty? && @data.first.length != db_stream.db_elements.length + 1
      @legend[:notes] = 'some elements omitted due to insufficient decimation'
    end
    self
  end

  # Convert LoadStreamData's per-element data into a row-major dataset:
  # [[ts, v1, v2, ...], ...]. Entries of type 'interval' carry no
  # plottable values and are excluded.
  def _build_dataset(stream_data)
    # can only build a dataset if data is actually present (raw or decimated)
    valid_columns = stream_data.reject { |d| d[:type] == 'interval' }
    return [] if valid_columns.empty?
    # drop nil rows (interval break markers) from each element's values
    cleaned_columns = valid_columns.map { |element_data| element_data[:values].compact }
    data_columns = []
    # first column is the timestamp (shared by all elements)
    data_columns << cleaned_columns.first.transpose[0]
    # add each element's value column (index 1 of every [ts, value, ...] row)
    cleaned_columns.each { |data| data_columns << data.transpose[1] }
    data_columns.transpose
  end

  # Build the legend's column descriptors to match the dataset layout:
  # column 1 is the timestamp, then one column per non-interval element.
  def _build_legend_columns(stream_data, db_stream)
    legend_columns = [{ index: 1, name: 'time', units: 'us' }]
    legend_index = 2 # 1 is for timestamp
    stream_data.each do |d|
      next if d[:type] == 'interval'
      element = db_stream.db_elements.find_by_id(d[:id])
      legend_columns << {
        index: legend_index,
        name: element.name,
        units: element.units.blank? ? 'no units' : element.units
      }
      legend_index += 1
    end
    legend_columns
  end
end
...@@ -3,13 +3,14 @@ ...@@ -3,13 +3,14 @@
# Loads stream data over specified interval # Loads stream data over specified interval
class LoadStreamData class LoadStreamData
include ServiceStatus include ServiceStatus
attr_reader :data, :data_type attr_reader :data, :data_type, :decimation_factor
def initialize(db_adapter) def initialize(db_adapter)
super() super()
@db_adapter = db_adapter @db_adapter = db_adapter
@data = [] @data = []
@data_type = 'unset' # interval, raw, decimated @data_type = 'unset' # interval, raw, decimated
@decimation_factor = 1
end end
# load data at or below the resolution of the # load data at or below the resolution of the
...@@ -19,12 +20,16 @@ class LoadStreamData ...@@ -19,12 +20,16 @@ class LoadStreamData
# data_type: raw # data_type: raw
# data: # data:
# [{id: element_id, type: raw values: [[ts,y],[ts,y],nil,[ts,y]]},...] # [{id: element_id, type: raw values: [[ts,y],[ts,y],nil,[ts,y]]},...]
# data_type: interval #
# data:
# [{id: element_id, type: raw, values: [[start,0],[end,0],nil,...]}]
# data_type: decimated # data_type: decimated
# event data:
# [{id: element_id, type: decimated, values: [[start,0],[end,0],nil,...]}]
# continuous or discrete data:
# [{id: element_id, type: decimated, values: [[ts,y,ymin,ymax],[ts,y,ymin,ymax],nil,...]}]
#
# data_type: interval
# data: # data:
# [{id: element_id, type: raw, values: [[ts,y,ymin,ymax],[ts,y,ymin,ymax],nil,...]}] # [{id: element_id, type: decimated, values: [[start,0],[end,0],nil,...]}]
# #
def run(db_stream, start_time, end_time) def run(db_stream, start_time, end_time)
resolution = db_stream.db.max_points_per_plot resolution = db_stream.db.max_points_per_plot
...@@ -45,7 +50,8 @@ class LoadStreamData ...@@ -45,7 +50,8 @@ class LoadStreamData
@data = __build_interval_data(elements, resp) @data = __build_interval_data(elements, resp)
return self return self
end end
# request is plottable, see if we can get the raw (level 1) data # request is plottable, see if we can get the data
@decimation_factor = plottable_decim.level
path = __build_path(db_stream, plottable_decim.level) path = __build_path(db_stream, plottable_decim.level)
resp = @db_adapter.get_data(path, start_time, end_time) resp = @db_adapter.get_data(path, start_time, end_time)
if resp.nil? if resp.nil?
......
...@@ -4,26 +4,29 @@ ...@@ -4,26 +4,29 @@
# Source: <%=@nilm.name%> <%unless @nilm.description.blank?%>(<%=@nilm.description%>)<%end%> # Source: <%=@nilm.name%> <%unless @nilm.description.blank?%>(<%=@nilm.description%>)<%end%>
# #
# #
# start: -- # start: <%=Time.at(@legend[:start_time]/1e6)%>
# end: -- # end: <%=Time.at(@legend[:end_time]/1e6)%>
# total time: -- # total time: <%=distance_of_time_in_words(@legend[:end_time]/1e6-@legend[:start_time]/1e6)%>
# total rows: -- # total rows: <%=@legend[:num_rows]%>
# decimation factor: <%=@legend[:decimation_factor]%>
# #
#The raw data file can be retrieved at the following URL: # this file can be loaded directly into MATLAB
# <%=@db.url%>/stream/extract?path=<%=@db_stream.path%>&start=?&end=?
# #
# to import in matlab run: # dataset = importdata('thisfilename.csv')
# nilm = importdata('thisfilename.txt')
# #
# nilm.textdata: this help text # dataset.textdata: this help text
# nilm.data: the data # dataset.data: the data
# #
# The data has <%= @db_stream.db_elements.count+1 %> columns with the following format: # raw data can be accessed using nilmtool, run:
# nilmtool -u <%=@db.url%> extract -s @<%=@legend[:start_time]%> -e @<%=@legend[:end_time]%> <%=@db_stream.path%>
# #
# Column 1: Timestamp (microseconds) # <%=@legend[:notes]%>
<% col = 2%> # ----------- LEGEND ---------------
<%@db_stream.db_elements.each do |element|%> <%@legend[:columns].each do |col|%>
# Column <%=col%>: <%=element.name%> (<%=element.units%>) # Column <%=col[:index]%>: <%=col[:name]%> (<%=col[:units]%>)
<%col+=1%> <%end%>
<%end%> # -----------------------------------
# #
<% @data.each do |row|%>
<%= row.join(", ")%>
<%end%>
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment