Skip to content
Toggle navigation
P
Projects
G
Groups
S
Snippets
Help
wattsworth
/
lumen-api
This project
Loading...
Sign in
Toggle navigation
Go to a project
Project
Repository
Issues
0
Merge Requests
0
Pipelines
Wiki
Settings
Activity
Graph
Charts
Create a new issue
Jobs
Commits
Issue Boards
Files
Commits
Branches
Tags
Contributors
Graph
Compare
Charts
Commit
e39c420e
authored
May 21, 2017
by
John Doe
Browse files
Options
_('Browse Files')
Download
Email Patches
Plain Diff
added file downloads but **NOT TESTED**
parent
17c1629d
Show whitespace changes
Inline
Side-by-side
Showing
4 changed files
with
119 additions
and
23 deletions
app/controllers/db_streams_controller.rb
app/services/data/build_dataset.rb
app/services/data/load_stream_data.rb
app/views/db_streams/data.csv.erb
app/controllers/db_streams_controller.rb
View file @
e39c420e
...
...
@@ -15,6 +15,14 @@ class DbStreamsController < ApplicationController
end
# GET /db_streams/:id/data
#
# Builds a plain-text dataset for @db_stream over the requested interval
# and sends it to the client as a file download.
#
# Params:
#   start_time - interval start (string, coerced with to_i; units per DbAdapter)
#   end_time   - interval end   (string, coerced with to_i)
#
# Responds 422 Unprocessable Entity when the dataset cannot be built.
def data
  @service = BuildDataset.new
  @service.run(@db_stream, params[:start_time].to_i, params[:end_time].to_i)
  unless @service.success?
    head :unprocessable_entity
    return
  end
  @data = @service.data
  @legend = @service.legend
  # RFC 6266: the filename parameter must be a token or a double-quoted
  # string; the original single quotes would be saved as part of the
  # downloaded file's name by most browsers.
  headers['Content-Disposition'] =
    "attachment; filename=\"#{@db_stream.name}.txt\""
  render layout: false, content_type: 'text/plain'
end
...
...
app/services/data/build_dataset.rb
0 → 100644
View file @
e39c420e
# frozen_string_literal: true

# Creates a dataset array from the given stream
# at the highest resolution allowed by the db
#
class BuildDataset
  include ServiceStatus

  attr_reader :data, :legend

  def initialize
    super()
    # @data holds one row per timestamp:
    # [[ts, val1, val2, val3, ...],
    #  [ts, val1, val2, val3, ...]]
    @data = []
    @legend = {
      start_time: '',
      end_time: '',
      num_rows: '',
      decimation_factor: 1,
      # columns: [{index: 1, name: 'time', units: 'us'}, ...]
      columns: [],
      notes: ''
    }
  end

  # Fill @data with values from db_stream over [start_time, end_time]
  # and populate @legend. Returns self; callers must check #success?.
  def run(db_stream, start_time, end_time)
    adapter = DbAdapter.new(db_stream.db.url)
    data_service = LoadStreamData.new(adapter)
    data_service.run(db_stream, start_time, end_time)
    unless data_service.success?
      # FIX: was `stream.path` — `stream` is undefined in this scope, so a
      # load failure raised NameError instead of recording the error.
      add_error("unable to retrieve data for #{db_stream.path}")
      return self
    end
    @data = _build_dataset(data_service.data)
    @legend[:columns] = _build_legend_columns(data_service.data, db_stream)
    @legend[:start_time] = start_time
    @legend[:end_time] = end_time
    @legend[:decimation_factor] = data_service.decimation_factor
    @legend[:num_rows] = @data.length
    # FIX: guard the empty case — when no column is plottable, @data is []
    # and @data[0].length raised NoMethodError on nil.
    if !@data.empty? && @data[0].length != db_stream.db_elements.length + 1
      @legend[:notes] = 'some elements omitted due to insufficient decimation'
    end
    self
  end

  # Convert per-element stream data into row-oriented rows:
  # [[ts, v1, v2, ...], ...]. Entries of type 'interval' carry no
  # plottable values and are excluded.
  def _build_dataset(stream_data)
    # can only build a dataset if data is actually present (raw or decimated)
    valid_columns = stream_data.select { |d| d[:type] != 'interval' }
    return [] if valid_columns.empty?
    # drop the nil gap markers that separate data intervals
    cleaned_columns = valid_columns.map do |element_data|
      element_data[:values].compact
    end
    data_columns = []
    # first column is the timestamp (shared by every element)
    data_columns << cleaned_columns.first.transpose[0]
    # add element data by column
    cleaned_columns.each do |data|
      data_columns << data.transpose[1]
    end
    data_columns.transpose
  end

  # Build the legend's column descriptors: column 1 is always the
  # timestamp, followed by one entry per non-interval element, each
  # tagged with the element's name and units.
  def _build_legend_columns(stream_data, db_stream)
    legend_columns = [{ index: 1, name: 'time', units: 'us' }]
    legend_index = 2 # 1 is for timestamp
    stream_data.each do |d|
      next if d[:type] == 'interval'
      element = db_stream.db_elements.find_by_id(d[:id])
      legend_columns << {
        index: legend_index,
        name: element.name,
        units: element.units.blank? ? 'no units' : element.units
      }
      legend_index += 1
    end
    legend_columns
  end
end
app/services/data/load_stream_data.rb
View file @
e39c420e
...
...
@@ -3,13 +3,14 @@
# Loads stream data over specified interval
class
LoadStreamData
include
ServiceStatus
attr_reader
:data
,
:data_type
attr_reader
:data
,
:data_type
,
:decimation_factor
# Prepare an empty result set backed by the given database adapter.
# State populated later by #run:
#   @data              - per-element data arrays (empty until run)
#   @data_type         - one of 'unset', 'interval', 'raw', 'decimated'
#   @decimation_factor - level of decimation applied (1 = raw)
def initialize(db_adapter)
  super()
  @db_adapter = db_adapter
  @decimation_factor = 1
  @data_type = 'unset'
  @data = []
end
# load data at or below the resolution of the
...
...
@@ -19,12 +20,16 @@ class LoadStreamData
# data_type: raw
# data:
# [{id: element_id, type: raw values: [[ts,y],[ts,y],nil,[ts,y]]},...]
# data_type: interval
# data:
# [{id: element_id, type: raw, values: [[start,0],[end,0],nil,...]}]
#
# data_type: decimated
# event data:
# [{id: element_id, type: decimated, values: [[start,0],[end,0],nil,...]}]
# continuous or discrete data:
# [{id: element_id, type: decimated, values: [[ts,y,ymin,ymax],[ts,y,ymin,ymax],nil,...]}]
#
# data_type: interval
# data:
# [{id: element_id, type:
raw, values: [[ts,y,ymin,ymax],[ts,y,ymin,ymax
],nil,...]}]
# [{id: element_id, type:
decimated, values: [[start,0],[end,0
],nil,...]}]
#
def
run
(
db_stream
,
start_time
,
end_time
)
resolution
=
db_stream
.
db
.
max_points_per_plot
...
...
@@ -45,7 +50,8 @@ class LoadStreamData
@data
=
__build_interval_data
(
elements
,
resp
)
return
self
end
# request is plottable, see if we can get the raw (level 1) data
# request is plottable, see if we can get the data
@decimation_factor
=
plottable_decim
.
level
path
=
__build_path
(
db_stream
,
plottable_decim
.
level
)
resp
=
@db_adapter
.
get_data
(
path
,
start_time
,
end_time
)
if
resp
.
nil?
...
...
app/views/db_streams/data.csv.erb
View file @
e39c420e
...
...
@@ -4,26 +4,29 @@
# Source:
<%=
@nilm
.
name
%>
<%
unless
@nilm
.
description
.
blank?
%>
(
<%=
@nilm
.
description
%>
)
<%
end
%>
#
#
# start: --
# end: --
# total time: --
# total rows: --
# start:
<%=
Time
.
at
(
@legend
[
:start_time
]
/
1
e6
)
%>
# end:
<%=
Time
.
at
(
@legend
[
:end_time
]
/
1
e6
)
%>
# total time:
<%=
distance_of_time_in_words
(
@legend
[
:end_time
]
/
1
e6
-
@legend
[
:start_time
]
/
1
e6
)
%>
# total rows:
<%=
@legend
[
:num_rows
]
%>
# decimation factor:
<%=
@legend
[
:decimation_factor
]
%>
#
#The raw data file can be retrieved at the following URL:
#
<%=
@db
.
url
%>
/stream/extract?path=
<%=
@db_stream
.
path
%>
&
start=?
&
end=?
# this file can be loaded directly into MATLAB
#
# to import in matlab run:
# nilm = importdata('thisfilename.txt')
# dataset = importdata('thisfilename.csv')
#
#
nilm
.textdata: this help text
#
nilm
.data: the data
#
dataset
.textdata: this help text
#
dataset
.data: the data
#
# The data has
<%=
@db_stream
.
db_elements
.
count
+
1
%>
columns with the following format:
# raw data can be accessed using nilmtool, run:
# nilmtool -u
<%=
@db
.
url
%>
extract -s @
<%=
@legend
[
:start_time
]
%>
-e @
<%=
@legend
[
:end_time
]
%>
<%=
@db_stream
.
path
%>
#
#
Column 1: Timestamp (microseconds)
<%
col
=
2
%>
<%
@db_stream
.
db_elements
.
each
do
|
element
|
%>
#
Column
<%=
col
%>
:
<%=
element
.
name
%>
(
<%=
element
.
units
%>
)
<%
col
+=
1
%>
<%
end
%>
#
<%=
@legend
[
:notes
]
%>
# ----------- LEGEND ---------------
<%
@legend
[
:columns
].
each
do
|
col
|
%>
#
Column
<%=
col
[
:index
]
%>
:
<%=
col
[
:name
]
%>
(
<%=
col
[
:units
]
%>
)
<%
end
%>
# -----------------------------------
#
<%
@data
.
each
do
|
row
|
%>
<%=
row
.
join
(
", "
)
%>
<%
end
%>
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment