wattsworth / lumen-api
Commit ece8d62d
authored May 22, 2017 by John Doe
parent e39c420e

    added tests for data download

Showing 6 changed files with 219 additions and 14 deletions:
app/services/data/build_dataset.rb
app/services/data/load_stream_data.rb
app/services/db_folder/update_folder.rb
config/initializers/new_framework_defaults.rb
spec/controllers/db_streams_controller_spec.rb
spec/services/data/build_dataset_spec.rb

app/services/data/build_dataset.rb

@@ -26,9 +26,11 @@ class BuildDataset
   def run(db_stream, start_time, end_time)
     adapter = DbAdapter.new(db_stream.db.url)
     data_service = LoadStreamData.new(adapter)
-    data_service.run(db_stream, start_time, end_time)
+    absorb_status(data_service.run(db_stream, start_time, end_time))
     unless data_service.success?
-      add_error("unable to retrieve data for #{stream.path}")
+      add_error("unable to retrieve data for #{db_stream.path}")
       return self
     end
     @data = _build_dataset(data_service.data)
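
The first change routes the nested service call through absorb_status instead of discarding its result. That helper is defined elsewhere in the service layer and is not shown in this diff; a minimal sketch of the pattern it suggests, assuming (as the specs below stub) that services expose errors, warnings, and notices arrays:

    # Hypothetical sketch only -- not code from this commit.
    # Fold a nested service's messages into the calling service's status.
    def absorb_status(service)
      @errors   += service.errors
      @warnings += service.warnings
      @notices  += service.notices
      service.success?
    end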

@@ -37,7 +39,9 @@ class BuildDataset
     @legend[:end_time] = end_time
     @legend[:decimation_factor] = data_service.decimation_factor
     @legend[:num_rows] = @data.length
-    if(@data[0].length != db_stream.db_elements.length + 1)
+    if(@data.empty?)
+      @legend[:notes] = 'there is no data available over this interval'
+    elsif(@data[0].length != db_stream.db_elements.length + 1)
       @legend[:notes] = 'some elements omitted due to insufficient decimation'
     end
     self
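
The elsif condition compares row width against db_stream.db_elements.length + 1 because every dataset row is laid out as [timestamp, element values...]; a stream with three elements should therefore yield four-column rows, and anything narrower means elements were dropped. A hypothetical illustration (values invented):

    # three elements -> expected rows of [ts, e0, e1, e2]
    row = [10, 0.5, 1.5, 2.5]
    row.length == 3 + 1  # => true; a shorter row triggers the decimation note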

@@ -48,16 +52,20 @@ class BuildDataset
     valid_columns = stream_data.select { |d| d[:type] != 'interval' }
     return [] if(valid_columns.empty?)
-    cleaned_columns = valid_columns.map do |element_data|
+    column_values = valid_columns.map do |element_data|
       element_data[:values].select { |row| row != nil }
     end
+    return [] if column_values.first.empty?
     data_columns = []
     #first column is the timestamp
-    data_columns << cleaned_columns.first.transpose[0]
-    #add element data by column
-    cleaned_columns.each do |data|
-      data_columns << data.transpose[1]
+    data_columns << column_values.first.transpose[0]
+    #...second column is the data,
+    column_values.each do |values|
+      #append values column wise
+      data_columns << values.transpose[1]
     end
+    #flip back to row wise
     data_columns.transpose
   end
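
To make the column-wise assembly above concrete, here is a small worked example with invented values: timestamps are taken from the first element's series, each series contributes its value column, and the final transpose flips everything back into rows.

    column_values = [
      [[10, 0], [11, 1]],  # element 0: [timestamp, value] pairs
      [[10, 3], [11, 4]]   # element 1
    ]
    data_columns = []
    data_columns << column_values.first.transpose[0]                     # => [10, 11]
    column_values.each { |values| data_columns << values.transpose[1] }  # value columns
    data_columns.transpose                                               # => [[10, 0, 3], [11, 1, 4]]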

app/services/data/load_stream_data.rb

@@ -23,7 +23,7 @@ class LoadStreamData
   #
   # data_type: decimated
   #  event data:
-  #  [{id: element_id, type: decimated, values: [[start,0],[end,0],nil,...]}]
+  #  [{id: element_id, type: interval, values: [[start,0],[end,0],nil,...]}]
   #  continuous or discrete data:
   #  [{id: element_id, type: decimated, values: [[ts,y,ymin,ymax],[ts,y,ymin,ymax],nil,...]}]
   #
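
The corrected comment matters downstream: BuildDataset drops entries whose type is 'interval' before assembling columns (see the select in _build_dataset above). A small illustration with made-up ids:

    stream_data = [
      { id: 1, type: 'decimated', values: [[10, 0, -1, 1], [11, 1, 0, 2]] },
      { id: 2, type: 'interval',  values: [[10, 0], [11, 0]] }
    ]
    stream_data.select { |d| d[:type] != 'interval' }.map { |d| d[:id] }  # => [1]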

app/services/db_folder/update_folder.rb

@@ -52,7 +52,7 @@ class UpdateFolder
     @folder.size_on_disk = @size_on_disk
     # save the result
     unless @folder.valid?
-      byebug
+      Rails.logger.warn("invalid folder: #{@folder.name}")
     end
     @folder.save!
     set_notice("Folder updated")

config/initializers/new_framework_defaults.rb

@@ -7,17 +7,17 @@
 # Read the Guide for Upgrading Ruby on Rails for more info on each option.

 # Enable per-form CSRF tokens. Previous versions had false.
-Rails.application.config.action_controller.per_form_csrf_tokens = false
+# Rails.application.config.action_controller.per_form_csrf_tokens = false

 # Enable origin-checking CSRF mitigation. Previous versions had false.
-Rails.application.config.action_controller.forgery_protection_origin_check = false
+# Rails.application.config.action_controller.forgery_protection_origin_check = false

 # Make Ruby 2.4 preserve the timezone of the receiver when calling `to_time`.
 # Previous versions had false.
-ActiveSupport.to_time_preserves_timezone = false
+# ActiveSupport.to_time_preserves_timezone = false

 # Require `belongs_to` associations by default. Previous versions had false.
 Rails.application.config.active_record.belongs_to_required_by_default = false

 # Do not halt callback chains when a callback returns false. Previous versions had true.
-ActiveSupport.halt_callback_chains_on_return_false = true
+# ActiveSupport.halt_callback_chains_on_return_false = false

spec/controllers/db_streams_controller_spec.rb

@@ -119,4 +119,72 @@ RSpec.describe DbStreamsController, type: :request do
       end
     end
   end
+
+  describe 'POST data' do
+    before do
+      @mock_adapter = double(DbAdapter) # MockDbAdapter.new #instance_double(DbAdapter)
+      @db_success = { error: false, msg: 'success' }
+      @db_failure = { error: true, msg: 'dberror' }
+      allow(DbAdapter).to receive(:new).and_return(@mock_adapter)
+    end
+    context 'with viewer permissions' do
+      it 'returns dataset file as csv file' do
+        # 2 elements, two rows
+        @service_data = [[1e6, 1, 2], [2e6, 3, 4]]
+        @service_legend = {
+          start_time: 1e6, end_time: 1e6, num_rows: 2,
+          decimation_factor: 1, notes: 'note_test_string',
+          columns: [{ index: 1, name: 'time', units: 'us' },
+                    { index: 2, name: 'e1', units: 'watts' },
+                    { index: 3, name: 'e2', units: 'joules' }]
+        }
+        @mock_service = instance_double(BuildDataset,
+                                        run: StubService.new,
+                                        success?: true,
+                                        data: @service_data,
+                                        legend: @service_legend)
+        allow(BuildDataset).to receive(:new).and_return(@mock_service)
+        @auth_headers = john.create_new_auth_token
+        post "/db_streams/#{@stream.id}/data.csv",
+             params: { start_time: 0, end_time: 100 },
+             headers: @auth_headers
+        expect(response).to have_http_status(:ok)
+        text = response.body
+        expect(text).to include 'note_test_string'
+      end
+      it 'returns error if data cannot be found' do
+        @mock_service = instance_double(BuildDataset,
+                                        run: StubService.new,
+                                        success?: false)
+        allow(BuildDataset).to receive(:new).and_return(@mock_service)
+        @auth_headers = john.create_new_auth_token
+        post "/db_streams/#{@stream.id}/data.csv",
+             params: { start_time: 0, end_time: 100 },
+             headers: @auth_headers
+        expect(response).to have_http_status(:unprocessable_entity)
+      end
+    end
+    context 'without viewer permissions' do
+      it 'returns unauthorized' do
+        @auth_headers = steve.create_new_auth_token
+        post "/db_streams/#{@stream.id}/data.json",
+             params: { name: 'ignored' },
+             headers: @auth_headers
+        expect(response).to have_http_status(:unauthorized)
+      end
+    end
+    context 'without sign-in' do
+      it 'returns unauthorized' do
+        post "/db_streams/#{@stream.id}/data.json",
+             params: { start_time: 0, end_time: 100 }
+        expect(response).to have_http_status(:unauthorized)
+      end
+    end
+  end
 end
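
The CSV spec only asserts a 200 response whose body contains the legend notes; the actual rendering lives in the controller and is not part of this diff. As a rough, hypothetical sketch of that kind of serialization (names and layout assumed, not taken from the project), using Ruby's standard CSV library:

    require 'csv'

    # Hypothetical sketch only -- not code from this commit.
    # Serialize a BuildDataset-style result (data rows + legend) into CSV text.
    def dataset_csv(data, legend)
      CSV.generate do |csv|
        csv << ["# #{legend[:notes]}"] unless legend[:notes].to_s.empty?
        csv << legend[:columns].map { |c| "#{c[:name]} (#{c[:units]})" }
        data.each { |row| csv << row }
      end
    end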

spec/services/data/build_dataset_spec.rb (new file, mode 100644)

# frozen_string_literal: true
require 'rails_helper'

RSpec.describe 'BuildDataset' do
  let(:db) { create(:db, max_points_per_plot: 100) }
  let(:db_stream) { create(:db_stream, db: db, elements_count: 0) }
  let(:elem0) { create(:db_element, name: 'e0_continuous', display_type: 'continuous', column: 0, units: 'c', db_stream: db_stream) }
  let(:elem1) { create(:db_element, name: 'e1_discrete', display_type: 'discrete', column: 1, units: 'd', db_stream: db_stream) }
  let(:elem2) { create(:db_element, name: 'e2_event', display_type: 'event', column: 2, units: nil, db_stream: db_stream) }

  describe 'when stream service returns raw data' do
    before do
      data = [{ id: elem0.id, type: 'raw', values: [[10, 0], [11, 1], nil, [12, 2]] },
              { id: elem1.id, type: 'raw', values: [[10, 3], [11, 4], nil, [12, 5]] },
              { id: elem2.id, type: 'raw', values: [[10, 6], [11, 7], nil, [12, 8]] }]
      @mock_stream_service = instance_double(LoadStreamData,
                                             run: StubService.new,
                                             success?: true,
                                             data: data,
                                             decimation_factor: 1)
      allow(LoadStreamData).to receive(:new).and_return(@mock_stream_service)
      @service = BuildDataset.new
      @service.run(db_stream, 0, 100)
    end

    it 'builds the dataset' do
      expect(@service.success?).to be true
      expect(@service.data).to eq([[10, 0, 3, 6], [11, 1, 4, 7], [12, 2, 5, 8]])
    end

    it 'builds the legend' do
      legend = @service.legend
      expect(legend[:start_time]).to eq 0
      expect(legend[:end_time]).to eq 100
      expect(legend[:num_rows]).to eq 3
      expect(legend[:decimation_factor]).to eq 1
      expect(legend[:columns]).to eq [
        { index: 1, name: 'time', units: 'us' },
        { index: 2, name: 'e0_continuous', units: 'c' },
        { index: 3, name: 'e1_discrete', units: 'd' },
        { index: 4, name: 'e2_event', units: 'no units' }
      ]
      expect(legend[:notes]).to be_blank
    end
  end

  describe 'when stream service returns decimated data' do
    before do
      data = [{ id: elem0.id, type: 'decimated', values: [[10, 0, -1, 1], [11, 1, 0, 2], nil, [12, 2, 1, 3]] },
              { id: elem1.id, type: 'decimated', values: [[10, 3, 2, 4], [11, 4, 3, 5], nil, [12, 5, 6, 7]] },
              { id: elem2.id, type: 'interval', values: [[10, 0], [11, 0], nil, [12, 0]] }]
      @mock_stream_service = instance_double(LoadStreamData,
                                             run: StubService.new,
                                             success?: true,
                                             data: data,
                                             decimation_factor: 4)
      allow(LoadStreamData).to receive(:new).and_return(@mock_stream_service)
      @service = BuildDataset.new
      @service.run(db_stream, 0, 100)
    end

    it 'omits event elements' do
      expect(@service.success?).to be true
      expect(@service.data).to eq([[10, 0, 3], [11, 1, 4], [12, 2, 5]])
    end

    it 'adds note to legend' do
      legend = @service.legend
      expect(legend[:decimation_factor]).to eq 4
      expect(legend[:columns]).to eq [
        { index: 1, name: 'time', units: 'us' },
        { index: 2, name: 'e0_continuous', units: 'c' },
        { index: 3, name: 'e1_discrete', units: 'd' }
      ]
      expect(legend[:notes]).to_not be_blank
    end
  end

  describe 'when stream service returns interval data' do
    before do
      data = [{ id: elem0.id, type: 'interval', values: [[10, 0], [11, 0], nil, [12, 0]] },
              { id: elem1.id, type: 'interval', values: [[10, 0], [11, 0], nil, [12, 0]] },
              { id: elem2.id, type: 'interval', values: [[10, 0], [11, 0], nil, [12, 0]] }]
      @mock_stream_service = instance_double(LoadStreamData,
                                             run: StubService.new,
                                             success?: true,
                                             data: data,
                                             decimation_factor: 1)
      allow(LoadStreamData).to receive(:new).and_return(@mock_stream_service)
      @service = BuildDataset.new
      @service.run(db_stream, 0, 100)
    end

    it 'returns no data' do
      expect(@service.data).to be_empty
    end

    it 'adds note to legend' do
      expect(@service.legend[:notes]).to_not be_empty
    end
  end

  describe 'when stream service returns no data' do
    before do
      data = [{ id: elem0.id, type: 'raw', values: [] },
              { id: elem1.id, type: 'raw', values: [] },
              { id: elem2.id, type: 'raw', values: [] }]
      @mock_stream_service = instance_double(LoadStreamData,
                                             run: StubService.new,
                                             success?: true,
                                             data: data,
                                             decimation_factor: 1)
      allow(LoadStreamData).to receive(:new).and_return(@mock_stream_service)
      @service = BuildDataset.new
      @service.run(db_stream, 0, 100)
    end

    it 'returns no data' do
      expect(@service.data).to be_empty
    end
  end

  describe 'when stream service returns error' do
    before do
      @mock_stream_service = instance_double(LoadStreamData,
                                             run: StubService.new,
                                             success?: false,
                                             errors: ['generic error'],
                                             warnings: [],
                                             notices: [])
      allow(LoadStreamData).to receive(:new).and_return(@mock_stream_service)
      @service = BuildDataset.new
      @service.run(db_stream, 0, 100)
    end

    it 'returns error' do
      expect(@service.success?).to be false
      expect(@service.errors).to_not be_empty
    end
  end
end
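
The new spec can be run on its own with `bundle exec rspec spec/services/data/build_dataset_spec.rb`, assuming the project's usual Bundler and RSpec setup.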