Commit f651e230 by John Doe

refactored file->stream, stream->element, improved DbAdapter parsing

parent 56d1f6b4
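For orientation: the commit renames the middle layers of the data hierarchy (DbFile becomes DbStream, and the old DbStream becomes DbElement). The sketch below is editorial, assembled from the model diffs further down, and is not part of the commit itself:

# post-refactor hierarchy (sketch; see the model diffs below for the full definitions)
class DbFolder < ActiveRecord::Base
  has_many :db_streams, dependent: :destroy    # was: has_many :db_files
end
class DbStream < ActiveRecord::Base            # was: DbFile
  belongs_to :db_folder
  has_many :db_elements, dependent: :destroy   # was: has_many :db_streams
  has_many :db_decimations, dependent: :destroy
end
class DbElement < ActiveRecord::Base           # was: DbStream
  belongs_to :db_stream
end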
@@ -8,17 +8,20 @@ class DbAdapter
     @url = url
   end
-  def schema # rubocop:disable Metrics/MethodLength
+  def schema
     # GET extended info stream list
     dump = self.class.get("#{@url}/stream/list?extended=1")
     dump.parsed_response.map do |entry|
-      metadata = __get_metadata(entry[0])
+      metadata = if entry[0].match(UpdateStream.decimation_tag).nil?
+                   __get_metadata(entry[0])
+                 else
+                   {} # decimation entry, no need to pull metadata
+                 end
       # The streams are not pure attributes, pull them out
-      streams = metadata.delete(:streams) || {}
+      elements = metadata.delete(:streams) || []
+      elements.each(&:symbolize_keys!)
       # Create the schema:
-      # 3 elements: path, attributes, streams
+      # 3 elements: path, attributes, elements
       {
         path: entry[0],
         attributes: {
@@ -28,7 +31,7 @@ class DbAdapter
           total_rows: entry[4],
           total_time: entry[5]
         }.merge(metadata),
-        streams: streams
+        elements: elements
       }
     end
   end
@@ -39,10 +42,22 @@ class DbAdapter
     metadata = JSON.parse(dump.parsed_response['config_key__'] || '{}')
     # Add plain-text metadata keys (retrofit for *info streams which keep
     # attributes in separate metadata tags
-    metadata.merge!(dump.parsed_response.slice('delete_locked',
-                                                'description',
-                                                'hidden',
-                                                'name'))
+    metadata.merge!(dump.parsed_response)
+    __sanitize_metadata(metadata)
+  end
+
+  # make sure all the keys are valid parameters
+  def __sanitize_metadata(metadata)
+    metadata.slice!('delete_locked', 'description', 'hidden', 'name',
+                    'streams')
+    if(metadata['streams'] != nil)
+      # sanitize 'streams' (elements) parameters
+      element_attrs = DbElement.attribute_names.map(&:to_sym)
+      metadata['streams'].map! do |element|
+        element.symbolize_keys
+               .slice(*element_attrs)
+      end
+    end
     metadata.symbolize_keys
   end
 end
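For reference, an entry returned by the revised schema method has roughly the shape built by DbSchemaHelper later in this diff; the values below are illustrative only, not output from a real NilmDB:

{
  path: '/folder1/stream1',
  attributes: {
    data_type: 'float32_3',
    start_time: 0,
    end_time: 0,
    total_rows: 0,
    total_time: 0,
    name: 'stream1'   # merged in from the sanitized metadata
  },
  elements: [
    { name: 'element0', units: 'unit', column: 0 },
    { name: 'element1', units: 'unit', column: 1 },
    { name: 'element2', units: 'unit', column: 2 }
  ]
}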
# frozen_string_literal: true # frozen_string_literal: true
# Controller for DbFiles # controller for DbStreams
class DbFilesController < ApplicationController class DbElementsController < ApplicationController
end end
# frozen_string_literal: true # frozen_string_literal: true
# controller for DbStreams # Controller for DbFiles
class DbStreamsController < ApplicationController class DbStreamsController < ApplicationController
end end
...@@ -2,4 +2,8 @@ ...@@ -2,4 +2,8 @@
# controller for NILM objects # controller for NILM objects
class NilmsController < ApplicationController class NilmsController < ApplicationController
def index
nilms = Nilm.all
render json: nilms
end
end end
...@@ -2,7 +2,7 @@ ...@@ -2,7 +2,7 @@
# Decimation level of a file # Decimation level of a file
class DbDecimation < ActiveRecord::Base class DbDecimation < ActiveRecord::Base
belongs_to :db_file belongs_to :db_stream
def as_json(_options = {}) def as_json(_options = {})
super(except: [:created_at, :updated_at]) super(except: [:created_at, :updated_at])
......
# frozen_string_literal: true
# a column in a stream, this is the lowest element
# in the db hierarchy and contains actual data
class DbElement < ActiveRecord::Base
belongs_to :db_stream
def as_json(_options = {})
super(except: [:created_at, :updated_at])
end
end
# frozen_string_literal: true
# validate that the file has the appropriate number
# of streams given the format string
class DbDataTypeValidator < ActiveModel::Validator
def validate(record)
# streams might not be built yet
return if record.db_streams.count == 0
# TODO: check for valid format strings (float32, uint8, etc)
unless record.db_streams.count == record.column_count
record.errors[:base] << "must have #{record.column_count} \
streams for format #{record.data_type}"
end
end
end
# A file in the database, contains one or more Streams
class DbFile < ActiveRecord::Base
belongs_to :db_folder
has_many :db_streams, dependent: :destroy
has_many :db_decimations, dependent: :destroy
validates_with DbDataTypeValidator
def defined_attributes
[:name, :name_abbrev, :description, :hidden]
end
def remove(db_service:)
db_service.remove_file(path)
destroy
end
def data_format
/^(\w*)_\d*$/.match(data_type)[1]
end
def column_count
/^\w*_(\d*)$/.match(data_type)[1].to_i
end
def as_json(_options = {})
file = super(except: [:created_at, :updated_at])
file[:streams] = db_streams.map(&:as_json)
file[:decimations] = db_decimations.map(&:as_json)
file
end
end
...@@ -9,25 +9,25 @@ class DbFolder < ActiveRecord::Base ...@@ -9,25 +9,25 @@ class DbFolder < ActiveRecord::Base
foreign_key: 'parent_id', foreign_key: 'parent_id',
dependent: :destroy dependent: :destroy
has_many :db_files, has_many :db_streams,
dependent: :destroy dependent: :destroy
def self.defined_attributes def self.defined_attributes
[:name, :description, :hidden] [:name, :description, :hidden]
end end
def insert_file(file:) def insert_stream(stream:)
# add the file to this folder # add the stream to this folder
file.db_folder = self stream.db_folder = self
# verify that the file can be here # verify that the file can be here
return false unless file.valid? return false unless stream.valid?
true true
end end
def as_json(_options = {}) def as_json(_options = {})
folder = super(except: [:created_at, :updated_at]) folder = super(except: [:created_at, :updated_at])
folder[:subfolders] = subfolders.map(&:as_json) folder[:subfolders] = subfolders.map(&:as_json)
folder[:files] = db_files.map(&:as_json) folder[:streams] = db_streams.map(&:as_json)
folder folder
end end
end end
 # frozen_string_literal: true
-# a stream in the database, this is the lowest element
-# in the db hierarchy and contains actual data
+# validate that the file has the appropriate number
+# of streams given the format string
+class DbDataTypeValidator < ActiveModel::Validator
+  def validate(record)
+    # streams might not be built yet
+    return if record.db_elements.count.zero?
+    # TODO: check for valid format strings (float32, uint8, etc)
+    unless record.db_elements.count == record.column_count
+      record.errors[:base] << "must have #{record.column_count} \
+        elements for format #{record.data_type}"
+    end
+  end
+end
+# A file in the database, contains one or more Streams
 class DbStream < ActiveRecord::Base
-  belongs_to :db_file
+  belongs_to :db_folder
+  has_many :db_elements, dependent: :destroy
+  has_many :db_decimations, dependent: :destroy
+  validates_with DbDataTypeValidator
+  def defined_attributes
+    [:name, :name_abbrev, :description, :hidden]
+  end
+  def remove(db_service:)
+    db_service.remove_file(path)
+    destroy
+  end
+  def data_format
+    /^(\w*)_\d*$/.match(data_type)[1]
+  end
+  def column_count
+    /^\w*_(\d*)$/.match(data_type)[1].to_i
+  end
   def as_json(_options = {})
-    super(except: [:created_at, :updated_at])
+    stream = super(except: [:created_at, :updated_at])
+    stream[:elements] = db_elements.map(&:as_json)
+    stream[:decimations] = db_decimations.map(&:as_json)
+    stream
   end
 end
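A quick illustration of how the renamed DbStream interprets its data_type string (example values only): data_format drops the trailing column count, and column_count is what DbDataTypeValidator checks against the number of db_elements.

stream = DbStream.new(data_type: 'float32_3')
stream.data_format   # => "float32"  (via /^(\w*)_\d*$/)
stream.column_count  # => 3          (via /^\w*_(\d*)$/)
# DbDataTypeValidator then requires exactly 3 associated db_elements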
...@@ -3,4 +3,9 @@ ...@@ -3,4 +3,9 @@
# NILM object # NILM object
class Nilm < ActiveRecord::Base class Nilm < ActiveRecord::Base
has_one :db has_one :db
def as_json(_options = {})
nilm = super(except: [:created_at, :updated_at])
nilm
end
end end
@@ -4,31 +4,30 @@
 class EditFolder
   include ServiceStatus
   def initialize(db_adapter)
     super()
+    @db_adapter = db_adapter
   end
-  def run(db_file, *attribs)
+  def run(db_stream, **attribs)
     # only accept valid attributes
     attribs.slice!([:name, :description])
     # assign the new attributes and check if the
-    # result is valid (eg file's can't have the same name)
-    db_file.assign_attributes(attribs)
-    unless db_file.valid?
-      add_error(db_file.errors)
+    # result is valid (eg stream's can't have the same name)
+    db_stream.assign_attributes(attribs)
+    unless db_stream.valid?
+      add_error(db_stream.errors)
       return self
     end
     # local model checks out, update the remote NilmDB
-    db_adapter.update_metadata(db_file.path,
-                               attribs.filter { |x| x.in[:name, :description] })
+    @db_adapter.update_metadata(db_stream.path, attribs)
     # if there was an error don't save the model
-    if db_adapter.status == ERROR:
+    if db_adapter.status == ERROR
       add_error(db_adapter.error_msg)
       return self
     end
     # everything went well, save the model
-    db_file.save!
+    db_stream.save!
     self
   end
 end
# frozen_string_literal: true # frozen_string_literal: true
# NOTE: This file is out of date!!
# Agent class for DbFolders # Agent class for DbFolders
class InsertFile class InsertStream
attr_accessor :error_msg attr_accessor :error_msg
def initialize(db_service:, db_builder:) def initialize(db_service:, db_builder:)
...@@ -9,31 +9,31 @@ class InsertFile ...@@ -9,31 +9,31 @@ class InsertFile
@db_builder = db_builder @db_builder = db_builder
end end
def insert_file(folder:, file:) def insert_stream(folder:, stream:)
@error_msg = '' @error_msg = ''
return false unless __put_file_in_folder(file: file, folder: folder) return false unless __put_stream_in_folder(stream: stream, folder: folder)
return false unless __make_path_for_file(file: file, folder: folder) return false unless __make_path_for_stream(stream: stream, folder: folder)
return false unless __create_file_on_db(file: file) return false unless __create_stream_on_db(stream: stream)
file.save! stream.save!
end end
def __put_file_in_folder(file:, folder:) def __put_stream_in_folder(stream:, folder:)
return true if folder.insert_file(file: file) return true if folder.insert_stream(stream: stream)
@error_msg = "could not add file to folder #{folder.name}" @error_msg = "could not add stream to folder #{folder.name}"
false false
end end
def __make_path_for_file(file:, folder:) def __make_path_for_stream(stream:, folder:)
file.path = @db_builder.build_path(folder_path: folder.path, stream.path = @db_builder.build_path(folder_path: folder.path,
file_name: file.name) stream_name: stream.name)
true true
end end
def __create_file_on_db(file:) def __create_stream_on_db(stream:)
return true if @db_service.create_file(file) return true if @db_service.create_stream(stream)
@error_msg = "from db_service: #{db_service.error_msg}" @error_msg = "from db_service: #{db_service.error_msg}"
file.path = '' # clear out the file settings stream.path = '' # clear out the stream settings
file.folder = nil stream.folder = nil
false false
end end
end end
...@@ -12,7 +12,7 @@ class UpdateFolder ...@@ -12,7 +12,7 @@ class UpdateFolder
# array are no longer present on the remote db # array are no longer present on the remote db
# and will be destroyed # and will be destroyed
@subfolder_ids = folder.subfolders.ids @subfolder_ids = folder.subfolders.ids
@file_ids = folder.db_files.ids @stream_ids = folder.db_streams.ids
super() super()
end end
...@@ -25,13 +25,13 @@ class UpdateFolder ...@@ -25,13 +25,13 @@ class UpdateFolder
) )
# process the contents of the folder # process the contents of the folder
__parse_folder_entries(@folder, @entries) __parse_folder_entries(@folder, @entries)
# delete any files or folders still in the # delete any streams or folders still in the
# tracked ID arrays, they haven't been touched # tracked ID arrays, they haven't been touched
# so they must have been removed from the remote # so they must have been removed from the remote
# db some other way (eg nilmtool) # db some other way (eg nilmtool)
unless @file_ids.empty? unless @stream_ids.empty?
@folder.db_files.destroy(*@file_ids) @folder.db_streams.destroy(*@stream_ids)
add_warning('Removed files no longer in the remote database') add_warning('Removed streams no longer in the remote database')
end end
unless @subfolder_ids.empty? unless @subfolder_ids.empty?
...@@ -54,18 +54,18 @@ class UpdateFolder ...@@ -54,18 +54,18 @@ class UpdateFolder
end end
info_entry ||= {} info_entry ||= {}
# if there is an info entry, remove it from the array # if there is an info entry, remove it from the array
# so we don't process it as a separate file # so we don't process it as a separate stream
entries.delete(info_entry) entries.delete(info_entry)
# return the attributes # return the attributes
info_entry[:attributes] info_entry[:attributes]
end end
# Creates or updates the folder defined by these entries. # Creates or updates the folder defined by these entries.
# Then adds in any subfolders or subfiles # Then adds in any subfolders or streams
def __parse_folder_entries(folder, entries) def __parse_folder_entries(folder, entries)
# group the folder entries # group the folder entries
groups = __group_entries(entries) groups = __group_entries(entries)
# process the groups as subfolders or files # process the groups as subfolders or streams
__process_folder_contents(folder, groups) __process_folder_contents(folder, groups)
# return the updated folder # return the updated folder
folder folder
...@@ -80,7 +80,7 @@ class UpdateFolder ...@@ -80,7 +80,7 @@ class UpdateFolder
entry_groups = {} entry_groups = {}
entries.map do |entry| entries.map do |entry|
# group streams by their base paths (ignore ~decim endings) # group streams by their base paths (ignore ~decim endings)
group_name = entry[:chunks].pop.gsub(UpdateFile.decimation_tag, '') group_name = entry[:chunks].pop.gsub(UpdateStream.decimation_tag, '')
__add_to_group(entry_groups, group_name, entry) __add_to_group(entry_groups, group_name, entry)
end end
entry_groups entry_groups
...@@ -97,11 +97,11 @@ class UpdateFolder ...@@ -97,11 +97,11 @@ class UpdateFolder
end end
end end
# convert the groups into subfolders and files # convert the groups into subfolders and streams
def __process_folder_contents(folder, groups) def __process_folder_contents(folder, groups)
groups.each do |name, entry_group| groups.each do |name, entry_group|
if file?(entry_group) if stream?(entry_group)
updater = __build_file(folder, entry_group, name) updater = __build_stream(folder, entry_group, name)
next if updater.nil? # ignore orphaned decimations next if updater.nil? # ignore orphaned decimations
else # its a folder else # its a folder
updater = __build_folder(folder, entry_group, name) updater = __build_folder(folder, entry_group, name)
...@@ -110,31 +110,31 @@ class UpdateFolder ...@@ -110,31 +110,31 @@ class UpdateFolder
end end
end end
# determine if the entry groups constitute a single file # determine if the entry groups constitute a single stream
def file?(entry_group) def stream?(entry_group)
# if any entry_group has chunks left, this is a folder # if any entry_group has chunks left, this is a folder
entry_group.select { |entry| entry_group.select { |entry|
!entry[:chunks].empty? !entry[:chunks].empty?
}.count.zero? }.count.zero?
end end
# create or update a DbFile object at the # create or update a DbStream object at the
# specified path. # specified path.
def __build_file(folder, entry_group, def __build_stream(folder, entry_group,
default_name) default_name)
base = __base_entry(entry_group) base = __base_entry(entry_group)
unless base # corrupt file, don't process unless base # corrupt stream, don't process
add_warning("#{entry_group.count} orphan decimations in #{folder.name}") add_warning("#{entry_group.count} orphan decimations in #{folder.name}")
return return
end end
# find or create the file # find or create the stream
file = folder.db_files.find_by_path(base[:path]) stream = folder.db_streams.find_by_path(base[:path])
file ||= DbFile.new(db_folder: folder, stream ||= DbStream.new(db_folder: folder,
path: base[:path], name: default_name) path: base[:path], name: default_name)
# remove the id (if present) to mark this file as updated # remove the id (if present) to mark this stream as updated
@file_ids -= [file.id] @stream_ids -= [stream.id]
# return the Updater, don't run it # return the Updater, don't run it
UpdateFile.new(file, base, entry_group - [base]) UpdateStream.new(stream, base, entry_group - [base])
end end
# find the base stream in this entry_group # find the base stream in this entry_group
...@@ -142,7 +142,7 @@ class UpdateFolder ...@@ -142,7 +142,7 @@ class UpdateFolder
# adds a warning and returns nil if base entry is missing # adds a warning and returns nil if base entry is missing
def __base_entry(entry_group) def __base_entry(entry_group)
base_entry = entry_group.select { |entry| base_entry = entry_group.select { |entry|
entry[:path].match(UpdateFile.decimation_tag).nil? entry[:path].match(UpdateStream.decimation_tag).nil?
}.first }.first
return nil unless base_entry return nil unless base_entry
base_entry base_entry
......
# frozen_string_literal: true # frozen_string_literal: true
# Handles construction of DbFolder objects # Handles construction of DbFolder objects
class UpdateFile class UpdateStream
include ServiceStatus include ServiceStatus
def initialize(file, base_entry, decimation_entries) def initialize(stream, base_entry, decimation_entries)
@file = file @stream = stream
@base_entry = base_entry @base_entry = base_entry
@decimation_entries = decimation_entries @decimation_entries = decimation_entries
super() super()
end end
def run def run
__update_file(@file, @base_entry, @decimation_entries) __update_stream(@stream, @base_entry, @decimation_entries)
self self
end end
...@@ -21,39 +21,39 @@ class UpdateFile ...@@ -21,39 +21,39 @@ class UpdateFile
/~decim-([\d]+)$/ /~decim-([\d]+)$/
end end
# create or update a DbFile object at the # create or update a DbStream object at the
# specified path. # specified path.
def __update_file(file, base_entry, decimation_entries) def __update_stream(stream, base_entry, decimation_entries)
file.update_attributes(base_entry[:attributes]) stream.update_attributes(base_entry[:attributes])
file.save! stream.save!
__build_decimations(file: file, __build_decimations(stream: stream,
entry_group: decimation_entries) entry_group: decimation_entries)
__build_streams(file: file, stream_data: base_entry[:streams]) __build_elements(stream: stream, stream_data: base_entry[:elements])
end end
# create or update DbDecimations for the # create or update DbDecimations for the
# specified DbFile # specified DbStream
def __build_decimations(file:, entry_group:) def __build_decimations(stream:, entry_group:)
entry_group.each do |entry| entry_group.each do |entry|
level = entry[:path].match(UpdateFile.decimation_tag)[1].to_i level = entry[:path].match(UpdateStream.decimation_tag)[1].to_i
decim = file.db_decimations.find_by_level(level) decim = stream.db_decimations.find_by_level(level)
decim ||= DbDecimation.new(db_file: file, level: level) decim ||= DbDecimation.new(db_stream: stream, level: level)
decim.update_attributes(entry[:attributes]) decim.update_attributes(entry[:attributes])
decim.save! decim.save!
end end
end end
# create or update DbStreams for the # create or update DbStreams for the
# specified DbFile # specified DbStream
def __build_streams(file:, stream_data:) def __build_elements(stream:, stream_data:)
file.column_count.times do |x| stream.column_count.times do |x|
stream = file.db_streams.find_by_column(x) element = stream.db_elements.find_by_column(x)
stream ||= DbStream.new(db_file: file) element ||= DbElement.new(db_stream: stream)
# check if there is stream metadata for column x # check if there is stream metadata for column x
entry = stream_data.select { |meta| meta[:column] == x } entry = stream_data.select { |meta| meta[:column] == x }
# use the metadata if present # use the metadata if present
stream.update_attributes(entry[0] || {}) element.update_attributes(entry[0] || {})
stream.save! element.save!
end end
end end
end end
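As a reading aid for the decimation handling above, UpdateStream.decimation_tag matches the ~decim-N suffix that NilmDB appends to decimated stream paths; the paths below are illustrative:

UpdateStream.decimation_tag                                           # => /~decim-([\d]+)$/
'/folder1/f1_1~decim-16'.match(UpdateStream.decimation_tag)[1].to_i   # => 16
'/folder1/f1_1'.match(UpdateStream.decimation_tag)                    # => nil (base stream)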
# frozen_string_literal: true
require File.expand_path('../boot', __FILE__) require File.expand_path('../boot', __FILE__)
require 'rails/all' require 'rails/all'
...@@ -24,7 +25,7 @@ module ControlPanel ...@@ -24,7 +25,7 @@ module ControlPanel
config.active_record.raise_in_transactional_callbacks = true config.active_record.raise_in_transactional_callbacks = true
# Add folders under the services directory # Add folders under the services directory
['nilm','db','db_folder','db_file'].each do |service| %w(nilm db db_folder db_stream).each do |service|
config.autoload_paths << Rails.root.join("app/services/#{service}") config.autoload_paths << Rails.root.join("app/services/#{service}")
end end
end end
......
# frozen_string_literal: true
class ChangeFileToStream < ActiveRecord::Migration
def change
rename_table :db_streams, :db_elements
rename_table :db_files, :db_streams
end
end
# frozen_string_literal: true
class RenameForeignElementKey < ActiveRecord::Migration
def change
rename_column :db_elements, :db_file_id, :db_stream_id
end
end
# frozen_string_literal: true
class RenameForeignDecimationKey < ActiveRecord::Migration
def change
rename_column :db_decimations, :db_file_id, :db_stream_id
end
end
# frozen_string_literal: true
class ExtendedDecimationLevelRange < ActiveRecord::Migration
def change
change_column :db_decimations, :level, :integer, limit: 8
end
end
@@ -11,35 +11,33 @@
 #
 # It's strongly recommended that you check this file into your version control system.
-ActiveRecord::Schema.define(version: 20160709235828) do
+ActiveRecord::Schema.define(version: 20170104213820) do
   create_table "db_decimations", force: :cascade do |t|
     t.integer "start_time", limit: 8
     t.integer "end_time", limit: 8
     t.integer "total_rows", limit: 8
     t.integer "total_time", limit: 8
-    t.integer "db_file_id"
+    t.integer "db_stream_id"
     t.datetime "created_at", null: false
     t.datetime "updated_at", null: false
-    t.integer "level"
+    t.integer "level", limit: 8
     t.string "data_type"
   end
-  create_table "db_files", force: :cascade do |t|
+  create_table "db_elements", force: :cascade do |t|
     t.string "name"
-    t.string "description"
-    t.integer "db_folder_id"
-    t.datetime "created_at", null: false
-    t.datetime "updated_at", null: false
-    t.string "path"
-    t.integer "start_time", limit: 8
-    t.integer "end_time", limit: 8
-    t.integer "total_rows", limit: 8
-    t.integer "total_time", limit: 8
-    t.string "data_type"
-    t.string "name_abbrev"
-    t.boolean "delete_locked"
-    t.boolean "hidden"
+    t.string "units"
+    t.integer "column"
+    t.float "default_max"
+    t.float "default_min"
+    t.float "scale_factor"
+    t.float "offset"
+    t.integer "db_stream_id"
+    t.datetime "created_at", null: false
+    t.datetime "updated_at", null: false
+    t.boolean "plottable"
+    t.boolean "discrete"
   end
   create_table "db_folders", force: :cascade do |t|
@@ -54,17 +52,19 @@ ActiveRecord::Schema.define(version: 20160709235828) do
   create_table "db_streams", force: :cascade do |t|
     t.string "name"
-    t.string "units"
-    t.integer "column"
-    t.float "default_max"
-    t.float "default_min"
-    t.float "scale_factor"
-    t.float "offset"
-    t.integer "db_file_id"
-    t.datetime "created_at", null: false
-    t.datetime "updated_at", null: false
-    t.boolean "plottable"
-    t.boolean "discrete"
+    t.string "description"
+    t.integer "db_folder_id"
+    t.datetime "created_at", null: false
+    t.datetime "updated_at", null: false
+    t.string "path"
+    t.integer "start_time", limit: 8
+    t.integer "end_time", limit: 8
+    t.integer "total_rows", limit: 8
+    t.integer "total_time", limit: 8
+    t.string "data_type"
+    t.string "name_abbrev"
+    t.boolean "delete_locked"
+    t.boolean "hidden"
   end
   create_table "dbs", force: :cascade do |t|
......
...@@ -15,4 +15,5 @@ describe DbAdapter do ...@@ -15,4 +15,5 @@ describe DbAdapter do
) )
end end
end end
end end
# frozen_string_literal: true # frozen_string_literal: true
require 'rails_helper' require 'rails_helper'
RSpec.describe DbFilesController, type: :controller do RSpec.describe DbElementsController, type: :controller do
end end
...@@ -2,7 +2,7 @@ ...@@ -2,7 +2,7 @@
# generic DbStream # generic DbStream
FactoryGirl.define do FactoryGirl.define do
factory :db_file do factory :db_stream do
name { Faker::Lorem.word } name { Faker::Lorem.word }
end end
end end
...@@ -4,32 +4,32 @@ ...@@ -4,32 +4,32 @@
# are usually returned by DbAdapter.schema # are usually returned by DbAdapter.schema
class DbSchemaHelper class DbSchemaHelper
# schema data # schema data
def entry(path, metadata: {}, stream_count: 1) def entry(path, metadata: {}, element_count: 1)
{ {
path: path, path: path,
attributes: { attributes: {
data_type: "float32_#{stream_count}", data_type: "float32_#{element_count}",
start_time: 0, start_time: 0,
end_time: 0, end_time: 0,
total_rows: 0, total_rows: 0,
total_time: 0 total_time: 0
}.merge(metadata), }.merge(metadata),
streams: __build_streams(stream_count) elements: __build_elements(element_count)
} }
end end
# build stream hash for a file # build element hash for a file
def __build_streams(count) def __build_elements(count)
return {} unless count.positive? return {} unless count.positive?
streams = [] elements = []
(0..(count - 1)).each do |i| (0..(count - 1)).each do |i|
streams << elements <<
{ {
'name': "stream#{i}", 'name': "element#{i}",
'units': 'unit', 'units': 'unit',
'column': i 'column': i
} }
end end
streams elements
end end
end end
...@@ -2,7 +2,7 @@ ...@@ -2,7 +2,7 @@
# generic DbStream # generic DbStream
FactoryGirl.define do FactoryGirl.define do
factory :db_stream do factory :db_file do
name { Faker::Lorem.word } name { Faker::Lorem.word }
end end
end end
# frozen_string_literal: true
require 'rails_helper'
RSpec.describe 'DbElement' do
describe 'object' do
let(:db_element) { DbElement.new }
specify { expect(db_element).to respond_to(:name) }
specify { expect(db_element).to respond_to(:units) }
specify { expect(db_element).to respond_to(:column) }
specify { expect(db_element).to respond_to(:default_max) }
specify { expect(db_element).to respond_to(:default_min) }
specify { expect(db_element).to respond_to(:scale_factor) }
specify { expect(db_element).to respond_to(:offset) }
specify { expect(db_element).to respond_to(:plottable) }
specify { expect(db_element).to respond_to(:discrete) }
end
end
# frozen_string_literal: true
require 'rails_helper'
RSpec.describe 'DbFile' do
describe 'object' do
let(:db_file) { DbFile.new }
specify { expect(db_file).to respond_to(:name) }
specify { expect(db_file).to respond_to(:name_abbrev) }
specify { expect(db_file).to respond_to(:description) }
specify { expect(db_file).to respond_to(:db_streams) }
specify { expect(db_file).to respond_to(:hidden) }
end
describe 'child streams' do
it 'are destroyed with parent file' do
stream = DbStream.create
file = DbFile.create
file.db_streams << stream
file.destroy
expect(DbStream.find_by_id(stream.id)).to be nil
end
it 'exist for every column in file datatype' do
file = DbFile.create(data_type: 'float32_3')
file.db_streams << DbStream.new
# missing 3 streams
expect(file.valid?).to be false
end
it 'do not exist for columns not in file datatype' do
file = DbFile.create(data_type: 'float32_1')
2.times do |x|
file.db_streams << DbStream.new(column: x)
end
expect(file.valid?).to be false
end
end
end
...@@ -8,7 +8,7 @@ RSpec.describe 'DbFolder' do ...@@ -8,7 +8,7 @@ RSpec.describe 'DbFolder' do
specify { expect(db_folder).to respond_to(:description) } specify { expect(db_folder).to respond_to(:description) }
specify { expect(db_folder).to respond_to(:parent) } specify { expect(db_folder).to respond_to(:parent) }
specify { expect(db_folder).to respond_to(:subfolders) } specify { expect(db_folder).to respond_to(:subfolders) }
specify { expect(db_folder).to respond_to(:db_files) } specify { expect(db_folder).to respond_to(:db_streams) }
specify { expect(db_folder).to respond_to(:hidden) } specify { expect(db_folder).to respond_to(:hidden) }
end end
...@@ -16,26 +16,26 @@ RSpec.describe 'DbFolder' do ...@@ -16,26 +16,26 @@ RSpec.describe 'DbFolder' do
before(:all) do before(:all) do
@folder = DbFolder.create @folder = DbFolder.create
@subfolder = DbFolder.create @subfolder = DbFolder.create
@file = DbFile.create @stream = DbStream.create
@folder.subfolders << @subfolder @folder.subfolders << @subfolder
@folder.db_files << @file @folder.db_streams << @stream
@folder.destroy @folder.destroy
end end
it 'removes subfolders' do it 'removes subfolders' do
expect(DbFolder.find_by_id(@subfolder.id)).to be_nil expect(DbFolder.find_by_id(@subfolder.id)).to be_nil
end end
it 'removes subfiles' do it 'removes streams' do
expect(DbFile.find_by_id(@file.id)).to be_nil expect(DbStream.find_by_id(@stream.id)).to be_nil
end end
end end
describe 'insert_file' do describe 'insert_stream' do
let(:db_folder) { FactoryGirl.create(:db_folder) } let(:db_folder) { FactoryGirl.create(:db_folder) }
let(:new_file) { FactoryGirl.create(:db_file) } let(:new_stream) { FactoryGirl.create(:db_stream) }
it 'adds the file to subfolders' do it 'adds the stream to the folder' do
db_folder.insert_file(file: new_file) db_folder.insert_stream(stream: new_stream)
expect(new_file.db_folder).to eq(db_folder) expect(new_stream.db_folder).to eq(db_folder)
end end
end end
end end
...@@ -5,13 +5,34 @@ RSpec.describe 'DbStream' do ...@@ -5,13 +5,34 @@ RSpec.describe 'DbStream' do
describe 'object' do describe 'object' do
let(:db_stream) { DbStream.new } let(:db_stream) { DbStream.new }
specify { expect(db_stream).to respond_to(:name) } specify { expect(db_stream).to respond_to(:name) }
specify { expect(db_stream).to respond_to(:units) } specify { expect(db_stream).to respond_to(:name_abbrev) }
specify { expect(db_stream).to respond_to(:column) } specify { expect(db_stream).to respond_to(:description) }
specify { expect(db_stream).to respond_to(:default_max) } specify { expect(db_stream).to respond_to(:db_elements) }
specify { expect(db_stream).to respond_to(:default_min) } specify { expect(db_stream).to respond_to(:hidden) }
specify { expect(db_stream).to respond_to(:scale_factor) } end
specify { expect(db_stream).to respond_to(:offset) }
specify { expect(db_stream).to respond_to(:plottable) } describe 'child elements' do
specify { expect(db_stream).to respond_to(:discrete) } it 'are destroyed with parent stream' do
element = DbElement.create
stream = DbStream.create
stream.db_elements << element
stream.destroy
expect(DbElement.find_by_id(element.id)).to be nil
end
it 'exist for every column in stream datatype' do
stream = DbStream.create(data_type: 'float32_3')
stream.db_elements << DbElement.new
# missing 3 elements
expect(stream.valid?).to be false
end
it 'do not exist for columns not in stream datatype' do
stream = DbStream.create(data_type: 'float32_1')
2.times do |x|
stream.db_elements << DbElement.new(column: x)
end
expect(stream.valid?).to be false
end
end end
end end
...@@ -6,21 +6,21 @@ helper = DbSchemaHelper.new ...@@ -6,21 +6,21 @@ helper = DbSchemaHelper.new
# a simple schema that could be returned # a simple schema that could be returned
# from DbAdapter.schema # from DbAdapter.schema
# folder1 # folder1
# `- file1_1: 4 streams # `- stream1_1: 4 elements
# - file1_2: 5 streams # - stream1_2: 5 elements
# folder2 # folder2
# '- file2_1: 1 stream # '- stream2_1: 1 element
# `- file2_2: 3 streams # `- stream2_2: 3 elements
simple_db = [ simple_db = [
helper.entry('/folder1/f1_1', helper.entry('/folder1/f1_1',
metadata: { name: 'file1_1' }, stream_count: 4), metadata: { name: 'stream1_1' }, element_count: 4),
helper.entry('/folder1/f1_2', helper.entry('/folder1/f1_2',
metadata: { name: 'file1_2' }, stream_count: 5), metadata: { name: 'stream1_2' }, element_count: 5),
helper.entry('/folder2/f2_1', helper.entry('/folder2/f2_1',
metadata: { name: 'file2_1' }, stream_count: 1), metadata: { name: 'stream2_1' }, element_count: 1),
helper.entry('/folder2/f2_2', helper.entry('/folder2/f2_2',
metadata: { name: 'file2_2' }, stream_count: 3) metadata: { name: 'stream2_2' }, element_count: 3)
] ]
describe 'UpdateDb' do describe 'UpdateDb' do
...@@ -37,44 +37,44 @@ describe 'UpdateDb' do ...@@ -37,44 +37,44 @@ describe 'UpdateDb' do
update_with_schema(simple_db) update_with_schema(simple_db)
expect(@root.name).to eq('root') expect(@root.name).to eq('root')
expect(@root.subfolders.count).to eq(2) expect(@root.subfolders.count).to eq(2)
expect(@root.db_files.count).to eq(0) expect(@root.db_streams.count).to eq(0)
end end
it 'builds sub-folder1' do it 'builds sub-folder1' do
update_with_schema(simple_db) update_with_schema(simple_db)
folder1 = @root.subfolders[0] folder1 = @root.subfolders[0]
expect(folder1.name).to eq('folder1') expect(folder1.name).to eq('folder1')
expect(folder1.db_files[0].name).to eq('file1_1') expect(folder1.db_streams[0].name).to eq('stream1_1')
expect(folder1.db_files[1].name).to eq('file1_2') expect(folder1.db_streams[1].name).to eq('stream1_2')
end end
it 'builds files in sub-folder1' do it 'builds streams in sub-folder1' do
update_with_schema(simple_db) update_with_schema(simple_db)
folder1 = @root.subfolders[0] folder1 = @root.subfolders[0]
expect(folder1.db_files.count).to eq(2) expect(folder1.db_streams.count).to eq(2)
file1 = folder1.db_files[0] stream1 = folder1.db_streams[0]
file2 = folder1.db_files[1] stream2 = folder1.db_streams[1]
expect(file1.db_streams.count).to eq(4) expect(stream1.db_elements.count).to eq(4)
expect(file2.db_streams.count).to eq(5) expect(stream2.db_elements.count).to eq(5)
end end
it 'builds sub-folder2' do it 'builds sub-folder2' do
update_with_schema(simple_db) update_with_schema(simple_db)
folder2 = @root.subfolders[1] folder2 = @root.subfolders[1]
expect(folder2.name).to eq('folder2') expect(folder2.name).to eq('folder2')
expect(folder2.db_files.count).to eq(2) expect(folder2.db_streams.count).to eq(2)
expect(folder2.db_files[0].name).to eq('file2_1') expect(folder2.db_streams[0].name).to eq('stream2_1')
expect(folder2.db_files[1].name).to eq('file2_2') expect(folder2.db_streams[1].name).to eq('stream2_2')
end end
end end
# decimation handling # decimation handling
describe 'given decimations' do describe 'given decimations' do
it 'adds decimations to files' do it 'adds decimations to streams' do
schema = Array.new(simple_db) schema = Array.new(simple_db)
schema << helper.entry('/folder1/f1_1~decim-4') schema << helper.entry('/folder1/f1_1~decim-4')
schema << helper.entry('/folder1/f1_1~decim-16') schema << helper.entry('/folder1/f1_1~decim-16')
update_with_schema(schema) update_with_schema(schema)
folder1 = @root.subfolders[0] folder1 = @root.subfolders[0]
file1 = folder1.db_files[0] stream1 = folder1.db_streams[0]
expect(file1.db_decimations.count).to eq(2) expect(stream1.db_decimations.count).to eq(2)
end end
it 'ignores orphaned decimations' do it 'ignores orphaned decimations' do
schema = Array.new(simple_db) schema = Array.new(simple_db)
...@@ -82,28 +82,28 @@ describe 'UpdateDb' do ...@@ -82,28 +82,28 @@ describe 'UpdateDb' do
schema << helper.entry('/folder1/f1_3~decim-4') schema << helper.entry('/folder1/f1_3~decim-4')
update_with_schema(schema) update_with_schema(schema)
folder1 = @root.subfolders[0] folder1 = @root.subfolders[0]
# expect just 2 files in this folder # expect just 2 streams in this folder
expect(folder1.db_files.count).to eq(2) expect(folder1.db_streams.count).to eq(2)
# and a warning about orphaned decimations # and a warning about orphaned decimations
expect(@service.warnings.count).to eq(1) expect(@service.warnings.count).to eq(1)
end end
end end
# info streams and metadata # info elements and metadata
describe 'uses metadata' do describe 'uses metadata' do
it 'from folder info stream' do it 'from folder info element' do
schema = Array.new(simple_db) schema = Array.new(simple_db)
schema << helper.entry('/folder1/info', metadata: { name: 'first' }) schema << helper.entry('/folder1/info', metadata: { name: 'first' })
update_with_schema(schema) update_with_schema(schema)
folder1 = @root.subfolders[0] folder1 = @root.subfolders[0]
expect(folder1.name).to eq('first') expect(folder1.name).to eq('first')
end end
it 'from base file' do it 'from base stream' do
schema = Array.new(simple_db) schema = Array.new(simple_db)
schema << helper.entry('/folder1/f1_meta', metadata: { name: 'custom' }) schema << helper.entry('/folder1/f1_meta', metadata: { name: 'custom' })
update_with_schema(schema) update_with_schema(schema)
folder1 = @root.subfolders[0] folder1 = @root.subfolders[0]
expect(folder1.db_files.find_by_name('custom')).to be_present expect(folder1.db_streams.find_by_name('custom')).to be_present
end end
end end
...@@ -116,29 +116,29 @@ describe 'UpdateDb' do ...@@ -116,29 +116,29 @@ describe 'UpdateDb' do
expect(@root.subfolders.find_by_name('lonely')).to be_present expect(@root.subfolders.find_by_name('lonely')).to be_present
end end
it 'handles chains of folders' do it 'handles chains of folders' do
schema = [helper.entry('/fa/fb/data', metadata: { name: 'the_file' })] schema = [helper.entry('/fa/fb/data', metadata: { name: 'the_stream' })]
update_with_schema(schema) update_with_schema(schema)
file = DbFile.find_by_name('the_file') stream = DbStream.find_by_name('the_stream')
expect(file.db_folder.parent.parent).to eq(@root) expect(stream.db_folder.parent.parent).to eq(@root)
end end
end end
# updates to remote db # updates to remote db
describe 'given changes to remote db' do describe 'given changes to remote db' do
it 'removes missing files' do it 'removes missing streams' do
# create Db with a file 'temp' # create Db with a stream 'temp'
update_with_schema([helper.entry('/folder1/temp'), update_with_schema([helper.entry('/folder1/temp'),
helper.entry('/folder1/info', helper.entry('/folder1/info',
metadata: { name: 'f1' })]) metadata: { name: 'f1' })])
temp = DbFile.find_by_name('temp') temp = DbStream.find_by_name('temp')
# the file 'temp' should be here # the stream 'temp' should be here
expect(temp).to be_present expect(temp).to be_present
# update Db without 'temp' # update Db without 'temp'
update_with_schema([helper.entry('/folder1/info', update_with_schema([helper.entry('/folder1/info',
metadata: { name: 'f1' })], metadata: { name: 'f1' })],
db: @db) db: @db)
# it should be gone # it should be gone
expect(DbFile.find_by_name('temp')).to be nil expect(DbStream.find_by_name('temp')).to be nil
# ...and the service should have a warning # ...and the service should have a warning
expect(@service.warnings?).to be true expect(@service.warnings?).to be true
end end
...@@ -148,7 +148,7 @@ describe 'UpdateDb' do ...@@ -148,7 +148,7 @@ describe 'UpdateDb' do
helper.entry('/folder1/temp/info', helper.entry('/folder1/temp/info',
metadata: { name: 'temp' })]) metadata: { name: 'temp' })])
temp = DbFolder.find_by_name('temp') temp = DbFolder.find_by_name('temp')
# the file 'temp' should be here # the stream 'temp' should be here
expect(temp).to be_present expect(temp).to be_present
# update Db without 'temp' # update Db without 'temp'
update_with_schema([helper.entry('/folder1/stub')], update_with_schema([helper.entry('/folder1/stub')],
...@@ -158,24 +158,24 @@ describe 'UpdateDb' do ...@@ -158,24 +158,24 @@ describe 'UpdateDb' do
# ...and the service should have a warning # ...and the service should have a warning
expect(@service.warnings?).to be true expect(@service.warnings?).to be true
end end
it 'adds new files' do it 'adds new streams' do
# create Db with 1 folder and file # create Db with 1 folder and stream
update_with_schema([helper.entry('/folder1/old_file')]) update_with_schema([helper.entry('/folder1/old_stream')])
@folder = @root.subfolders.first @folder = @root.subfolders.first
expect(@folder.db_files.count).to eq(1) expect(@folder.db_streams.count).to eq(1)
# run update again with a new file added # run update again with a new stream added
update_with_schema([helper.entry('/folder1/old_file'), update_with_schema([helper.entry('/folder1/old_stream'),
helper.entry('/folder1/new_file')], helper.entry('/folder1/new_stream')],
db: @db) db: @db)
expect(@folder.db_files.count).to eq(2) expect(@folder.db_streams.count).to eq(2)
end end
it 'adds new folders' do it 'adds new folders' do
# create Db with 1 folder and file # create Db with 1 folder and stream
update_with_schema([helper.entry('/folder1/old_file')]) update_with_schema([helper.entry('/folder1/old_stream')])
@folder = @root.subfolders.first @folder = @root.subfolders.first
expect(@folder.subfolders.count).to eq(0) expect(@folder.subfolders.count).to eq(0)
# run update again with a new file added # run update again with a new stream added
update_with_schema([helper.entry('/folder1/old_file'), update_with_schema([helper.entry('/folder1/old_stream'),
helper.entry('/folder1/new_folder/info')], helper.entry('/folder1/new_folder/info')],
db: @db) db: @db)
expect(@folder.subfolders.count).to eq(1) expect(@folder.subfolders.count).to eq(1)
......
...@@ -3,37 +3,37 @@ ...@@ -3,37 +3,37 @@
require 'rails_helper' require 'rails_helper'
helper = DbSchemaHelper.new helper = DbSchemaHelper.new
describe 'UpdateFile service' do describe 'UpdateStream service' do
let(:db) { Db.new } let(:db) { Db.new }
let(:service) { UpdateDb.new(db: db) } let(:service) { UpdateDb.new(db: db) }
it 'updates file info' do it 'updates stream info' do
# create Db with 1 folder and file # create Db with 1 folder and stream
service.run([helper.entry('/folder1/file1', service.run([helper.entry('/folder1/stream1',
metadata: { name: 'old_name' })]) metadata: { name: 'old_name' })])
file = DbFile.find_by_name('old_name') stream = DbStream.find_by_name('old_name')
expect(file).to be_present expect(stream).to be_present
# run update again with new metadata # run update again with new metadata
service = UpdateDb.new(db: db) service = UpdateDb.new(db: db)
service.run([helper.entry('/folder1/file1', service.run([helper.entry('/folder1/stream1',
metadata: { name: 'new_name' })]) metadata: { name: 'new_name' })])
file.reload stream.reload
expect(file.name).to eq('new_name') expect(stream.name).to eq('new_name')
end end
it 'updates stream info' do it 'updates element info' do
# create Db with file with 1 stream # create Db with stream with 1 element
schema = [helper.entry('/folder1/subfolder/file', schema = [helper.entry('/folder1/subfolder/stream',
stream_count: 1)] element_count: 1)]
schema[0][:streams][0][:name] = 'old_name' schema[0][:elements][0][:name] = 'old_name'
service.run(schema) service.run(schema)
stream = DbStream.find_by_name('old_name') element = DbElement.find_by_name('old_name')
expect(stream).to be_present expect(element).to be_present
# run update again with new metadata # run update again with new metadata
schema[0][:streams][0][:name] = 'new_name' schema[0][:elements][0][:name] = 'new_name'
service = UpdateDb.new(db: db) service = UpdateDb.new(db: db)
service.run(schema) service.run(schema)
stream.reload element.reload
expect(stream.name).to eq('new_name') expect(element.name).to eq('new_name')
end end
end end
@@ -3,16 +3,15 @@
 require 'rails_helper'
 describe 'EditFolder service' do
-  let(:mock_adapter) { }
+  let(:mock_adapter) {}
   let(:service) { EditFolder.new(mock_adapter) }
   it 'changes folder attributes' do
     folder = DbFolder.new(name: 'old')
-    service.run(folder,name: 'new')
+    service.run(folder, name: 'new')
     expect(mock_adapter).to be called once
     expect(folder.name).to eq('new')
   end
-  it 'does not change folder on a server error'
-end
\ No newline at end of file
+  it 'does not change folder on a server error'
+end
# frozen_string_literal: true # frozen_string_literal: true
require 'rails_helper' require 'rails_helper'
test_nilm_url = 'http://nilm.secondary' test_nilm_url = 'http://192.168.42.17'
RSpec.describe 'CreateNilm' do RSpec.describe 'CreateNilm' do
describe 'build' do describe 'build' do
it 'creates and populates a Db object' do it 'creates and populates a Db object', :vcr do
# mock the database updater # mock the database updater
service = instance_double(UpdateDb, run: '') service = instance_double(UpdateDb, run: '')
allow(UpdateDb).to receive(:new).and_return(service) allow(UpdateDb).to receive(:new).and_return(service)
......
# frozen_string_literal: true
# This file was generated by the `rails generate rspec:install` command. Conventionally, all # This file was generated by the `rails generate rspec:install` command. Conventionally, all
# specs live under a `spec` directory, which RSpec adds to the `$LOAD_PATH`. # specs live under a `spec` directory, which RSpec adds to the `$LOAD_PATH`.
# The generated `.rspec` file contains `--require spec_helper` which will cause # The generated `.rspec` file contains `--require spec_helper` which will cause
...@@ -43,53 +44,51 @@ RSpec.configure do |config| ...@@ -43,53 +44,51 @@ RSpec.configure do |config|
mocks.verify_partial_doubles = true mocks.verify_partial_doubles = true
end end
# The settings below are suggested to provide a good initial experience # The settings below are suggested to provide a good initial experience
# with RSpec, but feel free to customize to your heart's content. # with RSpec, but feel free to customize to your heart's content.
=begin # # These two settings work together to allow you to limit a spec run
# These two settings work together to allow you to limit a spec run # # to individual examples or groups you care about by tagging them with
# to individual examples or groups you care about by tagging them with # # `:focus` metadata. When nothing is tagged with `:focus`, all examples
# `:focus` metadata. When nothing is tagged with `:focus`, all examples # # get run.
# get run. # config.filter_run :focus
config.filter_run :focus # config.run_all_when_everything_filtered = true
config.run_all_when_everything_filtered = true #
# # Allows RSpec to persist some state between runs in order to support
# Allows RSpec to persist some state between runs in order to support # # the `--only-failures` and `--next-failure` CLI options. We recommend
# the `--only-failures` and `--next-failure` CLI options. We recommend # # you configure your source control system to ignore this file.
# you configure your source control system to ignore this file. # config.example_status_persistence_file_path = "spec/examples.txt"
config.example_status_persistence_file_path = "spec/examples.txt" #
# # Limits the available syntax to the non-monkey patched syntax that is
# Limits the available syntax to the non-monkey patched syntax that is # # recommended. For more details, see:
# recommended. For more details, see: # # - http://rspec.info/blog/2012/06/rspecs-new-expectation-syntax/
# - http://rspec.info/blog/2012/06/rspecs-new-expectation-syntax/ # # - http://www.teaisaweso.me/blog/2013/05/27/rspecs-new-message-expectation-syntax/
# - http://www.teaisaweso.me/blog/2013/05/27/rspecs-new-message-expectation-syntax/ # # - http://rspec.info/blog/2014/05/notable-changes-in-rspec-3/#zero-monkey-patching-mode
# - http://rspec.info/blog/2014/05/notable-changes-in-rspec-3/#zero-monkey-patching-mode # config.disable_monkey_patching!
config.disable_monkey_patching! #
# # Many RSpec users commonly either run the entire suite or an individual
# Many RSpec users commonly either run the entire suite or an individual # # file, and it's useful to allow more verbose output when running an
# file, and it's useful to allow more verbose output when running an # # individual spec file.
# individual spec file. # if config.files_to_run.one?
if config.files_to_run.one? # # Use the documentation formatter for detailed output,
# Use the documentation formatter for detailed output, # # unless a formatter has already been configured
# unless a formatter has already been configured # # (e.g. via a command-line flag).
# (e.g. via a command-line flag). # config.default_formatter = 'doc'
config.default_formatter = 'doc' # end
end #
# # Print the 10 slowest examples and example groups at the
# Print the 10 slowest examples and example groups at the # # end of the spec run, to help surface which specs are running
# end of the spec run, to help surface which specs are running # # particularly slow.
# particularly slow. # config.profile_examples = 10
config.profile_examples = 10 #
# # Run specs in random order to surface order dependencies. If you find an
# Run specs in random order to surface order dependencies. If you find an # # order dependency and want to debug it, you can fix the order by providing
# order dependency and want to debug it, you can fix the order by providing # # the seed, which is printed after each run.
# the seed, which is printed after each run. # # --seed 1234
# --seed 1234 # config.order = :random
config.order = :random #
# # Seed global randomization in this process using the `--seed` CLI option.
# Seed global randomization in this process using the `--seed` CLI option. # # Setting this allows you to use `--seed` to deterministically reproduce
# Setting this allows you to use `--seed` to deterministically reproduce # # test failures related to randomization by passing the same `--seed` value
# test failures related to randomization by passing the same `--seed` value # # as the one that triggered the failure.
# as the one that triggered the failure. # Kernel.srand config.seed
Kernel.srand config.seed
=end
end end