diff --git a/lib/fhir_models/bootstrap/definitions.rb b/lib/fhir_models/bootstrap/definitions.rb
index 80c7a5045..4221669fd 100644
--- a/lib/fhir_models/bootstrap/definitions.rb
+++ b/lib/fhir_models/bootstrap/definitions.rb
@@ -1,7 +1,6 @@
require 'tempfile'
module FHIR
class Definitions
-
@@defns = File.expand_path '../definitions', File.dirname(File.absolute_path(__FILE__))
@@types = nil
@@resources = nil
@@ -20,28 +19,28 @@ def self.load_types
# load the types
filename = File.join(@@defns, 'structures', 'profiles-types.json')
raw = File.open(filename, 'r:UTF-8', &:read)
- @@types = JSON.parse(raw)['entry'].map{|e|e['resource']}
+ @@types = JSON.parse(raw)['entry'].map { |e| e['resource'] }
end
end
def self.get_primitive_types
load_types
# primitive data types start with a lowercase letter
- @@types.select{|t|t['id'][0]==t['id'][0].downcase}
+ @@types.select { |t| t['id'][0] == t['id'][0].downcase }
end
def self.get_complex_types
load_types
# complex data types start with an uppercase letter
# and we'll filter out profiles on types (for example, Age is a profile on Quantity)
- @@types.select{|t| (t['id'][0]==t['id'][0].upcase) && (t['id']==t['snapshot']['element'].first['path'])}
+ @@types.select { |t| (t['id'][0] == t['id'][0].upcase) && (t['id'] == t['snapshot']['element'].first['path']) }
end
def self.get_type_definition(type_name)
return nil if type_name.nil?
load_types
- d = @@types.find{|x|x['xmlId']==type_name || x['name']==type_name || x['url']==type_name}
- d = FHIR::StructureDefinition.new(d) if !d.nil?
+ d = @@types.find { |x| x['xmlId'] == type_name || x['name'] == type_name || x['url'] == type_name }
+ d = FHIR::StructureDefinition.new(d) unless d.nil?
d
end
@@ -54,7 +53,7 @@ def self.load_resources
# load the resources
filename = File.join(@@defns, 'structures', 'profiles-resources.json')
raw = File.open(filename, 'r:UTF-8', &:read)
- @@resources = JSON.parse(raw)['entry'].map{|e|e['resource']}
+ @@resources = JSON.parse(raw)['entry'].map { |e| e['resource'] }
end
end
@@ -66,8 +65,8 @@ def self.get_resource_definitions
def self.get_resource_definition(resource_name)
return nil if resource_name.nil?
load_resources
- d = @@resources.find{|x|x['xmlId']==resource_name || x['name']==resource_name || x['url']==resource_name}
- d = FHIR::StructureDefinition.new(d) if !d.nil?
+ d = @@resources.find { |x| x['xmlId'] == resource_name || x['name'] == resource_name || x['url'] == resource_name }
+ d = FHIR::StructureDefinition.new(d) unless d.nil?
d
end
@@ -76,7 +75,7 @@ def self.load_profiles
# load the built-in profiles
filename = File.join(@@defns, 'structures', 'profiles-others.json')
raw = File.open(filename, 'r:UTF-8', &:read)
- @@profiles = JSON.parse(raw)['entry'].map{|e|e['resource']}
+ @@profiles = JSON.parse(raw)['entry'].map { |e| e['resource'] }
end
end
@@ -85,15 +84,15 @@ def self.load_extensions
# load the built-in extensions
filename = File.join(@@defns, 'structures', 'extension-definitions.json')
raw = File.open(filename, 'r:UTF-8', &:read)
- @@extensions = JSON.parse(raw)['entry'].map{|e|e['resource']}
+ @@extensions = JSON.parse(raw)['entry'].map { |e| e['resource'] }
end
end
def self.get_extension_definition(extension_name)
return nil if extension_name.nil?
load_extensions
- d = @@extensions.find{|x|x['xmlId']==extension_name || x['name']==extension_name || x['url']==extension_name}
- d = FHIR::StructureDefinition.new(d) if !d.nil?
+ d = @@extensions.find { |x| x['xmlId'] == extension_name || x['name'] == extension_name || x['url'] == extension_name }
+ d = FHIR::StructureDefinition.new(d) unless d.nil?
d
end
@@ -103,11 +102,11 @@ def self.get_basetype(uri)
load_profiles
load_extensions
- defn = @@profiles.select{|x|x['url']==uri}.first
- defn = @@extensions.select{|x|x['url']==uri}.first if defn.nil?
+ defn = @@profiles.select { |x| x['url'] == uri }.first
+ defn = @@extensions.select { |x| x['url'] == uri }.first if defn.nil?
basetype = nil
- basetype = defn['baseType'] if !defn.nil?
+ basetype = defn['baseType'] unless defn.nil?
basetype
end
@@ -117,18 +116,18 @@ def self.get_profile(uri)
load_profiles
load_extensions
- defn = @@profiles.select{|x|x['url']==uri}.first
- defn = @@extensions.select{|x|x['url']==uri}.first if defn.nil?
+ defn = @@profiles.select { |x| x['url'] == uri }.first
+ defn = @@extensions.select { |x| x['url'] == uri }.first if defn.nil?
profile = nil
- profile = FHIR::StructureDefinition.new(defn) if !defn.nil?
+ profile = FHIR::StructureDefinition.new(defn) unless defn.nil?
profile
end
def self.get_profiles_for_resource(resource_name)
return nil if resource_name.nil?
load_profiles
- @@profiles.select{|x|x['baseType']==resource_name}.map{|x| FHIR::StructureDefinition.new(x) }
+ @@profiles.select { |x| x['baseType'] == resource_name }.map { |x| FHIR::StructureDefinition.new(x) }
end
# Get a dynamically generated class for a given profile.
@@ -137,22 +136,22 @@ def self.get_profile_class(uri)
load_profiles
load_extensions
- defn = @@profiles.select{|x|x['url']==uri}.first
- defn = @@extensions.select{|x|x['url']==uri}.first if defn.nil?
+ defn = @@profiles.select { |x| x['url'] == uri }.first
+ defn = @@extensions.select { |x| x['url'] == uri }.first if defn.nil?
klass = nil
- if !defn.nil?
+ unless defn.nil?
generator = FHIR::Boot::Generator.new(false)
type = defn['baseType']
id = defn['id'].gsub(/-|_/, '').capitalize
defn['id'] = type # override profile id with baseType name for generator
- template = generator.generate_class([ type ], defn)
+ template = generator.generate_class([type], defn)
f = Tempfile.new(["profile-#{id}", '.rb'])
f.write("module FHIR\n")
f.write("module Profile\n")
f.write("module #{id}\n")
f.write(template.to_s)
- 3.times{f.write("\nend")}
+ 3.times { f.write("\nend") }
f.close
begin
# load the profiled class
@@ -177,13 +176,13 @@ def self.load_expansions
# load the expansions
filename = File.join(@@defns, 'valuesets', 'expansions.json')
raw = File.open(filename, 'r:UTF-8', &:read)
- @@expansions = JSON.parse(raw)['entry'].map{|e|e['resource']}
+ @@expansions = JSON.parse(raw)['entry'].map { |e| e['resource'] }
end
if @@valuesets.nil?
# load the valuesets
filename = File.join(@@defns, 'valuesets', 'valuesets.json')
raw = File.open(filename, 'r:UTF-8', &:read)
- @@valuesets = JSON.parse(raw)['entry'].map{|e|e['resource']}
+ @@valuesets = JSON.parse(raw)['entry'].map { |e| e['resource'] }
end
end
@@ -192,20 +191,20 @@ def self.get_codes(uri)
return nil if uri.nil?
load_expansions
codes = nil
- valueset = @@expansions.select{|x|x['url']==uri}.first
- if !valueset.nil?
+ valueset = @@expansions.select { |x| x['url'] == uri }.first
+ unless valueset.nil?
codes = {}
if !valueset['expansion'].nil? && !valueset['expansion']['contains'].nil?
- keys = valueset['expansion']['contains'].map{|x|x['system']}.uniq
- keys.each{|x| codes[x]=[]}
- valueset['expansion']['contains'].each{|x| codes[x['system']] << x['code']}
+ keys = valueset['expansion']['contains'].map { |x| x['system'] }.uniq
+ keys.each { |x| codes[x] = [] }
+ valueset['expansion']['contains'].each { |x| codes[x['system']] << x['code'] }
end
if !valueset['compose'].nil? && !valueset['compose']['include'].nil?
- included_systems = valueset['compose']['include'].map{|x|x['system']}.uniq
- included_systems.each{|x| codes[x]=[] if !codes.keys.include?(x) }
- systems = @@valuesets.select{|x|x['resourceType']=='CodeSystem' && included_systems.include?(x['url'])}
+ included_systems = valueset['compose']['include'].map { |x| x['system'] }.uniq
+ included_systems.each { |x| codes[x] = [] unless codes.keys.include?(x) }
+ systems = @@valuesets.select { |x| x['resourceType'] == 'CodeSystem' && included_systems.include?(x['url']) }
systems.each do |x|
- x['concept'].each{|y| codes[x['url']] << y['code']}
+ x['concept'].each { |y| codes[x['url']] << y['code'] }
end
end
end
@@ -221,17 +220,16 @@ def self.load_search_params
# load the search parameters
filename = File.join(@@defns, 'structures', 'search-parameters.json')
raw = File.open(filename, 'r:UTF-8', &:read)
- @@search_params = JSON.parse(raw)['entry'].map{|e|e['resource']}
+ @@search_params = JSON.parse(raw)['entry'].map { |e| e['resource'] }
end
end
def self.get_search_parameters(type_name)
return nil if type_name.nil?
load_search_params
- @@search_params.select{|p|p['base']==type_name && p['xpath'] && !p['xpath'].include?('extension')}.map{|p|p['code']}
+ @@search_params.select { |p| p['base'] == type_name && p['xpath'] && !p['xpath'].include?('extension') }.map { |p| p['code'] }
end
private_class_method :load_types, :load_extensions, :load_expansions, :load_profiles, :load_resources, :load_search_params
-
end
end
diff --git a/lib/fhir_models/bootstrap/field.rb b/lib/fhir_models/bootstrap/field.rb
index b0f91a0b8..343305b44 100644
--- a/lib/fhir_models/bootstrap/field.rb
+++ b/lib/fhir_models/bootstrap/field.rb
@@ -1,6 +1,5 @@
module FHIR
class Field
-
attr_accessor :name
attr_accessor :local_name
attr_accessor :path
@@ -13,7 +12,7 @@ class Field
attr_accessor :regex
attr_accessor :constraint
- def initialize(name='')
+ def initialize(name = '')
@name = name
@local_name = fix_name(@name)
@type_profiles = []
@@ -21,13 +20,13 @@ def initialize(name='')
end
def serialize
- hash = Hash.new
- self.instance_variables.each do |v|
- hash[v.to_s[1..-1]] = self.instance_variable_get(v)
+ hash = {}
+ instance_variables.each do |v|
+ hash[v.to_s[1..-1]] = instance_variable_get(v)
end
hash.delete('name')
hash.keep_if do |_key, value|
- !value.nil? && ( (value.is_a?(Hash) && !value.empty?) ||
+ !value.nil? && ((value.is_a?(Hash) && !value.empty?) ||
(value.is_a?(Array) && !value.empty?) ||
(!value.is_a?(Hash) && !value.is_a?(Array))
)
@@ -37,9 +36,8 @@ def serialize
def fix_name(name)
fix = nil
- fix = "local_#{name}" if ['class', 'method', 'resourceType'].include?(name)
+ fix = "local_#{name}" if %w(class method resourceType).include?(name)
fix
end
-
end
end
diff --git a/lib/fhir_models/bootstrap/generator.rb b/lib/fhir_models/bootstrap/generator.rb
index 156e3bc03..cbcd03d4b 100644
--- a/lib/fhir_models/bootstrap/generator.rb
+++ b/lib/fhir_models/bootstrap/generator.rb
@@ -1,13 +1,12 @@
module FHIR
module Boot
class Generator
-
attr_accessor :lib
attr_accessor :defn
# templates keeps track of all the templates in context within a given StructureDefinition
attr_accessor :templates
- def initialize(auto_setup=true)
+ def initialize(auto_setup = true)
# load the valueset expansions
@defn = FHIR::Definitions
# templates is an array
@@ -18,13 +17,13 @@ def initialize(auto_setup=true)
def setup
# make folders for generated content if they do not exist
@lib = File.expand_path '..', File.dirname(File.absolute_path(__FILE__))
- Dir.mkdir(File.join(@lib, 'fhir')) if !Dir.exist?(File.join(@lib, 'fhir'))
- Dir.mkdir(File.join(@lib, 'fhir', 'types')) if !Dir.exist?(File.join(@lib, 'fhir', 'types'))
- Dir.mkdir(File.join(@lib, 'fhir', 'resources')) if !Dir.exist?(File.join(@lib, 'fhir', 'resources'))
+ Dir.mkdir(File.join(@lib, 'fhir')) unless Dir.exist?(File.join(@lib, 'fhir'))
+ Dir.mkdir(File.join(@lib, 'fhir', 'types')) unless Dir.exist?(File.join(@lib, 'fhir', 'types'))
+ Dir.mkdir(File.join(@lib, 'fhir', 'resources')) unless Dir.exist?(File.join(@lib, 'fhir', 'resources'))
# delete previously generated folder contents
- Dir.glob(File.join(@lib, 'fhir', '*')).each{|f|File.delete(f) if !File.directory?(f)}
- Dir.glob(File.join(@lib, 'fhir', '**', '*')).each{|f|File.delete(f) if !File.directory?(f)}
+ Dir.glob(File.join(@lib, 'fhir', '*')).each { |f| File.delete(f) unless File.directory?(f) }
+ Dir.glob(File.join(@lib, 'fhir', '**', '*')).each { |f| File.delete(f) unless File.directory?(f) }
end
def generate_metadata
@@ -37,30 +36,26 @@ def generate_metadata
field.name = nil
# try to find the element that describes the value
- type = p['snapshot']['element'].select{|e| e['path'].end_with?('.value')}.first['type'].first
+ type = p['snapshot']['element'].select { |e| e['path'].end_with?('.value') }.first['type'].first
# try to find the JSON data type
- ext = type['_code']['extension'].find{|e| e['url']=='http://hl7.org/fhir/StructureDefinition/structuredefinition-json-type'}
- if ext
- field.type = ext['valueString']
- else
- field.type = 'string'
- end
+ ext = type['_code']['extension'].find { |e| e['url'] == 'http://hl7.org/fhir/StructureDefinition/structuredefinition-json-type' }
+ field.type = ext ? ext['valueString'] : 'string'
# try to find a regex
if type['extension']
- ext = type['extension'].find{|e| e['url']=='http://hl7.org/fhir/StructureDefinition/structuredefinition-regex'}
+ ext = type['extension'].find { |e| e['url'] == 'http://hl7.org/fhir/StructureDefinition/structuredefinition-regex' }
field.regex = ext['valueString'] if ext
end
- hash[ p['id' ] ] = field.serialize
+ hash[p['id']] = field.serialize
end
template.constants['PRIMITIVES'] = hash
- template.constants['TYPES'] = @defn.get_complex_types.map{|t|t['id']}
+ template.constants['TYPES'] = @defn.get_complex_types.map { |t| t['id'] }
# resources
- template.constants['RESOURCES'] = @defn.get_resource_definitions.map{|r|r['id']}
+ template.constants['RESOURCES'] = @defn.get_resource_definitions.map { |r| r['id'] }
filename = File.join(@lib, 'fhir', 'metadata.rb')
file = File.open(filename, 'w:UTF-8')
@@ -81,13 +76,13 @@ def generate_resources
generate_class_files(folder, @defn.get_resource_definitions)
end
- def generate_class_files(folder=@lib, structure_defs)
+ def generate_class_files(folder = @lib, structure_defs = [])
structure_defs.each do |structure_def|
@templates.clear
type_name = structure_def['id']
- template = generate_class([ type_name ], structure_def, true)
+ template = generate_class([type_name], structure_def, true)
params = @defn.get_search_parameters(type_name)
- template.constants['SEARCH_PARAMS'] = params if !params.nil?
+ template.constants['SEARCH_PARAMS'] = params unless params.nil?
filename = File.join(folder, "#{type_name}.rb")
file = File.open(filename, 'w:UTF-8')
file.write(template.to_s)
@@ -101,13 +96,13 @@ def cap_first(string)
t
end
- def generate_class(hierarchy, structure_def, top_level=false)
+ def generate_class(hierarchy, structure_def, top_level = false)
type_name = structure_def['id']
constrained_type = structure_def['constrained_type']
path_type = type_name
path_type = constrained_type if constrained_type
- template = FHIR::Boot::Template.new([ type_name ], top_level)
+ template = FHIR::Boot::Template.new([type_name], top_level)
template.hierarchy = hierarchy
template.kind = structure_def['kind']
return template if structure_def['snapshot'].nil? || structure_def['snapshot']['element'].nil?
@@ -120,25 +115,24 @@ def generate_class(hierarchy, structure_def, top_level=false)
child_templates = []
structure_def['snapshot']['element'].each do |element|
# skip the first element
- next if element['path']==path_type
- if element['type']
- unique_types = element['type'].map{|t|t['code']}.uniq
- if unique_types.include?('Element') || unique_types.include?('BackboneElement')
- child_templates << element['path']
- end
+ next if element['path'] == path_type
+ next unless element['type']
+ unique_types = element['type'].map { |t| t['code'] }.uniq
+ if unique_types.include?('Element') || unique_types.include?('BackboneElement')
+ child_templates << element['path']
end
end
# now build the child templates...
child_templates.each do |child_name|
child_fixed_name = cap_first(child_name.gsub("#{type_name}.", ''))
next if child_fixed_name.include?('.')
- child_def = { 'id'=> child_fixed_name, 'snapshot'=>{ 'element'=>[] } }
+ child_def = { 'id' => child_fixed_name, 'snapshot' => { 'element' => [] } }
# Copy the element definitions for the child structure
structure_def['snapshot']['element'].each do |element|
child_def['snapshot']['element'] << element.clone if element['path'].start_with?("#{child_name}.")
end
# Remove the child elements
- child_paths = child_def['snapshot']['element'].map{|e|e['path']}
+ child_paths = child_def['snapshot']['element'].map { |e| e['path'] }
# child_paths = child_paths.drop(1)
structure_def['snapshot']['element'].keep_if do |element|
!child_paths.include?(element['path'])
@@ -148,7 +142,7 @@ def generate_class(hierarchy, structure_def, top_level=false)
element['path'] = element['path'].gsub(child_name, child_fixed_name)
end
# add the child
- child_hierarchy = hierarchy + [ child_fixed_name ]
+ child_hierarchy = hierarchy + [child_fixed_name]
child_klass = generate_class(child_hierarchy, child_def)
template.templates << child_klass
@templates << child_klass
@@ -157,7 +151,7 @@ def generate_class(hierarchy, structure_def, top_level=false)
# Process the remaining attributes (none of which are Elements or BackboneElements)
structure_def['snapshot']['element'].each do |element|
# skip the first element
- next if element['path']==path_type
+ next if element['path'] == path_type
field_base_name = element['path'].gsub("#{path_type}.", '')
# If the element has a type, treat it as a datatype or resource
@@ -165,34 +159,34 @@ def generate_class(hierarchy, structure_def, top_level=false)
if !element['type'].nil?
# profiles contains a list of profiles if the datatype is Reference or Extension
profiles = []
- element['type'].select{|t|t['code']=='Reference' || t['code']=='Extension'}.each do |data_type|
+ element['type'].select { |t| t['code'] == 'Reference' || t['code'] == 'Extension' }.each do |data_type|
profiles << data_type['profile']
end
- profiles.select!{|p|!p.nil?}
+ profiles.select! { |p| !p.nil? }
profiles.flatten!
# Calculate fields that have multiple data types
if element['type'].length > 1
fieldname = field_base_name.gsub('[x]', '')
- unique_types = element['type'].map{|t|t['code']}.uniq
- multiple_data_types[fieldname] = unique_types if(unique_types.length>1)
+ unique_types = element['type'].map { |t| t['code'] }.uniq
+ multiple_data_types[fieldname] = unique_types if unique_types.length > 1
end
# generate a field for each valid datatype... this is for things like Resource.attribute[x]
- element['type'].map{|t|t['code']}.uniq.each do |data_type|
+ element['type'].map { |t| t['code'] }.uniq.each do |data_type|
capitalized = cap_first(data_type)
fieldname = field_base_name.gsub('[x]', capitalized)
field = FHIR::Field.new(fieldname)
field.path = element['path'].gsub(path_type, type_name)
field.type = data_type
field.type = 'Extension' if field.path.end_with?('extension')
- field.type_profiles = profiles if(data_type=='Reference' || data_type=='Extension')
+ field.type_profiles = profiles if data_type == 'Reference' || data_type == 'Extension'
field.min = element['min']
field.max = element['max']
field.max = field.max.to_i
- field.max = '*' if element['max']=='*'
+ field.max = '*' if element['max'] == '*'
- if ['code', 'Coding', 'CodeableConcept'].include?(data_type) && element['binding']
+ if %w(code Coding CodeableConcept).include?(data_type) && element['binding']
field.binding = element['binding']
field.binding['uri'] = field.binding['valueSetUri']
field.binding['uri'] = field.binding['valueSetReference'] if field.binding['uri'].nil?
@@ -202,10 +196,10 @@ def generate_class(hierarchy, structure_def, top_level=false)
field.binding.delete('description')
field.binding.delete('extension')
# set the actual code list
- codes = @defn.get_codes( field.binding['uri'] )
- field.valid_codes = codes if !codes.nil?
+ codes = @defn.get_codes(field.binding['uri'])
+ field.valid_codes = codes unless codes.nil?
FHIR.logger.warn " MISSING EXPANSION -- #{field.path} #{field.min}..#{field.max}: #{field.binding['uri']} (#{field.binding['strength']})" if field.valid_codes.empty? && field.binding['uri'] && !field.binding['uri'].end_with?('bcp47') && !field.binding['uri'].end_with?('bcp13.txt')
- elsif ['Element', 'BackboneElement'].include?(data_type)
+ elsif %w(Element BackboneElement).include?(data_type)
# This is a nested structure or class
field.type = "#{hierarchy.join('::')}::#{cap_first(field.name)}"
end
@@ -216,33 +210,32 @@ def generate_class(hierarchy, structure_def, top_level=false)
field = FHIR::Field.new(field_base_name)
field.path = element['path'].gsub(path_type, type_name)
field.type = element['contentReference']
- field.type = field.type[1..-1] if field.type[0]=='#'
- if (hierarchy.last==field.type)
+ field.type = field.type[1..-1] if field.type[0] == '#'
+ if hierarchy.last == field.type
# reference to self
- field.type = "#{hierarchy.join('::')}"
+ field.type = hierarchy.join('::').to_s
else
# reference to contained template
- klass = @templates.select{|x|x.hierarchy.last==field.type}.first
- if !klass.nil?
- # the template/child class was declared somewhere else in this class hierarchy
- field.type = klass.hierarchy.join('::')
- else
- # the template/child is a direct ancester (it isn't in @templates yet because it is being defined now)
- field.type = field.type.split('.').map{|x| cap_first(x) }.join('::')
- end
+ klass = @templates.select { |x| x.hierarchy.last == field.type }.first
+ field.type = if !klass.nil?
+ # the template/child class was declared somewhere else in this class hierarchy
+ klass.hierarchy.join('::')
+ else
+            # the template/child is a direct ancestor (it isn't in @templates yet because it is being defined now)
+ field.type.split('.').map { |x| cap_first(x) }.join('::')
+ end
end
field.min = element['min']
field.max = element['max']
field.max = field.max.to_i
- field.max = '*' if element['max']=='*'
+ field.max = '*' if element['max'] == '*'
template.fields << field
end
end
- template.constants['MULTIPLE_TYPES'] = multiple_data_types if !multiple_data_types.empty?
+ template.constants['MULTIPLE_TYPES'] = multiple_data_types unless multiple_data_types.empty?
template
end
-
end
end
end
diff --git a/lib/fhir_models/bootstrap/hashable.rb b/lib/fhir_models/bootstrap/hashable.rb
index d0cfa7dfc..062628897 100644
--- a/lib/fhir_models/bootstrap/hashable.rb
+++ b/lib/fhir_models/bootstrap/hashable.rb
@@ -1,13 +1,12 @@
require 'bigdecimal'
module FHIR
module Hashable
-
def to_hash
- hash = Hash.new
+ hash = {}
self.class::METADATA.each do |key, value|
local_name = key
local_name = value['local_name'] if value['local_name']
- hash[key] = self.instance_variable_get("@#{local_name}")
+ hash[key] = instance_variable_get("@#{local_name}")
if hash[key].respond_to?(:to_hash)
hash[key] = hash[key].to_hash
elsif hash[key].is_a? Array
@@ -18,12 +17,12 @@ def to_hash
end
end
hash.keep_if do |_key, value|
- !value.nil? && ( (value.is_a?(Hash) && !value.empty?) ||
+ !value.nil? && ((value.is_a?(Hash) && !value.empty?) ||
(value.is_a?(Array) && !value.empty?) ||
(!value.is_a?(Hash) && !value.is_a?(Array))
)
end
- hash['resourceType'] = self.resourceType if self.respond_to?(:resourceType)
+ hash['resourceType'] = resourceType if respond_to?(:resourceType)
hash
end
@@ -32,51 +31,47 @@ def from_hash(hash)
self.class::METADATA.each do |key, value|
local_name = key
local_name = value['local_name'] if value['local_name']
- self.instance_variable_set("@#{local_name}", nil)
+ instance_variable_set("@#{local_name}", nil)
end
# set the variables to the hash values
hash.each do |key, value|
key = key.to_s
meta = self.class::METADATA[key]
- if !meta.nil?
- local_name = key
- local_name = meta['local_name'] if meta['local_name']
- self.instance_variable_set("@#{local_name}", value) rescue nil
- # inflate the value if it isn't a primitive
- klass = Module.const_get("FHIR::#{meta['type']}") rescue nil
- if !klass.nil? && !value.nil?
- # handle array of objects
- if value.is_a?(Array)
- value.map! do |child|
- obj = child
- unless [FHIR::RESOURCES, FHIR::TYPES].flatten.include? child.class.name.gsub('FHIR::', '')
- obj = make_child(child, klass)
- end
- obj
+ next if meta.nil?
+ local_name = key
+ local_name = meta['local_name'] if meta['local_name']
+ instance_variable_set("@#{local_name}", value) rescue nil
+ # inflate the value if it isn't a primitive
+ klass = Module.const_get("FHIR::#{meta['type']}") rescue nil
+ if !klass.nil? && !value.nil?
+ # handle array of objects
+ if value.is_a?(Array)
+ value.map! do |child|
+ obj = child
+ unless [FHIR::RESOURCES, FHIR::TYPES].flatten.include? child.class.name.gsub('FHIR::', '')
+ obj = make_child(child, klass)
end
- else # handle single object
- value = make_child(value, klass)
- # if there is only one of these, but cardinality allows more, we need to wrap it in an array.
- value = [ value ] if(value && (meta['max'] > 1))
- end
- self.instance_variable_set("@#{local_name}", value)
- elsif !FHIR::PRIMITIVES.include?(meta['type']) && meta['type']!='xhtml'
- FHIR.logger.error("Unhandled and unrecognized class/type: #{meta['type']}")
- else
- # primitive
- if value.is_a?(Array)
- # array of primitives
- value.map!{|child| convert_primitive(child, meta)}
- self.instance_variable_set("@#{local_name}", value)
- else
- # single primitive
- value = convert_primitive(value, meta)
- # if there is only one of these, but cardinality allows more, we need to wrap it in an array.
- value = [ value ] if(value && (meta['max'] > 1))
- self.instance_variable_set("@#{local_name}", value)
+ obj
end
- end # !klass && !nil?
- end # !meta.nil?
+ else # handle single object
+ value = make_child(value, klass)
+ # if there is only one of these, but cardinality allows more, we need to wrap it in an array.
+ value = [value] if value && (meta['max'] > 1)
+ end
+ instance_variable_set("@#{local_name}", value)
+ elsif !FHIR::PRIMITIVES.include?(meta['type']) && meta['type'] != 'xhtml'
+ FHIR.logger.error("Unhandled and unrecognized class/type: #{meta['type']}")
+ elsif value.is_a?(Array)
+ # array of primitives
+ value.map! { |child| convert_primitive(child, meta) }
+ instance_variable_set("@#{local_name}", value)
+ else
+ # single primitive
+ value = convert_primitive(value, meta)
+ # if there is only one of these, but cardinality allows more, we need to wrap it in an array.
+ value = [value] if value && (meta['max'] > 1)
+ instance_variable_set("@#{local_name}", value)
+ end # !klass && !nil?
end # hash loop
self
end
@@ -94,21 +89,16 @@ def make_child(child, klass)
end
def convert_primitive(value, meta)
- return value if !value.is_a?(String)
+ return value unless value.is_a?(String)
rval = value
- if meta['type']=='boolean'
- rval = false
- rval = true if value.strip=='true'
+ if meta['type'] == 'boolean'
+ rval = value.strip == 'true'
elsif FHIR::PRIMITIVES.include?(meta['type'])
- primitive_meta = FHIR::PRIMITIVES[ meta['type'] ]
+ primitive_meta = FHIR::PRIMITIVES[meta['type']]
if primitive_meta['type'] == 'number'
rval = BigDecimal.new(value.to_s)
- if rval.frac==0
- rval = rval.to_i
- else
- rval = rval.to_f
- end
+ rval = rval.frac.zero? ? rval.to_i : rval.to_f
end # primitive is number
end # boolean else
rval
diff --git a/lib/fhir_models/bootstrap/json.rb b/lib/fhir_models/bootstrap/json.rb
index 9478adcb8..7cbe451d6 100644
--- a/lib/fhir_models/bootstrap/json.rb
+++ b/lib/fhir_models/bootstrap/json.rb
@@ -1,13 +1,11 @@
module FHIR
module Json
-
#
# This module includes methods to serialize or deserialize FHIR resources to and from JSON.
#
def to_json
- hash = self.to_hash
- JSON.pretty_unparse(hash)
+ JSON.pretty_unparse(to_hash)
end
def self.from_json(json)
@@ -23,6 +21,5 @@ def self.from_json(json)
end
resource
end
-
end
end
diff --git a/lib/fhir_models/bootstrap/model.rb b/lib/fhir_models/bootstrap/model.rb
index 1ffd0b69c..5358467eb 100644
--- a/lib/fhir_models/bootstrap/model.rb
+++ b/lib/fhir_models/bootstrap/model.rb
@@ -5,12 +5,11 @@
module FHIR
class Model
-
- def initialize(hash={})
- self.from_hash(hash)
+ def initialize(hash = {})
+ from_hash(hash)
self.class::METADATA.each do |key, value|
- if value['max'] > 1 && self.instance_variable_get("@#{key}").nil?
- self.instance_variable_set("@#{key}".to_sym, [])
+ if value['max'] > 1 && instance_variable_get("@#{key}").nil?
+ instance_variable_set("@#{key}".to_sym, [])
end
end
end
@@ -19,55 +18,47 @@ def method_missing(method, *args, &block)
if defined?(self.class::MULTIPLE_TYPES) && self.class::MULTIPLE_TYPES[method.to_s]
self.class::MULTIPLE_TYPES[method.to_s].each do |type|
type[0] = type[0].upcase
- value = self.method("#{method}#{type}").call()
- return value if !value.nil?
+ value = self.method("#{method}#{type}").call
+ return value unless value.nil?
end
return nil
- elsif (!@extension.nil? && !@extension.empty?)
+ elsif !@extension.nil? && !@extension.empty?
ext = @extension.select do |x|
name = x.url.tr('-', '_').split('/').last
anchor = name.split('#').last
- (method.to_s==name || method.to_s==anchor)
+ (method.to_s == name || method.to_s == anchor)
end
- if !ext.first.nil?
- if !ext.first.value.nil?
- return ext.first.value
- else
- return ext.first
- end
+ unless ext.first.nil?
+ return ext.first.value.nil? ? ext.first : ext.first.value
end
- elsif (!@modifierExtension.nil? && !@modifierExtension.empty?)
+ elsif !@modifierExtension.nil? && !@modifierExtension.empty?
ext = @modifierExtension.select do |x|
name = x.url.tr('-', '_').split('/').last
anchor = name.split('#').last
- (method.to_s==name || method.to_s==anchor)
+ (method.to_s == name || method.to_s == anchor)
end
- if !ext.first.nil?
- if !ext.first.value.nil?
- return ext.first.value
- else
- return ext.first
- end
+ unless ext.first.nil?
+ return ext.first.value.nil? ? ext.first : ext.first.value
end
end
super(method, *args, &block)
end
def to_reference
- FHIR::Reference.new(reference: "#{self.class.name.split('::').last}/#{self.id}")
+ FHIR::Reference.new(reference: "#{self.class.name.split('::').last}/#{id}")
end
- def equals?(other, exclude=[])
+ def equals?(other, exclude = [])
(self.class::METADATA.keys - exclude).each do |attribute|
- return false unless compare_attribute(self.instance_variable_get("@#{attribute}".to_sym), other.instance_variable_get("@#{attribute}".to_sym), exclude)
+ return false unless compare_attribute(instance_variable_get("@#{attribute}".to_sym), other.instance_variable_get("@#{attribute}".to_sym), exclude)
end
true
end
- def mismatch(other, exclude=[])
+ def mismatch(other, exclude = [])
misses = []
(self.class::METADATA.keys - exclude).each do |key|
- these = attribute_mismatch(self.instance_variable_get("@#{key}".to_sym), other.instance_variable_get("@#{key}".to_sym), exclude)
+ these = attribute_mismatch(instance_variable_get("@#{key}".to_sym), other.instance_variable_get("@#{key}".to_sym), exclude)
if !these || (these.is_a?(Array) && !these.empty?)
misses << "#{self.class}::#{key}"
misses.concat these if these.is_a?(Array)
@@ -76,7 +67,7 @@ def mismatch(other, exclude=[])
misses
end
- def attribute_mismatch(left, right, exclude=[])
+ def attribute_mismatch(left, right, exclude = [])
if left.respond_to?(:mismatch) && right.respond_to?(:mismatch)
left.mismatch right, exclude
else
@@ -84,12 +75,12 @@ def attribute_mismatch(left, right, exclude=[])
end
end
- def compare_attribute(left, right, exclude=[])
+ def compare_attribute(left, right, exclude = [])
if left.respond_to?(:equals?) && right.respond_to?(:equals?)
left.equals? right, exclude
elsif left.is_a?(Array) && right.is_a?(Array) && (left.length == right.length)
result = true
- (0...(left.length)).each {|i| result &&= compare_attribute(left[i], right[i], exclude)}
+ (0...(left.length)).each { |i| result &&= compare_attribute(left[i], right[i], exclude) }
result
else
left == right
@@ -100,12 +91,12 @@ def is_valid?
validate.empty?
end
- def validate(contained=nil)
+ def validate(contained = nil)
validate_profile(self.class::METADATA, contained)
end
- def validate_profile(metadata, contained=nil)
- contained_here = [ self.instance_variable_get('@contained'.to_sym) ].flatten
+ def validate_profile(metadata, contained = nil)
+ contained_here = [instance_variable_get('@contained'.to_sym)].flatten
contained_here << contained
contained_here = contained_here.flatten.compact
errors = {}
@@ -114,14 +105,14 @@ def validate_profile(metadata, contained=nil)
# this field has been 'sliced'
meta.each do |slice|
local_name = slice['local_name'] || field
- value = [ self.instance_variable_get("@#{local_name}".to_sym) ].flatten.compact
+ value = [instance_variable_get("@#{local_name}".to_sym)].flatten.compact
subset = [] # subset is the values associated with just this slice
- if slice['type']=='Extension'
- if slice['type_profiles']
- subset = value.select{|x|slice['type_profiles'].include?(x.url)}
- else
- subset = value
- end
+ if slice['type'] == 'Extension'
+ subset = if slice['type_profiles']
+ value.select { |x| slice['type_profiles'].include?(x.url) }
+ else
+ value
+ end
else
FHIR.logger.warn 'Validation not supported on slices (except for Extensions)'
end
@@ -129,7 +120,7 @@ def validate_profile(metadata, contained=nil)
end
else
local_name = meta['local_name'] || field
- value = [ self.instance_variable_get("@#{local_name}".to_sym) ].flatten.compact
+ value = [instance_variable_get("@#{local_name}".to_sym)].flatten.compact
validate_field(field, value, contained_here, meta, errors)
end
end # metadata.each
@@ -143,17 +134,17 @@ def validate_profile(metadata, contained=nil)
count += 1 if errors[typename]
# check which multiple data types are actually present, not just errors
# actually, this might be allowed depending on cardinality
- value = self.instance_variable_get("@#{typename}")
+ value = instance_variable_get("@#{typename}")
present << typename if !value.nil? || (value.is_a?(Array) && !value.empty?)
end
- errors[prefix] = ["#{prefix}[x]: more than one type present."] if(count > 1)
+ errors[prefix] = ["#{prefix}[x]: more than one type present."] if count > 1
# remove errors for suffixes that are not present
suffixes.each do |suffix|
typename = "#{prefix}#{suffix[0].upcase}#{suffix[1..-1]}"
- errors.delete(typename) if !present.include?(typename)
+ errors.delete(typename) unless present.include?(typename)
end
end
- errors.keep_if{|_k, v|(v && !v.empty?)}
+ errors.keep_if { |_k, v| (v && !v.empty?) }
end
# ----- validate a field -----
@@ -166,7 +157,7 @@ def validate_field(field, value, contained_here, meta, errors)
errors[field] = []
# check cardinality
count = value.length
- if !( count>=meta['min'] && count<=meta['max'] )
+ unless count >= meta['min'] && count <= meta['max']
errors[field] << "#{meta['path']}: invalid cardinality. Found #{count} expected #{meta['min']}..#{meta['max']}"
end
# check datatype
@@ -174,108 +165,105 @@ def validate_field(field, value, contained_here, meta, errors)
value.each do |v|
klassname = v.class.name.gsub('FHIR::', '')
# if the data type is a generic Resource, validate it
- if datatype=='Resource'
+ if datatype == 'Resource'
if FHIR::RESOURCES.include?(klassname)
validation = v.validate(contained_here)
- errors[field] << validation if !validation.empty?
+ errors[field] << validation unless validation.empty?
else
errors[field] << "#{meta['path']}: expected Resource, found #{klassname}"
end
# if the data type is a Reference, validate it, but also check the
# type_profiles metadata. For example, if it should be a Reference(Patient)
- elsif datatype=='Reference'
- if klassname=='Reference'
+ elsif datatype == 'Reference'
+ if klassname == 'Reference'
validation = v.validate(contained_here)
- errors[field] << validation if !validation.empty?
+ errors[field] << validation unless validation.empty?
validate_reference_type(v, meta, contained_here, errors[field])
else
errors[field] << "#{meta['path']}: expected Reference, found #{klassname}"
end
# if the data type is a particular resource or complex type
- elsif (FHIR::RESOURCES.include?(datatype) || FHIR::TYPES.include?(datatype))
- if datatype==klassname
+ elsif FHIR::RESOURCES.include?(datatype) || FHIR::TYPES.include?(datatype)
+ if datatype == klassname
validation = v.validate(contained_here)
- errors[field] << validation if !validation.empty?
+ errors[field] << validation unless validation.empty?
else
errors[field] << "#{meta['path']}: incorrect type. Found #{klassname} expected #{datatype}"
end
# if the data type is a primitive, test the regular expression (if any)
elsif FHIR::PRIMITIVES.include?(datatype)
primitive_meta = FHIR::PRIMITIVES[datatype]
- if primitive_meta['regex'] && primitive_meta['type']!='number'
+ if primitive_meta['regex'] && primitive_meta['type'] != 'number'
match = (v =~ Regexp.new(primitive_meta['regex']))
errors[field] << "#{meta['path']}: #{v} does not match #{datatype} regex" if match.nil?
else
- errors[field] << "#{meta['path']}: #{v} is not a valid #{datatype}" if !is_primitive?(datatype, v)
+ errors[field] << "#{meta['path']}: #{v} is not a valid #{datatype}" unless is_primitive?(datatype, v)
end
end
# check binding
- if meta['binding']
- if meta['binding']['strength']=='required'
- the_codes = [ v ]
- if meta['type']=='Coding'
- the_codes = [ v.code ]
- elsif meta['type']=='CodeableConcept'
- the_codes = v.coding.map{|c|c.code}.compact
- end
- has_valid_code = false
- if meta['valid_codes']
- meta['valid_codes'].each do |_key, codes|
- has_valid_code = true if !(codes&the_codes).empty?
- break if has_valid_code
- end
- else
- the_codes.each do |code|
- has_valid_code = true if check_binding(meta['binding']['uri'], code)
- break if has_valid_code
- end
- end
- errors[field] << "#{meta['path']}: invalid codes #{the_codes}" if !has_valid_code
+ next unless meta['binding']
+ next unless meta['binding']['strength'] == 'required'
+ the_codes = [v]
+ if meta['type'] == 'Coding'
+ the_codes = [v.code]
+ elsif meta['type'] == 'CodeableConcept'
+ the_codes = v.coding.map(&:code).compact
+ end
+ has_valid_code = false
+ if meta['valid_codes']
+ meta['valid_codes'].each do |_key, codes|
+ has_valid_code = true unless (codes & the_codes).empty?
+ break if has_valid_code
+ end
+ else
+ the_codes.each do |code|
+ has_valid_code = true if check_binding(meta['binding']['uri'], code)
+ break if has_valid_code
end
end
+ errors[field] << "#{meta['path']}: invalid codes #{the_codes}" unless has_valid_code
end # value.each
errors.delete(field) if errors[field].empty?
end
def validate_reference_type(ref, meta, contained_here, errors)
- if ref.reference && meta['type_profiles']
- matches_one_profile = false
- meta['type_profiles'].each do |p|
- basetype = p.split('/').last
- matches_one_profile = true if ref.reference.include?(basetype)
- # check profiled resources
- profile_basetype = FHIR::Definitions.get_basetype(p)
- matches_one_profile = true if profile_basetype && ref.reference.include?(profile_basetype)
- end
- matches_one_profile = true if meta['type_profiles'].include?('http://hl7.org/fhir/StructureDefinition/Resource')
- if !matches_one_profile && ref.reference.start_with?('#')
- # we need to look at the local contained resources
- r = contained_here.find{|x|x.id==ref.reference[1..-1]}
- if !r.nil?
- meta['type_profiles'].each do |p|
- p = p.split('/').last
- matches_one_profile = true if r.resourceType==p
- end
- else
- FHIR.logger.warn "Unable to resolve reference #{ref.reference}"
+ return unless ref.reference && meta['type_profiles']
+ matches_one_profile = false
+ meta['type_profiles'].each do |p|
+ basetype = p.split('/').last
+ matches_one_profile = true if ref.reference.include?(basetype)
+ # check profiled resources
+ profile_basetype = FHIR::Definitions.get_basetype(p)
+ matches_one_profile = true if profile_basetype && ref.reference.include?(profile_basetype)
+ end
+ matches_one_profile = true if meta['type_profiles'].include?('http://hl7.org/fhir/StructureDefinition/Resource')
+ if !matches_one_profile && ref.reference.start_with?('#')
+ # we need to look at the local contained resources
+ r = contained_here.find { |x| x.id == ref.reference[1..-1] }
+ if !r.nil?
+ meta['type_profiles'].each do |p|
+ p = p.split('/').last
+ matches_one_profile = true if r.resourceType == p
end
+ else
+ FHIR.logger.warn "Unable to resolve reference #{ref.reference}"
end
- errors << "#{meta['path']}: incorrect Reference type, expected #{meta['type_profiles'].map{|x|x.split('/').last}.join('|')}" if !matches_one_profile
end
+ errors << "#{meta['path']}: incorrect Reference type, expected #{meta['type_profiles'].map { |x| x.split('/').last }.join('|')}" unless matches_one_profile
end
def is_primitive?(datatype, value)
# Remaining data types: handle special cases before checking type StructureDefinitions
case datatype.downcase
when 'boolean'
- value==true || value==false || value.downcase=='true' || value.downcase=='false'
+ value == true || value == false || value.downcase == 'true' || value.downcase == 'false'
when 'code'
- value.is_a?(String) && value.size>=1 && value.size==value.rstrip.size
+ value.is_a?(String) && value.size >= 1 && value.size == value.rstrip.size
when 'string', 'markdown'
value.is_a?(String)
when 'xhtml'
fragment = Nokogiri::HTML::DocumentFragment.parse(value)
- value.is_a?(String) && fragment.errors.size == 0
+ value.is_a?(String) && fragment.errors.size.zero?
when 'base64binary'
regex = /[^0-9\+\/\=A-Za-z\r\n ]/
value.is_a?(String) && (regex =~ value).nil?
@@ -298,13 +286,13 @@ def is_primitive?(datatype, value)
def check_binding(uri, value)
valid = false
- if uri=='http://hl7.org/fhir/ValueSet/content-type' || uri=='http://www.rfc-editor.org/bcp/bcp13.txt'
+ if uri == 'http://hl7.org/fhir/ValueSet/content-type' || uri == 'http://www.rfc-editor.org/bcp/bcp13.txt'
matches = MIME::Types[value]
json_or_xml = value.downcase.include?('xml') || value.downcase.include?('json')
known_weird = ['application/cql+text'].include?(value)
valid = json_or_xml || known_weird || (!matches.nil? && !matches.empty?)
- elsif uri=='http://hl7.org/fhir/ValueSet/languages' || uri=='http://tools.ietf.org/html/bcp47'
- has_region = (!(value =~ /-/).nil?)
+ elsif uri == 'http://hl7.org/fhir/ValueSet/languages' || uri == 'http://tools.ietf.org/html/bcp47'
+ has_region = !(value =~ /-/).nil?
valid = !BCP47::Language.identify(value.downcase).nil? && (!has_region || !BCP47::Region.identify(value.upcase).nil?)
else
FHIR.logger.warn "Unable to check_binding on unknown ValueSet: #{uri}"
@@ -313,6 +301,5 @@ def check_binding(uri, value)
end
private :validate_reference_type, :is_primitive?, :check_binding, :validate_field
-
end
end
diff --git a/lib/fhir_models/bootstrap/preprocess.rb b/lib/fhir_models/bootstrap/preprocess.rb
index 809d929ca..de5f9713e 100644
--- a/lib/fhir_models/bootstrap/preprocess.rb
+++ b/lib/fhir_models/bootstrap/preprocess.rb
@@ -2,7 +2,6 @@
module FHIR
module Boot
class Preprocess
-
def self.pre_process_bundle(filename)
# Read the file
puts "Processing #{File.basename(filename)}..."
@@ -12,18 +11,17 @@ def self.pre_process_bundle(filename)
# Remove entries that do not interest us: CompartmentDefinitions, OperationDefinitions, Conformance statements
hash['entry'].select! do |entry|
- ['StructureDefinition', 'ValueSet', 'CodeSystem', 'SearchParameter'].include? entry['resource']['resourceType']
+ %w(StructureDefinition ValueSet CodeSystem SearchParameter).include? entry['resource']['resourceType']
end
# Remove unnecessary elements from the hash
hash['entry'].each do |entry|
- if entry['resource']
- pre_process_structuredefinition(entry['resource']) if 'StructureDefinition'==entry['resource']['resourceType']
- pre_process_valueset(entry['resource']) if 'ValueSet'==entry['resource']['resourceType']
- pre_process_codesystem(entry['resource']) if 'CodeSystem'==entry['resource']['resourceType']
- pre_process_searchparam(entry['resource']) if 'SearchParameter'==entry['resource']['resourceType']
- remove_fhir_comments(entry['resource'])
- end
+ next unless entry['resource']
+ pre_process_structuredefinition(entry['resource']) if 'StructureDefinition' == entry['resource']['resourceType']
+ pre_process_valueset(entry['resource']) if 'ValueSet' == entry['resource']['resourceType']
+ pre_process_codesystem(entry['resource']) if 'CodeSystem' == entry['resource']['resourceType']
+ pre_process_searchparam(entry['resource']) if 'SearchParameter' == entry['resource']['resourceType']
+ remove_fhir_comments(entry['resource'])
end
# Output the post processed file
@@ -31,47 +29,45 @@ def self.pre_process_bundle(filename)
f.write(JSON.pretty_unparse(hash))
f.close
finish = File.size(filename)
- puts " Removed #{(start-finish) / 1024} KB" if (start!=finish)
+ puts " Removed #{(start - finish) / 1024} KB" if start != finish
end
def self.pre_process_structuredefinition(hash)
# Remove large HTML narratives and unused content
- ['text', 'publisher', 'contact', 'description', 'requirements', 'mapping'].each{|key| hash.delete(key) }
+ %w(text publisher contact description requirements mapping).each { |key| hash.delete(key) }
# Remove unused descriptions within the snapshot elements
- if(hash['snapshot'])
+ if hash['snapshot']
hash['snapshot']['element'].each do |element|
- ['short', 'definition', 'comments', 'requirements', 'alias', 'mapping'].each{|key| element.delete(key) }
+ %w(short definition comments requirements alias mapping).each { |key| element.delete(key) }
end
end
# Remove unused descriptions within the differential elements
- if(hash['differential'])
+ if hash['differential']
hash['differential']['element'].each do |element|
- ['short', 'definition', 'comments', 'requirements', 'alias', 'mapping'].each{|key| element.delete(key) }
+ %w(short definition comments requirements alias mapping).each { |key| element.delete(key) }
end
end
end
def self.pre_process_valueset(hash)
# Remove large HTML narratives and unused content
- ['meta', 'text', 'publisher', 'contact', 'description', 'requirements'].each{|key| hash.delete(key) }
+ %w(meta text publisher contact description requirements).each { |key| hash.delete(key) }
- if(hash['compose'] && hash['compose']['include'])
+ if hash['compose'] && hash['compose']['include']
hash['compose']['include'].each do |element|
- if(element['concept'])
- element['concept'].each do |concept|
- concept.delete('designation')
- end
+ next unless element['concept']
+ element['concept'].each do |concept|
+ concept.delete('designation')
end
end
end
- if(hash['compose'] && hash['compose']['exclude'])
+ if hash['compose'] && hash['compose']['exclude']
hash['compose']['exclude'].each do |element|
- if(element['concept'])
- element['concept'].each do |concept|
- concept.delete('designation')
- end
+ next unless element['concept']
+ element['concept'].each do |concept|
+ concept.delete('designation')
end
end
end
@@ -79,9 +75,9 @@ def self.pre_process_valueset(hash)
def self.pre_process_codesystem(hash)
# Remove large HTML narratives and unused content
- ['meta', 'text', 'publisher', 'contact', 'description', 'requirements'].each{|key| hash.delete(key) }
+ %w(meta text publisher contact description requirements).each { |key| hash.delete(key) }
- if(hash['concept'])
+ if hash['concept']
hash['concept'].each do |concept|
pre_process_codesystem_concept(concept)
end
@@ -89,7 +85,7 @@ def self.pre_process_codesystem(hash)
end
def self.pre_process_codesystem_concept(hash)
- ['extension', 'definition', 'designation'].each{|key| hash.delete(key) }
+ %w(extension definition designation).each { |key| hash.delete(key) }
if hash['concept']
hash['concept'].each do |concept|
pre_process_codesystem_concept(concept)
@@ -99,7 +95,7 @@ def self.pre_process_codesystem_concept(hash)
def self.pre_process_searchparam(hash)
# Remove large HTML narratives and unused content
- ['id', 'url', 'name', 'date', 'publisher', 'contact', 'description', 'xpathUsage'].each{|key| hash.delete(key) }
+ %w(id url name date publisher contact description xpathUsage).each { |key| hash.delete(key) }
end
def self.remove_fhir_comments(hash)
@@ -127,16 +123,15 @@ def self.pre_process_schema(filename)
# Remove annotations
doc = Nokogiri::XML(raw)
doc.root.add_namespace_definition('xs', 'http://www.w3.org/2001/XMLSchema')
- doc.search('//xs:annotation').each{ |e| e.remove }
+ doc.search('//xs:annotation').each(&:remove)
# Output the post processed file
f = File.open(filename, 'w:UTF-8')
f.write(doc.to_xml)
f.close
finish = File.size(filename)
- puts " Removed #{(start-finish) / 1024} KB" if (start!=finish)
+ puts " Removed #{(start - finish) / 1024} KB" if start != finish
end
-
end
end
end
diff --git a/lib/fhir_models/bootstrap/template.rb b/lib/fhir_models/bootstrap/template.rb
index 280291ce4..148ce0b11 100644
--- a/lib/fhir_models/bootstrap/template.rb
+++ b/lib/fhir_models/bootstrap/template.rb
@@ -11,7 +11,7 @@ class Template
attr_accessor :templates
attr_accessor :top_level
- def initialize(name=['Template'], top_level=false)
+ def initialize(name = ['Template'], top_level = false)
@name = name
@hierarchy = []
@kind = nil
@@ -32,7 +32,7 @@ def get_metadata
if metadata[field.name].is_a?(Array)
metadata[field.name] << x
else
- metadata[field.name] = [ metadata[field.name], x ]
+ metadata[field.name] = [metadata[field.name], x]
end
else
metadata[field.name] = field.serialize
@@ -42,32 +42,32 @@ def get_metadata
metadata
end
- def to_s(offset=0)
+ def to_s(offset = 0)
# create an array of Strings, one per line
s = []
- # TODO insert copyright statement
+ # TODO: insert copyright statement
# always declare the FHIR module
s << 'module FHIR' if @top_level
@name.each_with_index do |name, index|
- space = indent(index+1, offset)
+ space = indent(index + 1, offset)
type = 'module'
- type = 'class' if index==@name.length-1
+ type = 'class' if index == @name.length - 1
classdef = "#{space}#{type} #{name}"
classdef += ' < FHIR::Model' if type == 'class'
s << classdef
end
# include modules
- space = indent(@name.length+1, offset)
- s << "#{space}include FHIR::Hashable" if(@name.length > 0)
- s << "#{space}include FHIR::Json" if(@name.length > 0)
- s << "#{space}include FHIR::Xml" if(@name.length > 0)
+ space = indent(@name.length + 1, offset)
+ s << "#{space}include FHIR::Hashable" unless @name.empty?
+ s << "#{space}include FHIR::Json" unless @name.empty?
+ s << "#{space}include FHIR::Xml" unless @name.empty?
s << ''
# add mandatory METADATA constant
metadata = get_metadata
- @constants['METADATA'] = metadata if !metadata.empty?
+ @constants['METADATA'] = metadata unless metadata.empty?
# add constants
@constants.each do |constant, value|
@@ -89,7 +89,7 @@ def to_s(offset=0)
# add internal nested classes
@templates.each do |template|
- s << template.to_s(space.length-2)
+ s << template.to_s(space.length - 2)
s << ''
end
@@ -97,7 +97,7 @@ def to_s(offset=0)
max_name_size = 0
@fields.each do |f|
name = f.local_name || f.name
- max_name_size=name.length if(name.length > max_name_size)
+ max_name_size = name.length if name.length > max_name_size
end
max_name_size += 1
@@ -105,18 +105,18 @@ def to_s(offset=0)
@fields.each do |field|
s << "#{space}attr_accessor :"
local_name = field.local_name || field.name
- s[-1] << ("%-#{max_name_size}s" % "#{local_name}")
+ s[-1] << ("%-#{max_name_size}s" % local_name.to_s)
# add comment after field declaration
s[-1] << "# #{field.min}-#{field.max} "
- s[-1] << '[ ' if(field.max.to_i > 1 || field.max=='*')
+ s[-1] << '[ ' if field.max.to_i > 1 || field.max == '*'
s[-1] << field.type
- if field.type=='Reference'
- s[-1] << "(#{ field.type_profiles.map{|p|p.split('/').last}.join('|') })"
+ if field.type == 'Reference'
+ s[-1] << "(#{field.type_profiles.map { |p| p.split('/').last }.join('|')})"
end
- s[-1] << ' ]' if(field.max.to_i > 1 || field.max=='*')
+ s[-1] << ' ]' if field.max.to_i > 1 || field.max == '*'
end
- if @top_level && @kind=='resource'
+ if @top_level && @kind == 'resource'
s << ''
s << "#{space}def resourceType"
s << "#{space} '#{@name.first}'"
@@ -124,18 +124,17 @@ def to_s(offset=0)
end
# close all the class and module declarations
- (0..@name.length-1).reverse_each do |index|
- space = indent(index+1, offset)
+ (0..@name.length - 1).reverse_each do |index|
+ space = indent(index + 1, offset)
s << "#{space}end"
end
s << 'end' if @top_level
s.join("\n")
end
- def indent(level=0, offset)
- ' '*(offset) + ' '*(level)
+ def indent(level = 0, offset = 0)
+ ' ' * offset + ' ' * level
end
-
end
end
end
diff --git a/lib/fhir_models/bootstrap/xml.rb b/lib/fhir_models/bootstrap/xml.rb
index c847eb431..f388eb975 100644
--- a/lib/fhir_models/bootstrap/xml.rb
+++ b/lib/fhir_models/bootstrap/xml.rb
@@ -1,17 +1,16 @@
require 'nokogiri'
module FHIR
module Xml
-
#
# This module includes methods to serialize or deserialize FHIR resources to and from XML.
#
def to_xml
- hash = self.to_hash
+ hash = to_hash
hash.delete('resourceType')
doc = Nokogiri::XML::Document.new
doc.encoding = 'utf-8'
- doc.root = hash_to_xml_node(self.resourceType, hash, doc)
+ doc.root = hash_to_xml_node(resourceType, hash, doc)
doc.root.default_namespace = 'http://hl7.org/fhir'
doc.to_xml
end
@@ -33,8 +32,8 @@ def hash_to_xml_node(name, hash, doc)
end
hash.each do |key, value|
- next if(['extension', 'modifierExtension'].include?(name) && key=='url')
- next if(key == 'id' && !FHIR::RESOURCES.include?(name))
+ next if %w(extension modifierExtension).include?(name) && key == 'url'
+ next if key == 'id' && !FHIR::RESOURCES.include?(name)
if value.is_a?(Hash)
node.add_child(hash_to_xml_node(key, value, doc))
elsif value.is_a?(Array)
@@ -49,11 +48,11 @@ def hash_to_xml_node(name, hash, doc)
end
else
child = Nokogiri::XML::Node.new(key, doc)
- if(name=='text' && key=='div')
+ if name == 'text' && key == 'div'
child.set_attribute('xmlns', 'http://www.w3.org/1999/xhtml')
html = value.strip
if html.start_with?('<div')
- html = html[html.index('>')+1..-7]
+ html = html[html.index('>') + 1..-7]
end
child.inner_html = html
else
@@ -62,7 +61,7 @@ def hash_to_xml_node(name, hash, doc)
node.add_child(child)
end
end
- node.set_attribute('url', hash['url']) if ['extension', 'modifierExtension'].include?(name)
+ node.set_attribute('url', hash['url']) if %w(extension modifierExtension).include?(name)
node.set_attribute('id', hash['id']) if hash['id'] && !FHIR::RESOURCES.include?(name)
node
end
@@ -91,7 +90,7 @@ def self.xml_node_to_hash(node)
next if [Nokogiri::XML::Text, Nokogiri::XML::Comment].include?(child.class)
key = child.name
- if node.name=='text' && key=='div'
+ if node.name == 'text' && key == 'div'
hash[key] = child.to_xml
else
value = child.get_attribute('value')
@@ -100,24 +99,23 @@ def self.xml_node_to_hash(node)
end
if hash[key]
- hash[key] = [ hash[key] ] unless hash[key].is_a?(Array)
+ hash[key] = [hash[key]] unless hash[key].is_a?(Array)
hash[key] << value
else
hash[key] = value
end
end
end
- hash['url'] = node.get_attribute('url') if ['extension', 'modifierExtension'].include?(node.name)
+ hash['url'] = node.get_attribute('url') if %w(extension modifierExtension).include?(node.name)
hash['id'] = node.get_attribute('id') if node.get_attribute('id') # Testscript fixture ids (applies to any BackboneElement)
hash['resourceType'] = node.name if FHIR::RESOURCES.include?(node.name)
- if( # If this hash contains nothing but an embedded resource, we should return that
- # embedded resource without the wrapper
- hash.keys.length==1 &&
- FHIR::RESOURCES.include?(hash.keys.first) &&
- hash.values.first.is_a?(Hash) &&
- hash.values.first['resourceType']==hash.keys.first
- )
+ # If this hash contains nothing but an embedded resource, we should return that
+ # embedded resource without the wrapper
+ if hash.keys.length == 1 &&
+ FHIR::RESOURCES.include?(hash.keys.first) &&
+ hash.values.first.is_a?(Hash) &&
+ hash.values.first['resourceType'] == hash.keys.first
hash.values.first
else
hash
@@ -137,6 +135,5 @@ def self.validate(xml)
private :hash_to_xml_node
private_class_method :xml_node_to_hash
-
end
end
diff --git a/lib/fhir_models/fhir_ext/structure_definition.rb b/lib/fhir_models/fhir_ext/structure_definition.rb
index a4cc17238..817a4292b 100644
--- a/lib/fhir_models/fhir_ext/structure_definition.rb
+++ b/lib/fhir_models/fhir_ext/structure_definition.rb
@@ -4,7 +4,6 @@
module FHIR
class StructureDefinition
-
attr_accessor :finding
attr_accessor :errors
attr_accessor :warnings
@@ -25,18 +24,18 @@ def is_compatible?(another_definition)
@finding.profileIdB = another_definition.id if another_definition.respond_to?(:id)
if !(another_definition.is_a? FHIR::StructureDefinition)
- @errors << @finding.error('', '', 'Not a StructureDefinition', 'StructureDefinition', "#{another_definition.class.name}")
+ @errors << @finding.error('', '', 'Not a StructureDefinition', 'StructureDefinition', another_definition.class.name.to_s)
return false
- elsif another_definition.snapshot.element[0].path!=snapshot.element[0].path
- @errors << @finding.error('', '', 'Incompatible resourceType', @finding.resourceType, "#{another_definition.snapshot.element[0].path}")
+ elsif another_definition.snapshot.element[0].path != snapshot.element[0].path
+ @errors << @finding.error('', '', 'Incompatible resourceType', @finding.resourceType, another_definition.snapshot.element[0].path.to_s)
return false
end
left_elements = Array.new(snapshot.element)
right_elements = Array.new(another_definition.snapshot.element)
- left_paths = left_elements.map { |e| e.path }
- right_paths = right_elements.map { |e| e.path }
+ left_paths = left_elements.map(&:path)
+ right_paths = right_elements.map(&:path)
# StructureDefinitions don't always include all base attributes (for example, of a ContactPoint)
# if nothing is modified from the base definition, so we have to add them in if they are missing.
@@ -52,8 +51,8 @@ def is_compatible?(another_definition)
add_missing_elements(another_definition.id, right_missing, right_elements, base_elements)
# update paths
- left_paths = left_elements.map { |e| e.path }
- right_paths = right_elements.map { |e| e.path }
+ left_paths = left_elements.map(&:path)
+ right_paths = right_elements.map(&:path)
# recalculate the missing attributes
left_missing = right_paths - left_paths
@@ -61,27 +60,25 @@ def is_compatible?(another_definition)
# generate warnings for missing fields (ignoring extensions)
left_missing.each do |e|
- if !e.include? 'extension'
- elem = get_element_by_path(e, right_elements)
- if !elem.min.nil? && elem.min > 0
- @errors << @finding.error(e, 'min', 'Missing REQUIRED element', 'Missing', "#{elem.min}")
- elsif elem.isModifier==true
- @errors << @finding.error(e, 'isModifier', 'Missing MODIFIER element', 'Missing', "#{elem.isModifier}")
- else
- @warnings << @finding.warning(e, '', 'Missing element', 'Missing', 'Defined')
- end
+ next if e.include? 'extension'
+ elem = get_element_by_path(e, right_elements)
+ if !elem.min.nil? && elem.min > 0
+ @errors << @finding.error(e, 'min', 'Missing REQUIRED element', 'Missing', elem.min.to_s)
+ elsif elem.isModifier == true
+ @errors << @finding.error(e, 'isModifier', 'Missing MODIFIER element', 'Missing', elem.isModifier.to_s)
+ else
+ @warnings << @finding.warning(e, '', 'Missing element', 'Missing', 'Defined')
end
end
right_missing.each do |e|
- if !e.include? 'extension'
- elem = get_element_by_path(e, left_elements)
- if !elem.min.nil? && elem.min > 0
- @errors << @finding.error(e, 'min', 'Missing REQUIRED element', "#{elem.min}", 'Missing')
- elsif elem.isModifier==true
- @errors << @finding.error(e, 'isModifier', 'Missing MODIFIER element', "#{elem.isModifier}", 'Missing')
- else
- @warnings << @finding.warning(e, '', 'Missing element', 'Defined', 'Missing')
- end
+ next if e.include? 'extension'
+ elem = get_element_by_path(e, left_elements)
+ if !elem.min.nil? && elem.min > 0
+ @errors << @finding.error(e, 'min', 'Missing REQUIRED element', elem.min.to_s, 'Missing')
+ elsif elem.isModifier == true
+ @errors << @finding.error(e, 'isModifier', 'Missing MODIFIER element', elem.isModifier.to_s, 'Missing')
+ else
+ @warnings << @finding.warning(e, '', 'Missing element', 'Defined', 'Missing')
end
end
@@ -114,13 +111,13 @@ def is_compatible?(another_definition)
checked_extensions = []
left_extensions.each do |x|
y = get_extension(x.name, right_extensions)
- if !y.nil?
+ unless y.nil?
# both profiles share an extension with the same name
checked_extensions << x.name
compare_extension_definition(x, y, another_definition)
end
y = get_extension(x.type[0].profile, right_extensions)
- if !y.nil? && x.name!=y.name
+ if !y.nil? && x.name != y.name
# both profiles share the same extension definition but with a different name
checked_extensions << x.name
checked_extensions << y.name
@@ -130,13 +127,13 @@ def is_compatible?(another_definition)
right_extensions.each do |y|
next if checked_extensions.include?(y.name)
x = get_extension(y.name, left_extensions)
- if !x.nil?
+ unless x.nil?
# both profiles share an extension with the same name
checked_extensions << y.name
compare_extension_definition(x, y, another_definition)
end
x = get_extension(y.type[0].profile, left_extensions)
- if !x.nil? && x.name!=y.name && !checked_extensions.include?(x.name)
+ if !x.nil? && x.name != y.name && !checked_extensions.include?(x.name)
# both profiles share the same extension definition but with a different name
checked_extensions << x.name
checked_extensions << y.name
@@ -145,33 +142,30 @@ def is_compatible?(another_definition)
end
@errors.flatten!
@warnings.flatten!
- @errors.size==0
+ @errors.size.zero?
end
- def get_element_by_path(path, elements=snapshot.element)
- elements.each do |element|
- return element if element.path==path
- end
- nil
+ def get_element_by_path(path, elements = snapshot.element)
+ elements.detect { |element| element.path == path }
end
- def get_extension(extension, elements=snapshot.element)
+ def get_extension(extension, elements = snapshot.element)
elements.each do |element|
- if element.path.include?('extension') || element.type.map{|t|t.code}.include?('Extension')
- return element if element.name==extension || element.type.map{|t|t.profile}.include?(extension)
+ if element.path.include?('extension') || element.type.map(&:code).include?('Extension')
+ return element if element.name == extension || element.type.map(&:profile).include?(extension)
end
end
nil
end
- #private
+ # private
# name -- name of the profile we're fixing
# missing_paths -- list of paths that we're adding
# elements -- list of elements currently defined in the profile
# base_elements -- list of elements defined in the base resource the profile extends
def add_missing_elements(_name, missing_paths, elements, base_elements)
- variable_paths = elements.map{|e|e.path}.grep(/\[x\]/).map{|e|e[0..-4]}
- variable_paths << base_elements.map{|e|e.path}.grep(/\[x\]/).map{|e|e[0..-4]}
+ variable_paths = elements.map(&:path).grep(/\[x\]/).map { |e| e[0..-4] }
+ variable_paths << base_elements.map(&:path).grep(/\[x\]/).map { |e| e[0..-4] }
variable_paths.flatten!.uniq!
missing_paths.each do |path|
@@ -179,62 +173,60 @@ def add_missing_elements(_name, missing_paths, elements, base_elements)
next if path.include? 'extension'
# Skip the variable paths that end with "[x]"
- next if variable_paths.any?{|variable| path.starts_with?(variable)}
+ next if variable_paths.any? { |variable| path.starts_with?(variable) }
elem = get_element_by_path(path, base_elements)
- if !elem.nil?
+ unless elem.nil?
# _DEEP_ copy
elements << FHIR::ElementDefinition.from_fhir_json(elem.to_fhir_json)
next
end
x = path.split('.')
- root = x.first(x.size-1).join('.')
- if root.include? '.'
- # get the root element to fill in the details
- elem = get_element_by_path(root, elements)
- # get the data type definition to fill in the details
- # assume missing elements are from first data type (gross)
- next if elem.type.nil? || elem.type.empty?
- type_def = FHIR::Definitions.get_type_definition(elem.type[0].code)
- next if type_def.nil?
- type_elements = Array.new(type_def.snapshot.element)
- # _DEEP_ copy
- type_elements.map! do |e| #{|e| FHIR::ElementDefinition.from_fhir_json(e.to_fhir_json) }
- FHIR::ElementDefinition.from_fhir_json(e.to_fhir_json)
- end
- # Fix path names
- type_root = String.new(type_elements[0].path)
- type_elements.each { |e| e.path.gsub!(type_root, root) }
- # finally, add the missing element definitions
- # one by one -- only if they are not already present (i.e. do not override)
- type_elements.each do |z|
- y = get_element_by_path(z.path, elements)
- if y.nil?
- elements << z
- # else
- # @warnings << "StructureDefinition #{name} already contains #{z.path}"
- end
- end
- elements.uniq!
+ root = x.first(x.size - 1).join('.')
+ next unless root.include? '.'
+ # get the root element to fill in the details
+ elem = get_element_by_path(root, elements)
+ # get the data type definition to fill in the details
+ # assume missing elements are from first data type (gross)
+ next if elem.type.nil? || elem.type.empty?
+ type_def = FHIR::Definitions.get_type_definition(elem.type[0].code)
+ next if type_def.nil?
+ type_elements = Array.new(type_def.snapshot.element)
+ # _DEEP_ copy
+ type_elements.map! do |e| # {|e| FHIR::ElementDefinition.from_fhir_json(e.to_fhir_json) }
+ FHIR::ElementDefinition.from_fhir_json(e.to_fhir_json)
+ end
+ # Fix path names
+ type_root = String.new(type_elements[0].path)
+ type_elements.each { |e| e.path.gsub!(type_root, root) }
+ # finally, add the missing element definitions
+ # one by one -- only if they are not already present (i.e. do not override)
+ type_elements.each do |z|
+ y = get_element_by_path(z.path, elements)
+ next unless y.nil?
+ elements << z
+ # else
+ # @warnings << "StructureDefinition #{name} already contains #{z.path}"
+ end
+ elements.uniq!
# else
# @warnings << "StructureDefinition #{name} missing -- #{path}"
- end
end
end
- #private
+ # private
def compare_extension_definition(x, y, another_definition)
- x_profiles = x.type.map{|t|t.profile}
- y_profiles = y.type.map{|t|t.profile}
+ x_profiles = x.type.map(&:profile)
+ y_profiles = y.type.map(&:profile)
x_only = x_profiles - y_profiles
shared = x_profiles - x_only
- if !shared.nil? && shared.size==0
+ if !shared.nil? && shared.size.zero?
# same name, but different profiles
# maybe the profiles are the same, just with different URLs...
# ... so we have to compare them, if we can.
- @warnings << @finding.warning("#{x.path} (#{x.name})", 'type.profile', 'Different Profiles', "#{x_profiles}", "#{y_profiles}")
+ @warnings << @finding.warning("#{x.path} (#{x.name})", 'type.profile', 'Different Profiles', x_profiles.to_s, y_profiles.to_s)
x_extension = FHIR::Definitions.get_extension_definition(x.type[0].profile)
y_extension = FHIR::Definitions.get_extension_definition(y.type[0].profile)
if !x_extension.nil? && !y_extension.nil?
@@ -249,139 +241,139 @@ def compare_extension_definition(x, y, another_definition)
end
end
- #private
+ # private
def compare_element_definitions(x, y, another_definition)
return if x.nil? || y.nil? || another_definition.nil?
# check cardinality
x_min = x.min || 0
- x_max = (x.max == '*') ? Float::INFINITY : x.max.to_i
+ x_max = x.max == '*' ? Float::INFINITY : x.max.to_i
y_min = y.min || 0
- y_max = (y.max == '*') ? Float::INFINITY : y.max.to_i
+ y_max = y.max == '*' ? Float::INFINITY : y.max.to_i
if x_min.nil? || x.max.nil? || y_min.nil? || y.max.nil?
- @errors << @finding.error("#{x.path}", 'min/max', 'Unknown cardinality', "#{x_min}..#{x.max}", "#{y_min}..#{y.max}")
+ @errors << @finding.error(x.path.to_s, 'min/max', 'Unknown cardinality', "#{x_min}..#{x.max}", "#{y_min}..#{y.max}")
elsif (x_min > y_max) || (x_max < y_min)
- @errors << @finding.error("#{x.path}", 'min/max', 'Incompatible cardinality', "#{x_min}..#{x.max}", "#{y_min}..#{y.max}")
+ @errors << @finding.error(x.path.to_s, 'min/max', 'Incompatible cardinality', "#{x_min}..#{x.max}", "#{y_min}..#{y.max}")
elsif (x_min != y_min) || (x_max != y_max)
- @warnings << @finding.warning("#{x.path}", 'min/max', 'Inconsistent cardinality', "#{x_min}..#{x.max}", "#{y_min}..#{y.max}")
+ @warnings << @finding.warning(x.path.to_s, 'min/max', 'Inconsistent cardinality', "#{x_min}..#{x.max}", "#{y_min}..#{y.max}")
end
# check data types
- x_types = x.type.map {|t| t.code }
- y_types = y.type.map {|t| t.code }
+ x_types = x.type.map(&:code)
+ y_types = y.type.map(&:code)
x_only = x_types - y_types
y_only = y_types - x_types
shared = x_types - x_only
- if !shared.nil? && shared.size==0 && x_types.size>0 && y_types.size>0 && x.constraint.size > 0 && y.constraint.size > 0
- @errors << @finding.error("#{x.path}", 'type.code', 'Incompatible data types', "#{x_types}", "#{y_types}")
+ if !shared.nil? && shared.size.zero? && !x_types.empty? && !y_types.empty? && !x.constraint.empty? && !y.constraint.empty?
+ @errors << @finding.error(x.path.to_s, 'type.code', 'Incompatible data types', x_types.to_s, y_types.to_s)
end
- if !x_only.nil? && x_only.size > 0
- @warnings << @finding.warning("#{x.path}", 'type.code', 'Allows additional data types', "#{x_only}", 'not allowed')
+ if !x_only.nil? && !x_only.empty?
+ @warnings << @finding.warning(x.path.to_s, 'type.code', 'Allows additional data types', x_only.to_s, 'not allowed')
end
- if !y_only.nil? && y_only.size > 0
- @warnings << @finding.warning("#{x.path}", 'type.code', 'Allows additional data types', 'not allowed', "#{y_only}")
+ if !y_only.nil? && !y_only.empty?
+ @warnings << @finding.warning(x.path.to_s, 'type.code', 'Allows additional data types', 'not allowed', y_only.to_s)
end
# check bindings
if x.binding.nil? && !y.binding.nil?
val = y.binding.valueSetUri || y.binding.valueSetReference.try(:reference) || y.binding.description
- @warnings << @finding.warning("#{x.path}", 'binding', 'Inconsistent binding', '', val)
+ @warnings << @finding.warning(x.path.to_s, 'binding', 'Inconsistent binding', '', val)
elsif !x.binding.nil? && y.binding.nil?
val = x.binding.valueSetUri || x.binding.valueSetReference.try(:reference) || x.binding.description
- @warnings << @finding.warning("#{x.path}", 'binding', 'Inconsistent binding', val, '')
+ @warnings << @finding.warning(x.path.to_s, 'binding', 'Inconsistent binding', val, '')
elsif !x.binding.nil? && !y.binding.nil?
x_vs = x.binding.valueSetUri || x.binding.valueSetReference.try(:reference)
y_vs = y.binding.valueSetUri || y.binding.valueSetReference.try(:reference)
if x_vs != y_vs
- if x.binding.strength=='required' || y.binding.strength=='required'
- @errors << @finding.error("#{x.path}", 'binding.strength', 'Incompatible bindings', "#{x.binding.strength} #{x_vs}", "#{y.binding.strength} #{y_vs}")
+ if x.binding.strength == 'required' || y.binding.strength == 'required'
+ @errors << @finding.error(x.path.to_s, 'binding.strength', 'Incompatible bindings', "#{x.binding.strength} #{x_vs}", "#{y.binding.strength} #{y_vs}")
else
- @warnings << @finding.warning("#{x.path}", 'binding.strength', 'Inconsistent bindings', "#{x.binding.strength} #{x_vs}", "#{y.binding.strength} #{y_vs}")
+ @warnings << @finding.warning(x.path.to_s, 'binding.strength', 'Inconsistent bindings', "#{x.binding.strength} #{x_vs}", "#{y.binding.strength} #{y_vs}")
end
end
end
# check default values
if x.defaultValue.try(:type) != y.defaultValue.try(:type)
- @errors << @finding.error("#{x.path}", 'defaultValue', 'Incompatible default type', "#{x.defaultValue.try(:type)}", "#{y.defaultValue.try(:type)}")
+ @errors << @finding.error(x.path.to_s, 'defaultValue', 'Incompatible default type', x.defaultValue.try(:type).to_s, y.defaultValue.try(:type).to_s)
end
if x.defaultValue.try(:value) != y.defaultValue.try(:value)
- @errors << @finding.error("#{x.path}", 'defaultValue', 'Incompatible default value', "#{x.defaultValue.try(:value)}", "#{y.defaultValue.try(:value)}")
+ @errors << @finding.error(x.path.to_s, 'defaultValue', 'Incompatible default value', x.defaultValue.try(:value).to_s, y.defaultValue.try(:value).to_s)
end
# check meaning when missing
if x.meaningWhenMissing != y.meaningWhenMissing
- @errors << @finding.error("#{x.path}", 'meaningWhenMissing', 'Inconsistent missing meaning', "#{x.meaningWhenMissing.tr(',', ';')}", "#{y.meaningWhenMissing.tr(',', ';')}")
+ @errors << @finding.error(x.path.to_s, 'meaningWhenMissing', 'Inconsistent missing meaning', x.meaningWhenMissing.tr(',', ';').to_s, y.meaningWhenMissing.tr(',', ';').to_s)
end
# check fixed values
if x.fixed.try(:type) != y.fixed.try(:type)
- @errors << @finding.error("#{x.path}", 'fixed', 'Incompatible fixed type', "#{x.fixed.try(:type)}", "#{y.fixed.try(:type)}")
+ @errors << @finding.error(x.path.to_s, 'fixed', 'Incompatible fixed type', x.fixed.try(:type).to_s, y.fixed.try(:type).to_s)
end
if x.fixed != y.fixed
xfv = x.fixed.try(:value)
xfv = xfv.to_xml.delete(/\n/) if x.fixed.try(:value).methods.include?(:to_xml)
yfv = y.fixed.try(:value)
yfv = yfv.to_xml.delete(/\n/) if y.fixed.try(:value).methods.include?(:to_xml)
- @errors << @finding.error("#{x.path}", 'fixed', 'Incompatible fixed value', "#{xfv}", "#{yfv}")
+ @errors << @finding.error(x.path.to_s, 'fixed', 'Incompatible fixed value', xfv.to_s, yfv.to_s)
end
# check min values
if x.min.try(:type) != y.min.try(:type)
- @errors << @finding.error("#{x.path}", 'min', 'Incompatible min type', "#{x.min.try(:type)}", "#{y.min.try(:type)}")
+ @errors << @finding.error(x.path.to_s, 'min', 'Incompatible min type', x.min.try(:type).to_s, y.min.try(:type).to_s)
end
if x.min.try(:value) != y.min.try(:value)
- @errors << @finding.error("#{x.path}", 'min', 'Incompatible min value', "#{x.min.try(:value)}", "#{y.min.try(:value)}")
+ @errors << @finding.error(x.path.to_s, 'min', 'Incompatible min value', x.min.try(:value).to_s, y.min.try(:value).to_s)
end
# check max values
if x.max.try(:type) != y.max.try(:type)
- @errors << @finding.error("#{x.path}", 'max', 'Incompatible max type', "#{x.max.try(:type)}", "#{y.max.try(:type)}")
+ @errors << @finding.error(x.path.to_s, 'max', 'Incompatible max type', x.max.try(:type).to_s, y.max.try(:type).to_s)
end
if x.max.try(:value) != y.max.try(:value)
- @errors << @finding.error("#{x.path}", 'max', 'Incompatible max value', "#{x.max.try(:value)}", "#{y.max.try(:value)}")
+ @errors << @finding.error(x.path.to_s, 'max', 'Incompatible max value', x.max.try(:value).to_s, y.max.try(:value).to_s)
end
# check pattern values
if x.pattern.try(:type) != y.pattern.try(:type)
- @errors << @finding.error("#{x.path}", 'pattern', 'Incompatible pattern type', "#{x.pattern.try(:type)}", "#{y.pattern.try(:type)}")
+ @errors << @finding.error(x.path.to_s, 'pattern', 'Incompatible pattern type', x.pattern.try(:type).to_s, y.pattern.try(:type).to_s)
end
if x.pattern.try(:value) != y.pattern.try(:value)
- @errors << @finding.error("#{x.path}", 'pattern', 'Incompatible pattern value', "#{x.pattern.try(:value)}", "#{y.pattern.try(:value)}")
+ @errors << @finding.error(x.path.to_s, 'pattern', 'Incompatible pattern value', x.pattern.try(:value).to_s, y.pattern.try(:value).to_s)
end
# maxLength (for Strings)
if x.maxLength != y.maxLength
- @warnings << @finding.warning("#{x.path}", 'maxLength', 'Inconsistent maximum length', "#{x.maxLength}", "#{y.maxLength}")
+ @warnings << @finding.warning(x.path.to_s, 'maxLength', 'Inconsistent maximum length', x.maxLength.to_s, y.maxLength.to_s)
end
# constraints
- x_constraints = x.constraint.map {|t| t.xpath }
- y_constraints = y.constraint.map {|t| t.xpath }
+ x_constraints = x.constraint.map(&:xpath)
+ y_constraints = y.constraint.map(&:xpath)
x_only = x_constraints - y_constraints
y_only = y_constraints - x_constraints
shared = x_constraints - x_only
- if !shared.nil? && shared.size==0 && x.constraint.size > 0 && y.constraint.size > 0
- @errors << @finding.error("#{x.path}", 'constraint.xpath', 'Incompatible constraints', "#{x_constraints.map{|z|z.tr(',', ';')}.join(' && ')}", "#{y_constraints.map{|z|z.tr(',', ';')}.join(' && ')}")
+ if !shared.nil? && shared.size.zero? && !x.constraint.empty? && !y.constraint.empty?
+ @errors << @finding.error(x.path.to_s, 'constraint.xpath', 'Incompatible constraints', x_constraints.map { |z| z.tr(',', ';') }.join(' && ').to_s, y_constraints.map { |z| z.tr(',', ';') }.join(' && ').to_s)
end
- if !x_only.nil? && x_only.size > 0
- @errors << @finding.error("#{x.path}", 'constraint.xpath', 'Additional constraints', "#{x_constraints.map{|z|z.tr(',', ';')}.join(' && ')}", '')
+ if !x_only.nil? && !x_only.empty?
+ @errors << @finding.error(x.path.to_s, 'constraint.xpath', 'Additional constraints', x_constraints.map { |z| z.tr(',', ';') }.join(' && ').to_s, '')
end
- if !y_only.nil? && y_only.size > 0
- @errors << @finding.error("#{x.path}", 'constraint.xpath', 'Additional constraints', '', "#{y_constraints.map{|z|z.tr(',', ';')}.join(' && ')}")
+ if !y_only.nil? && !y_only.empty?
+ @errors << @finding.error(x.path.to_s, 'constraint.xpath', 'Additional constraints', '', y_constraints.map { |z| z.tr(',', ';') }.join(' && ').to_s)
end
# mustSupports
if x.mustSupport != y.mustSupport
- @warnings << @finding.warning("#{x.path}", 'mustSupport', 'Inconsistent mustSupport', "#{x.mustSupport || false}", "#{y.mustSupport || false}")
+ @warnings << @finding.warning(x.path.to_s, 'mustSupport', 'Inconsistent mustSupport', (x.mustSupport || false).to_s, (y.mustSupport || false).to_s)
end
# isModifier
if x.isModifier != y.isModifier
- @errors << @finding.error("#{x.path}", 'isModifier', 'Incompatible isModifier', "#{x.isModifier || false}", "#{y.isModifier || false}")
+ @errors << @finding.error(x.path.to_s, 'isModifier', 'Incompatible isModifier', (x.isModifier || false).to_s, (y.isModifier || false).to_s)
end
end
@@ -417,14 +409,14 @@ def is_valid_json?(json)
base_type = snapshot.element[0].path
snapshot.element.each do |element|
path = element.path
- path = path[(base_type.size+1)..-1] if path.start_with? base_type
+ path = path[(base_type.size + 1)..-1] if path.start_with? base_type
nodes = get_json_nodes(json, path)
# special filtering on extension urls
- extension_profile = element.type.find{|t|t.code=='Extension' && !t.profile.nil? && !t.profile.empty?}
+ extension_profile = element.type.find { |t| t.code == 'Extension' && !t.profile.nil? && !t.profile.empty? }
if extension_profile
- nodes.keep_if{|x| extension_profile.profile.include?(x['url']) }
+ nodes.keep_if { |x| extension_profile.profile.include?(x['url']) }
end
# Check the cardinality
@@ -440,7 +432,7 @@ def is_valid_json?(json)
end
# Check the datatype for each node, only if the element has one declared, and it isn't the root element
- if element.type.size > 0 && element.path!=id
+ if !element.type.empty? && element.path != id
nodes.each do |value|
matching_type = 0
@@ -449,35 +441,35 @@ def is_valid_json?(json)
element.type.each do |type|
data_type_code = type.code
verified_extension = false
- if data_type_code=='Extension' && !type.profile.empty?
+ if data_type_code == 'Extension' && !type.profile.empty?
extension_def = FHIR::Definitions.get_extension_definition(value['url'])
if extension_def
verified_extension = extension_def.validates_resource?(FHIR::Extension.new(deep_copy(value)))
end
end
if verified_extension || is_data_type?(data_type_code, value)
- matching_type+=1
+ matching_type += 1
if data_type_code == 'code' # then check the binding
- if(!element.binding.nil?)
- matching_type+=check_binding(element, value)
+ unless element.binding.nil?
+ matching_type += check_binding(element, value)
end
- elsif data_type_code=='CodeableConcept' && !element.pattern.nil? && element.pattern.type=='CodeableConcept'
- # TODO check that the CodeableConcept matches the defined pattern
+ elsif data_type_code == 'CodeableConcept' && !element.pattern.nil? && element.pattern.type == 'CodeableConcept'
+ # TODO: check that the CodeableConcept matches the defined pattern
@warnings << "Ignoring defined patterns on CodeableConcept #{element.path}"
- elsif data_type_code=='String' && !element.maxLength.nil? && (value.size>element.maxLength)
+ elsif data_type_code == 'String' && !element.maxLength.nil? && (value.size > element.maxLength)
@errors << "#{element.path} exceed maximum length of #{element.maxLength}: #{value}"
end
else
temp_messages << "#{element.path} is not a valid #{data_type_code}: '#{value}'"
end
end
- if matching_type<=0
+ if matching_type <= 0
@errors += temp_messages
- @errors << "#{element.path} did not match one of the valid data types: #{element.type.map{|el|el.code}}"
+ @errors << "#{element.path} did not match one of the valid data types: #{element.type.map(&:code)}"
else
@warnings += temp_messages
end
- if !element.fixed.nil? && element.fixed!=value
+ if !element.fixed.nil? && element.fixed != value
@errors << "#{element.path} value of '#{value}' did not match fixed value: #{element.fixed}"
end
end
@@ -490,12 +482,12 @@ def is_valid_json?(json)
# consistent with the current context (element.path). For example, sometimes expressions appear to be
# written to be evaluated within the element, other times at the resource level, or perhaps
# elsewhere. There is no good way to determine "where" you should evaluate the expression.
- if !element.constraint.empty?
+ unless element.constraint.empty?
element.constraint.each do |constraint|
if constraint.expression && !nodes.empty?
begin
result = FluentPath.evaluate(constraint.expression, json)
- if !result && constraint.severity=='error'
+ if !result && constraint.severity == 'error'
@errors << "#{element.path}: FluentPath expression evaluates to false for #{name} invariant rule #{constraint.key}: #{constraint.human}"
end
rescue
@@ -504,10 +496,9 @@ def is_valid_json?(json)
end
end
end
-
end
- @errors.size==0
+ @errors.size.zero?
end
def get_json_nodes(json, path)
@@ -547,12 +538,12 @@ def is_data_type?(data_type_code, value)
# FHIR models covers any base Resources
if FHIR::RESOURCES.include?(data_type_code)
definition = FHIR::Definitions.get_resource_definition(data_type_code)
- if !definition.nil?
+ unless definition.nil?
ret_val = false
begin
klass = Module.const_get("FHIR::#{data_type_code}")
ret_val = definition.validates_resource?(klass.new(deep_copy(value)))
- if !ret_val
+ unless ret_val
@errors += definition.errors
@warnings += definition.warnings
end
@@ -568,14 +559,14 @@ def is_data_type?(data_type_code, value)
when 'domainresource'
true # we don't have to verify domain resource, because it will be included in the snapshot
when 'boolean'
- value==true || value==false || value.downcase=='true' || value.downcase=='false'
+ value == true || value == false || value.downcase == 'true' || value.downcase == 'false'
when 'code'
- value.is_a?(String) && value.size>=1 && value.size==value.rstrip.size
+ value.is_a?(String) && value.size >= 1 && value.size == value.rstrip.size
when 'string', 'markdown'
value.is_a?(String)
when 'xhtml'
fragment = Nokogiri::HTML::DocumentFragment.parse(value)
- value.is_a?(String) && fragment.errors.size == 0
+ value.is_a?(String) && fragment.errors.size.zero?
when 'base64binary'
regex = /[^0-9\+\/\=A-Za-z\r\n ]/
value.is_a?(String) && (regex =~ value).nil?
@@ -621,7 +612,7 @@ def is_data_type?(data_type_code, value)
begin
klass = Module.const_get("FHIR::#{resource_type}")
ret_val = definition.validates_resource?(klass.new(deep_copy(value)))
- if !ret_val
+ unless ret_val
@errors += definition.errors
@warnings += definition.warnings
end
@@ -635,7 +626,7 @@ def is_data_type?(data_type_code, value)
end
else
# Eliminate endless loop on Element is an Element
- return true if (data_type_code=='Element' && id=='Element')
+ return true if data_type_code == 'Element' && id == 'Element'
definition = FHIR::Definitions.get_type_definition(data_type_code)
definition = FHIR::Definitions.get_resource_definition(data_type_code) if definition.nil?
@@ -644,7 +635,7 @@ def is_data_type?(data_type_code, value)
begin
klass = Module.const_get("FHIR::#{data_type_code}")
ret_val = definition.validates_resource?(klass.new(deep_copy(value)))
- if !ret_val
+ unless ret_val
@errors += definition.errors
@warnings += definition.warnings
end
@@ -660,33 +651,32 @@ def is_data_type?(data_type_code, value)
end
def check_binding(element, value)
-
vs_uri = element.binding.valueSetUri || element.binding.valueSetReference.reference
valueset = FHIR::Definitions.get_codes(vs_uri)
matching_type = 0
- if vs_uri=='http://hl7.org/fhir/ValueSet/content-type' || vs_uri=='http://www.rfc-editor.org/bcp/bcp13.txt'
+ if vs_uri == 'http://hl7.org/fhir/ValueSet/content-type' || vs_uri == 'http://www.rfc-editor.org/bcp/bcp13.txt'
matches = MIME::Types[value]
- if (matches.nil? || matches.size==0) && !is_some_type_of_xml_or_json(value)
+ if (matches.nil? || matches.size.zero?) && !is_some_type_of_xml_or_json(value)
@errors << "#{element.path} has invalid mime-type: '#{value}'"
- matching_type-=1 if element.binding.strength=='required'
+ matching_type -= 1 if element.binding.strength == 'required'
end
- elsif vs_uri=='http://hl7.org/fhir/ValueSet/languages' || vs_uri=='http://tools.ietf.org/html/bcp47'
- has_region = (!(value =~ /-/).nil?)
+ elsif vs_uri == 'http://hl7.org/fhir/ValueSet/languages' || vs_uri == 'http://tools.ietf.org/html/bcp47'
+ has_region = !(value =~ /-/).nil?
valid = !BCP47::Language.identify(value.downcase).nil? && (!has_region || !BCP47::Region.identify(value.upcase).nil?)
- if !valid
+ unless valid
@errors << "#{element.path} has unrecognized language: '#{value}'"
- matching_type-=1 if element.binding.strength=='required'
+ matching_type -= 1 if element.binding.strength == 'required'
end
elsif valueset.nil?
@warnings << "#{element.path} has unknown ValueSet: '#{vs_uri}'"
- matching_type-=1 if element.binding.strength=='required'
+ matching_type -= 1 if element.binding.strength == 'required'
elsif !valueset.values.flatten.include?(value)
message = "#{element.path} has invalid code '#{value}' from #{valueset}"
- if element.binding.strength=='required'
+ if element.binding.strength == 'required'
@errors << message
- matching_type-=1
+ matching_type -= 1
else
@warnings << message
end
@@ -697,14 +687,13 @@ def check_binding(element, value)
def is_some_type_of_xml_or_json(code)
m = code.downcase
- return true if m=='xml' || m=='json'
+ return true if m == 'xml' || m == 'json'
return true if (m.starts_with?('application/') || m.starts_with?('text/')) && (m.ends_with?('json') || m.ends_with?('xml'))
- return true if (m.starts_with?('application/xml') || m.starts_with?('text/xml'))
- return true if (m.starts_with?('application/json') || m.starts_with?('text/json'))
+ return true if m.starts_with?('application/xml') || m.starts_with?('text/xml')
+ return true if m.starts_with?('application/json') || m.starts_with?('text/json')
false
end
private :is_valid_json?, :get_json_nodes, :is_data_type?, :check_binding, :add_missing_elements, :compare_element_definitions
-
end
end
diff --git a/lib/fhir_models/fhir_ext/structure_definition_finding.rb b/lib/fhir_models/fhir_ext/structure_definition_finding.rb
index 4d0d1c6cf..e79d247fb 100644
--- a/lib/fhir_models/fhir_ext/structure_definition_finding.rb
+++ b/lib/fhir_models/fhir_ext/structure_definition_finding.rb
@@ -63,6 +63,5 @@ def error(path, attribute, message, value_a, value_b)
obj.valueB = value_b
obj
end
-
end
end
diff --git a/lib/fhir_models/fluentpath/evaluate.rb b/lib/fhir_models/fluentpath/evaluate.rb
index 21a485adf..c365ada6a 100644
--- a/lib/fhir_models/fluentpath/evaluate.rb
+++ b/lib/fhir_models/fluentpath/evaluate.rb
@@ -1,10 +1,9 @@
module FluentPath
-
- @@context = Hash.new
+ @@context = {}
@@parent = nil
# This is the entry point to using the FluentPath class
- def self.evaluate(expression, hash, parent=nil)
+ def self.evaluate(expression, hash, parent = nil)
@@context = hash
@@parent = parent
tree = FluentPath.parse(expression)
@@ -16,36 +15,35 @@ def self.evaluate(expression, hash, parent=nil)
# self references
def self.get(key, hash)
return @@context if ['$context', '$resource'].include?(key)
- return @@parent if key=='$parent'
- return 'http://unitsofmeasure.org' if key=='%ucum'
- return 'http://snomed.info/sct' if key=='%sct'
- return 'http://loinc.org' if key=='%loinc'
+ return @@parent if key == '$parent'
+ return 'http://unitsofmeasure.org' if key == '%ucum'
+ return 'http://snomed.info/sct' if key == '%sct'
+ return 'http://loinc.org' if key == '%loinc'
return key.gsub!(/\A\'|\'\Z/, '') if key.start_with?("'") && key.end_with?("'")
key.gsub!(/\A"|"\Z/, '') # remove quotes around path if they exist
if hash.is_a?(Array)
response = []
hash.each do |e|
- if e.is_a?(Hash)
- item = e[key]
- if item.is_a?(Array)
- item.each{|i| response << i }
- else
- response << item
- end
+ next unless e.is_a?(Hash)
+ item = e[key]
+ if item.is_a?(Array)
+ item.each { |i| response << i }
+ else
+ response << item
end
end
return response
end
- return :null if !hash.is_a?(Hash)
- return hash if hash['resourceType']==key
+ return :null unless hash.is_a?(Hash)
+ return hash if hash['resourceType'] == key
val = hash[key]
if val.nil?
# this block is a dangerous hack to get fields of multiple data types
# e.g. 'value' instead of 'valueQuantity', or 'onset' instead of 'onsetDateTime' or 'onsetPeriod'
- nkey = hash.keys.select{|x|x.start_with?(key)}.first
- if !nkey.nil?
+ nkey = hash.keys.select { |x| x.start_with?(key) }.first
+ unless nkey.nil?
tail = nkey.gsub(key, '')
- val = hash[nkey] if (tail[0]==tail[0].capitalize)
+ val = hash[nkey] if tail[0] == tail[0].capitalize
end
end
val = :null if val.nil?
@@ -59,38 +57,38 @@ def self.convert_to_boolean(value)
return false if value.nil?
return false if value.is_a?(Array) && value.empty?
return false if value.is_a?(Hash) && value.empty?
- return false if value==:null
- return false if value==false
- return true
+ return false if value == :null
+ return false if value == false
+ true
end
def self.clean_index(tree, index)
- tree[index] = nil if !index.nil?
+ tree[index] = nil unless index.nil?
end
# evaluate a parsed expression given some context data
def self.compute(tree, data)
tree = tree.tree if tree.is_a?(FluentPath::Expression)
# --------------- OPERATOR PRECEDENCE ------------------
- #01 . (path/function invocation)
- #02 [] (indexer)
- #03 unary + and -
- #04: *, /, div, mod
- #05: +, -,
- #06: |
- #07: >, <, >=, <=
- #08: is, as
- #09: =, ~, !=, !~
- #10: in, contains
- #11: and
- #12: xor, or
- #13: implies
+ # 01 . (path/function invocation)
+ # 02 [] (indexer)
+ # 03 unary + and -
+ # 04: *, /, div, mod
+ # 05: +, -
+ # 06: |
+ # 07: >, <, >=, <=
+ # 08: is, as
+ # 09: =, ~, !=, !~
+ # 10: in, contains
+ # 11: and
+ # 12: xor, or
+ # 13: implies
# evaluate all the data at this level
functions = [:where, :select, :extension, :children, :first, :last, :tail]
size = -1
substitutions = 1
- while(tree.length!=size || substitutions > 0)
+ while tree.length != size || substitutions > 0
substitutions = 0
FHIR.logger.debug "DATA: #{tree}"
previous_node = nil
@@ -102,9 +100,9 @@ def self.compute(tree, data)
if node.include?('[') && node.end_with?(']')
array_index = node[node.index('[')..-1].gsub(/\[|\]/, '')
t = get(array_index, data)
- t = array_index.to_i if(t.nil? || t==:null)
+ t = array_index.to_i if t.nil? || t == :null
array_index = t
- node = node[0..node.index('[')-1]
+ node = node[0..node.index('[') - 1]
end
if previous_node.is_a?(Hash) || previous_node.is_a?(Array)
tree[index] = get(node, previous_node)
@@ -122,13 +120,13 @@ def self.compute(tree, data)
when :where
# the previous node should be data (as Array or Hash)
# the next node should be a block or subexpression (as FluentPath::Expression)
- block = tree[index+1]
+ block = tree[index + 1]
if block.is_a?(FluentPath::Expression)
- tree[index+1] = nil
+ tree[index + 1] = nil
else
raise 'Where function requires a block.'
end
- previous_node = [] if previous_node==:null
+ previous_node = [] if previous_node == :null
if previous_node.is_a?(Array)
previous_node.keep_if do |item|
sub = compute(block.clone, item)
@@ -138,13 +136,8 @@ def self.compute(tree, data)
clean_index(tree, previous_index)
elsif previous_node.is_a?(Hash)
sub = compute(block, previous_node)
- if convert_to_boolean(sub)
- tree[index] = previous_node
- clean_index(tree, previous_index)
- else
- tree[index] = {}
- clean_index(tree, previous_index)
- end
+ tree[index] = convert_to_boolean(sub) ? previous_node : {}
+ clean_index(tree, previous_index)
else
raise "Where function not applicable to #{previous_node.class}: #{previous_node}"
end
@@ -153,13 +146,13 @@ def self.compute(tree, data)
# select is equivalent to ruby Array.map!
# the previous node should be data (as Array or Hash)
# the next node should be a block or subexpression (as FluentPath::Expression)
- block = tree[index+1]
+ block = tree[index + 1]
if block.is_a?(FluentPath::Expression)
- tree[index+1] = nil
+ tree[index + 1] = nil
else
raise 'Select function requires a block.'
end
- previous_node = [] if previous_node==:null
+ previous_node = [] if previous_node == :null
if previous_node.is_a?(Array)
previous_node.map! do |item|
compute(block.clone, item)
@@ -176,9 +169,9 @@ def self.compute(tree, data)
when :extension
# the previous node should be a data (as Hash)
# the next node optionally is a block or subexpression (as FluentPath::Expression)
- block = tree[index+1]
+ block = tree[index + 1]
if block.is_a?(FluentPath::Expression)
- tree[index+1] = nil
+ tree[index + 1] = nil
else
raise 'Extension function requires a block.'
end
@@ -192,7 +185,7 @@ def self.compute(tree, data)
rescue
raise 'Extension function requires a single URL as String.'
end
- ext = exts.select{|x|x['url']==url}.first
+ ext = exts.select { |x| x['url'] == url }.first
tree[index] = ext
clean_index(tree, previous_index)
else
@@ -208,10 +201,10 @@ def self.compute(tree, data)
if previous_node.is_a?(Hash)
tree[index] = previous_node.values
clean_index(tree, previous_index)
- substitutions+=1
+ substitutions += 1
elsif data.is_a?(Hash)
tree[index] = data.values
- substitutions+=1
+ substitutions += 1
else
raise "Children not applicable to #{previous_node.class}: #{previous_node}"
end
@@ -235,7 +228,7 @@ def self.compute(tree, data)
when :tail
# the previous node should be an Array of length > 1
if previous_node.is_a?(Array)
- tree[index] = previous_node.last(previous_node.length-1)
+ tree[index] = previous_node.last(previous_node.length - 1)
clean_index(tree, previous_index)
else
raise "Tail function is not applicable to #{previous_node.class}: #{previous_node}"
@@ -257,7 +250,7 @@ def self.compute(tree, data)
# evaluate all the functions at this level
functions = [:all, :not, :empty, :exists, :startsWith, :substring, :contains, :in, :distinct, :toInteger, :count]
size = -1
- while(tree.length!=size)
+ while tree.length != size
FHIR.logger.debug "FUNC: #{tree}"
previous_node = data
previous_index = nil
@@ -269,26 +262,25 @@ def self.compute(tree, data)
when :all
if previous_node.is_a?(Array)
result = true
- previous_node.each{|item| result = (result && convert_to_boolean(item))}
+ previous_node.each { |item| result = (result && convert_to_boolean(item)) }
tree[index] = result
- clean_index(tree, previous_index)
else
tree[index] = convert_to_boolean(previous_node)
- clean_index(tree, previous_index)
end
+ clean_index(tree, previous_index)
when :not
tree[index] = !convert_to_boolean(previous_node)
clean_index(tree, previous_index)
when :count
tree[index] = 0
- tree[index] = 1 if !previous_node.nil?
+ tree[index] = 1 unless previous_node.nil?
tree[index] = previous_node.length if previous_node.is_a?(Array)
clean_index(tree, previous_index)
when :empty
- tree[index] = (previous_node==:null || previous_node.empty? rescue previous_node.nil?)
+ tree[index] = (previous_node == :null || previous_node.empty? rescue previous_node.nil?)
clean_index(tree, previous_index)
when :exists
- tree[index] = !previous_node.nil? && previous_node!=:null
+ tree[index] = !previous_node.nil? && previous_node != :null
clean_index(tree, previous_index)
when :distinct
tree[index] = (previous_node.uniq rescue previous_node)
@@ -296,9 +288,9 @@ def self.compute(tree, data)
when :startsWith
# the previous node should be a data (as String)
# the next node should be a block or subexpression (as FluentPath::Expression)
- block = tree[index+1]
+ block = tree[index + 1]
if block.is_a?(FluentPath::Expression)
- tree[index+1] = nil
+ tree[index + 1] = nil
else
raise 'StartsWith function requires a block.'
end
@@ -314,9 +306,9 @@ def self.compute(tree, data)
when :substring
# the previous node should be a data (as String)
# the next node should be a block or subexpression (as FluentPath::Expression)
- block = tree[index+1]
+ block = tree[index + 1]
if block.is_a?(FluentPath::Expression)
- tree[index+1] = nil
+ tree[index + 1] = nil
else
raise 'Substring function requires a block.'
end
@@ -327,13 +319,13 @@ def self.compute(tree, data)
if args.is_a?(String) && args.include?(',')
args = args.split(',')
start = args.first.to_i
- length = args.last.to_i-1
+ length = args.last.to_i - 1
else
FHIR.logger.debug 'Evaling Substring Block....'
start = compute(block, data)
length = previous_node.length - start
end
- tree[index] = previous_node[start..(start+length)]
+ tree[index] = previous_node[start..(start + length)]
clean_index(tree, previous_index)
else
raise "Substring function not applicable to #{previous_node.class}: #{previous_node}"
@@ -342,9 +334,9 @@ def self.compute(tree, data)
when :contains
# the previous node should be a data (as String)
# the next node should be a block or subexpression (as FluentPath::Expression)
- block = tree[index+1]
+ block = tree[index + 1]
if block.is_a?(FluentPath::Expression)
- tree[index+1] = nil
+ tree[index + 1] = nil
else
raise 'Contains function requires a block.'
end
@@ -360,18 +352,18 @@ def self.compute(tree, data)
when :in
# the previous node should be a data (as String, Number, or Boolean)
# the next node should an Array (possibly as a block or subexpression/FluentPath::Expression)
- block = tree[index+1]
+ block = tree[index + 1]
if block.is_a?(FluentPath::Expression)
FHIR.logger.debug 'Evaling In Block....'
- tree[index+1] = compute(block, data)
+ tree[index + 1] = compute(block, data)
end
- array = tree[index+1]
+ array = tree[index + 1]
if array.is_a?(Array)
- tree[index+1] = nil
+ tree[index + 1] = nil
else
raise 'In function requires an array.'
end
- if previous_node.is_a?(String) || previous_node==true || previous_node==false || previous_node.is_a?(Numeric)
+ if previous_node.is_a?(String) || previous_node == true || previous_node == false || previous_node.is_a?(Numeric)
tree[index] = array.include?(previous_node) rescue false
clean_index(tree, previous_index)
else
@@ -403,7 +395,7 @@ def self.compute(tree, data)
# evaluate all mult/div
functions = [:"/", :"*"]
size = -1
- while(tree.length!=size)
+ while tree.length != size
FHIR.logger.debug "MATH: #{tree}"
previous_node = nil
previous_index = nil
@@ -411,17 +403,17 @@ def self.compute(tree, data)
tree.each_with_index do |node, index|
if node.is_a?(Symbol) && functions.include?(node)
previous_node = compute(previous_node, data) if previous_node.is_a?(FluentPath::Expression)
- tree[index+1] = compute(tree[index+1], data) if tree[index+1].is_a?(FluentPath::Expression)
+ tree[index + 1] = compute(tree[index + 1], data) if tree[index + 1].is_a?(FluentPath::Expression)
left = previous_node
- right = tree[index+1]
+ right = tree[index + 1]
case node
when :"/"
- tree[index] = (left/right)
+ tree[index] = (left / right)
when :"*"
- tree[index] = (left*right)
+ tree[index] = (left * right)
end
tree[previous_index] = nil
- tree[index+1] = nil
+ tree[index + 1] = nil
break
end
previous_index = index
@@ -434,7 +426,7 @@ def self.compute(tree, data)
# evaluate all add/sub
functions = [:"+", :"-"]
size = -1
- while(tree.length!=size)
+ while tree.length != size
FHIR.logger.debug "MATH: #{tree}"
previous_node = nil
previous_index = nil
@@ -442,17 +434,17 @@ def self.compute(tree, data)
tree.each_with_index do |node, index|
if node.is_a?(Symbol) && functions.include?(node)
previous_node = compute(previous_node, data) if previous_node.is_a?(FluentPath::Expression)
- tree[index+1] = compute(tree[index+1], data) if tree[index+1].is_a?(FluentPath::Expression)
+ tree[index + 1] = compute(tree[index + 1], data) if tree[index + 1].is_a?(FluentPath::Expression)
left = previous_node
- right = tree[index+1]
+ right = tree[index + 1]
case node
when :"+"
- tree[index] = (left+right)
+ tree[index] = (left + right)
when :"-"
- tree[index] = (left-right)
+ tree[index] = (left - right)
end
tree[previous_index] = nil
- tree[index+1] = nil
+ tree[index + 1] = nil
break
end
previous_index = index
@@ -465,7 +457,7 @@ def self.compute(tree, data)
# evaluate all equality tests
functions = [:"=", :"!=", :"<=", :">=", :"<", :">"]
size = -1
- while(tree.length!=size)
+ while tree.length != size
FHIR.logger.debug "EQ: #{tree}"
previous_node = nil
previous_index = nil
@@ -473,27 +465,27 @@ def self.compute(tree, data)
tree.each_with_index do |node, index|
if node.is_a?(Symbol) && functions.include?(node)
previous_node = compute(previous_node, data) if previous_node.is_a?(FluentPath::Expression)
- tree[index+1] = compute(tree[index+1], data) if tree[index+1].is_a?(FluentPath::Expression)
+ tree[index + 1] = compute(tree[index + 1], data) if tree[index + 1].is_a?(FluentPath::Expression)
left = previous_node
- right = tree[index+1]
+ right = tree[index + 1]
case node
when :"="
- tree[index] = (left==right)
+ tree[index] = (left == right)
when :"!="
- tree[index] = (left!=right)
+ tree[index] = (left != right)
when :"<="
- tree[index] = (left<=right)
+ tree[index] = (left <= right)
when :">="
- tree[index] = (left>=right)
+ tree[index] = (left >= right)
when :"<"
- tree[index] = (left<right)
+ tree[index] = (left < right)
when :">"
- tree[index] = (left>right)
+ tree[index] = (left > right)
else
raise "Equality operator not implemented: #{node}"
end
tree[previous_index] = nil
- tree[index+1] = nil
+ tree[index + 1] = nil
break
end
previous_index = index
@@ -506,7 +498,7 @@ def self.compute(tree, data)
# evaluate all logical tests
functions = [:and, :or, :xor]
size = -1
- while(tree.length!=size)
+ while tree.length != size
FHIR.logger.debug "LOGIC: #{tree}"
previous_node = nil
previous_index = nil
@@ -514,21 +506,21 @@ def self.compute(tree, data)
tree.each_with_index do |node, index|
if node.is_a?(Symbol) && functions.include?(node)
previous_node = compute(previous_node, data) if previous_node.is_a?(FluentPath::Expression)
- tree[index+1] = compute(tree[index+1], data) if tree[index+1].is_a?(FluentPath::Expression)
+ tree[index + 1] = compute(tree[index + 1], data) if tree[index + 1].is_a?(FluentPath::Expression)
left = convert_to_boolean(previous_node)
- right = convert_to_boolean(tree[index+1])
+ right = convert_to_boolean(tree[index + 1])
case node
when :and
- tree[index] = (left&&right)
+ tree[index] = (left && right)
when :or
- tree[index] = (left||right)
+ tree[index] = (left || right)
when :xor
- tree[index] = (left^right)
+ tree[index] = (left ^ right)
else
raise "Logical operator not implemented: #{node}"
end
tree[previous_index] = nil
- tree[index+1] = nil
+ tree[index + 1] = nil
break
end
previous_index = index
@@ -540,7 +532,7 @@ def self.compute(tree, data)
functions = [:implies]
size = -1
- while(tree.length!=size)
+ while tree.length != size
FHIR.logger.debug "IMPLIES: #{tree}"
previous_node = nil
previous_index = nil
@@ -548,18 +540,18 @@ def self.compute(tree, data)
tree.each_with_index do |node, index|
if node.is_a?(Symbol) && functions.include?(node)
previous_node = compute(previous_node, data) if previous_node.is_a?(FluentPath::Expression)
- tree[index+1] = compute(tree[index+1], data) if tree[index+1].is_a?(FluentPath::Expression)
+ tree[index + 1] = compute(tree[index + 1], data) if tree[index + 1].is_a?(FluentPath::Expression)
case node
when :implies
tree[index] = false
- exists = !previous_node.nil? && previous_node!=:null
- implication = convert_to_boolean(tree[index+1])
- tree[index] = true if (exists && (implication || tree[index+1]==false))
+ exists = !previous_node.nil? && previous_node != :null
+ implication = convert_to_boolean(tree[index + 1])
+ tree[index] = true if exists && (implication || tree[index + 1] == false)
else
raise "Logical operator not implemented: #{node}"
end
tree[previous_index] = nil
- tree[index+1] = nil
+ tree[index + 1] = nil
break
end
previous_index = index
@@ -586,5 +578,4 @@ def self.compute(tree, data)
FHIR.logger.debug "RETURN: #{tree.first}"
tree.first
end
-
end
diff --git a/lib/fhir_models/fluentpath/expression.rb b/lib/fhir_models/fluentpath/expression.rb
index edcbf4e27..3def0e0da 100644
--- a/lib/fhir_models/fluentpath/expression.rb
+++ b/lib/fhir_models/fluentpath/expression.rb
@@ -1,6 +1,5 @@
module FluentPath
class Expression
-
attr_accessor :tree
def initialize(tree)
@@ -8,7 +7,7 @@ def initialize(tree)
end
def to_s
- "#{@tree}"
+ @tree.to_s
end
def inspect
@@ -16,9 +15,8 @@ def inspect
end
def clone
- clone_tree = @tree.map{|x|x.clone rescue x}
+ clone_tree = @tree.map { |x| x.clone rescue x }
FluentPath::Expression.new(clone_tree)
end
-
end
end
diff --git a/lib/fhir_models/fluentpath/parse.rb b/lib/fhir_models/fluentpath/parse.rb
index d63e53e88..62fa88585 100644
--- a/lib/fhir_models/fluentpath/parse.rb
+++ b/lib/fhir_models/fluentpath/parse.rb
@@ -1,9 +1,8 @@
module FluentPath
-
@@reserved = ['all', 'not', 'empty', 'exists', 'where', 'select', 'extension', 'startsWith', 'contains', 'in', 'distinct', '=', '!=', '<=', '>=', '<', '>', 'and', 'or', 'xor', '+', '-', '/', '*', 'toInteger', 'implies', 'children', 'first', 'last', 'tail', 'count', 'substring']
def self.parse(expression)
- build_tree( tokenize(expression) )
+ build_tree(tokenize(expression))
end
# This method tokenizes the expression into a flat array of tokens
@@ -11,13 +10,11 @@ def self.tokenize(expression)
raw_tokens = expression.gsub('()', '').split(/(\(|\)|\s|>=|<=|>|<|=|!=|\+|-|\/|\*)/)
# recreate strings if they were split
size = nil
- while(raw_tokens.include?("'") && size!=raw_tokens.length)
+ while raw_tokens.include?("'") && size != raw_tokens.length
index = raw_tokens.index("'")
- e_index = raw_tokens[(index+1)..raw_tokens.length].index("'")
- raw_tokens[index] = raw_tokens[index..(index+e_index+1)].join
- for i in (index+1)..(index+e_index+1)
- raw_tokens[i] = nil
- end
+ e_index = raw_tokens[(index + 1)..raw_tokens.length].index("'")
+ raw_tokens[index] = raw_tokens[index..(index + e_index + 1)].join
+ ((index + 1)..(index + e_index + 1)).each { |i| raw_tokens[i] = nil }
raw_tokens.compact!
size = raw_tokens.length
end
@@ -27,11 +24,11 @@ def self.tokenize(expression)
raw_tokens.each do |token|
# split a path unless it is quoted
if token.include?('.') && !(token.start_with?("'") && token.end_with?("'"))
- token.split('.').each{|t|tokens << t}
+ token.split('.').each { |t| tokens << t }
# split arrays and replace with array
elsif token.include?('|')
array = []
- token.split('|').each{|t|array << t.delete('\'')}
+ token.split('|').each { |t| array << t.delete('\'') }
tokens << array
else
tokens << token
@@ -39,28 +36,26 @@ def self.tokenize(expression)
end
# we may need to reassemble quoted strings again
reassemble_strings(tokens)
- tokens.delete_if { |token| (token.length==0 || (token.is_a?(String) && token.match(/\S/).nil?) ) }
+ tokens.delete_if { |token| (token.length.zero? || (token.is_a?(String) && token.match(/\S/).nil?)) }
FHIR.logger.debug "TOKENS: #{tokens}"
tokens
end
def self.reassemble_strings(tokens)
tokens.each_with_index do |token, index|
- if token.is_a?(String)
- e_index = nil
- if token.start_with?('"') && !token.end_with?('"')
- e_index = tokens[index..-1].index{|t| t.end_with?('"')}
- elsif token.start_with?("'") && !token.end_with?("'")
- e_index = tokens[index..-1].index{|t| t.end_with?("'")}
- end
- if e_index
- i = index+1
- while(i <= index+e_index)
- tokens[index] += tokens[i]
- tokens[i] = ''
- i+=1
- end
- end
+ next unless token.is_a?(String)
+ e_index = nil
+ if token.start_with?('"') && !token.end_with?('"')
+ e_index = tokens[index..-1].index { |t| t.end_with?('"') }
+ elsif token.start_with?("'") && !token.end_with?("'")
+ e_index = tokens[index..-1].index { |t| t.end_with?("'") }
+ end
+ next unless e_index
+ i = index + 1
+ while i <= index + e_index
+ tokens[index] += tokens[i]
+ tokens[i] = ''
+ i += 1
end
end
end
@@ -69,7 +64,7 @@ def self.reassemble_strings(tokens)
def self.build_tree(tokens)
return if tokens.empty?
tree = []
- while tokens.length > 0
+ until tokens.empty?
token = tokens.delete_at(0)
if '(' == token # sub expression
tree << FluentPath::Expression.new(build_tree(tokens))
@@ -81,8 +76,8 @@ def self.build_tree(tokens)
end
# post-processing
tree.each_with_index do |t, index|
- if t==:extension # 'extension' can be a path or a function call (if followed by a block)
- next_token = tree[index+1]
+ if t == :extension # 'extension' can be a path or a function call (if followed by a block)
+ next_token = tree[index + 1]
tree[index] = 'extension' if next_token.nil? || !next_token.is_a?(FluentPath::Expression)
end
end
@@ -99,10 +94,9 @@ def self.atom(token)
rescue
value = token
value = token.to_sym if @@reserved.include?(token)
- value = true if token=='true'
- value = false if token=='false'
+ value = true if token == 'true'
+ value = false if token == 'false'
end
value
end
-
end
diff --git a/lib/fhir_models/tasks/tasks.rake b/lib/fhir_models/tasks/tasks.rake
index 7fb9aada2..29e839f91 100644
--- a/lib/fhir_models/tasks/tasks.rake
+++ b/lib/fhir_models/tasks/tasks.rake
@@ -1,6 +1,5 @@
require 'fhir_models'
namespace :fhir do
-
desc 'console'
task :console, [] do |_t, _args|
sh 'bin/console'
@@ -19,7 +18,6 @@ namespace :fhir do
# 4. generate extensions?
# 5. generate profiles?
-
end
desc 'preprocess definitions'
@@ -44,52 +42,52 @@ namespace :fhir do
# copy structure definitions and profiles...
src = File.join(fhir_build_path, 'publish')
dest = File.join(defns, 'structures')
- copy_artifacts( ['profiles-types.json', 'profiles-resources.json', 'profiles-others.json', 'search-parameters.json', 'extension-definitions.json'], src, dest)
+ copy_artifacts(['profiles-types.json', 'profiles-resources.json', 'profiles-others.json', 'search-parameters.json', 'extension-definitions.json'], src, dest)
# copy valuesets and expansions...
dest = File.join(defns, 'valuesets')
- copy_artifacts( ['expansions.json', 'valuesets.json', 'v2-tables.json', 'v3-codesystems.json'], src, dest)
+ copy_artifacts(['expansions.json', 'valuesets.json', 'v2-tables.json', 'v3-codesystems.json'], src, dest)
# copy all the XML schemas
puts ' Copying XML schemas...'
files = Dir.glob(File.join(src, '*.xsd'))
- files.map!{|f|File.basename(f)}
+ files.map! { |f| File.basename(f) }
dest = File.join(defns, 'schema')
copy_artifacts(files, src, dest, false)
# delete the JSON examples
dest = File.join(root, 'examples', 'json')
puts ' Replacing JSON examples...'
- Dir.glob(File.join(dest, '*')).each{|f|File.delete(f) if !File.directory?(f)}
+ Dir.glob(File.join(dest, '*')).each { |f| File.delete(f) unless File.directory?(f) }
# copy the new JSON examples over
files = Dir.glob(File.join(src, '*.json'))
- files.map!{|f|File.basename(f)}
- files.keep_if{|f| f.include?('example') && !f.include?('canonical')}
+ files.map! { |f| File.basename(f) }
+ files.keep_if { |f| f.include?('example') && !f.include?('canonical') }
copy_artifacts(files, src, dest, false)
# copy the qicore examples too
qicore = File.join(src, 'qicore')
files = Dir.glob(File.join(qicore, '*.json'))
- files.map!{|f|File.basename(f)}
- files.keep_if{|f| f.include?('example') && !f.include?('canonical')}
+ files.map! { |f| File.basename(f) }
+ files.keep_if { |f| f.include?('example') && !f.include?('canonical') }
copy_artifacts(files, qicore, dest, false)
# delete the XML examples
dest = File.join(root, 'examples', 'xml')
puts ' Replacing XML examples...'
- Dir.glob(File.join(dest, '*')).each{|f|File.delete(f) if !File.directory?(f)}
+ Dir.glob(File.join(dest, '*')).each { |f| File.delete(f) unless File.directory?(f) }
# copy the new XML examples over
files = Dir.glob(File.join(src, '*.xml'))
- files.map!{|f|File.basename(f)}
- files.keep_if{|f| f.include?('example') && !f.include?('canonical')}
+ files.map! { |f| File.basename(f) }
+ files.keep_if { |f| f.include?('example') && !f.include?('canonical') }
copy_artifacts(files, src, dest, false)
# copy the qicore examples too
files = Dir.glob(File.join(qicore, '*.xml'))
- files.map!{|f|File.basename(f)}
- files.keep_if{|f| f.include?('example') && !f.include?('canonical')}
+ files.map! { |f| File.basename(f) }
+ files.keep_if { |f| f.include?('example') && !f.include?('canonical') }
copy_artifacts(files, qicore, dest, false)
# copy the version info
- copy_artifacts( ['version.info'], src, defns)
+ copy_artifacts(['version.info'], src, defns)
puts 'Done.'
end
@@ -98,20 +96,18 @@ namespace :fhir do
task :invariants, [] do |_t, _args|
# create a generator and load the definitions
d = FHIR::Definitions
- defs = d.get_complex_types + d.get_resource_definitions
+ defs = d.get_complex_types + d.get_resource_definitions
invariants = {}
defs.each do |structure_definition|
structure_definition['snapshot']['element'].each do |element|
- if element['constraint']
- element['constraint'].each do |constraint|
- if constraint['expression']
- invariants[constraint['key']] = {
- path: element['path'],
- expression: constraint['expression'],
- human: constraint['human']
- }
- end
- end
+ next unless element['constraint']
+ element['constraint'].each do |constraint|
+ next unless constraint['expression']
+ invariants[constraint['key']] = {
+ path: element['path'],
+ expression: constraint['expression'],
+ human: constraint['human']
+ }
end
end
end
@@ -123,12 +119,11 @@ namespace :fhir do
puts 'Wrote invariants into pipe-delimited file: invariants.txt'
end
- def copy_artifacts(artifacts, src_folder, dest_folder, verbose=true)
+ def copy_artifacts(artifacts, src_folder, dest_folder, verbose = true)
artifacts.each do |artifact|
puts " Copying #{artifact}..." if verbose
src = File.join(src_folder, artifact)
FileUtils.copy src, dest_folder
end
end
-
end
diff --git a/test/unit/contents_test.rb b/test/unit/contents_test.rb
index 0545b7e1c..d87d4109b 100644
--- a/test/unit/contents_test.rb
+++ b/test/unit/contents_test.rb
@@ -1,10 +1,6 @@
require_relative '../test_helper'
class ContentsTest < Test::Unit::TestCase
-
- # turn off the ridiculous warnings
- $VERBOSE=nil
-
EXAMPLE_JSON = File.join('lib', 'fhir_models', 'examples', 'json', 'patient-example.json')
EXAMPLE_XML = File.join('lib', 'fhir_models', 'examples', 'xml', 'patient-example.xml')
@@ -12,14 +8,14 @@ def test_xml_from_contents
xml = File.read(EXAMPLE_XML)
patient = FHIR.from_contents(xml)
assert !patient.nil?, 'From contents did not succeed with XML.'
- assert patient.id=='example', 'Patient did not deserialize correctly.'
+ assert patient.id == 'example', 'Patient did not deserialize correctly.'
end
def test_json_from_contents
json = File.read(EXAMPLE_JSON)
patient = FHIR.from_contents(json)
assert !patient.nil?, 'From contents did not succeed with JSON.'
- assert patient.id=='example', 'Patient did not deserialize correctly.'
+ assert patient.id == 'example', 'Patient did not deserialize correctly.'
end
def test_to_reference
@@ -27,7 +23,7 @@ def test_to_reference
patient = FHIR.from_contents(json)
reference = patient.to_reference
assert reference.is_a?(FHIR::Reference), 'Resource unable to create a self-reference.'
- assert reference.reference=='Patient/example', 'Resource did not generate self-reference correctly.'
+ assert reference.reference == 'Patient/example', 'Resource did not generate self-reference correctly.'
end
def test_negative_json_contents
@@ -39,5 +35,4 @@ def test_negative_xml_contents
nothing = FHIR.from_contents('')
assert nothing.nil?, 'From contents should have returned nil.'
end
-
end
diff --git a/test/unit/equality_test.rb b/test/unit/equality_test.rb
index 148a6cbe6..f3ca2ad7c 100644
--- a/test/unit/equality_test.rb
+++ b/test/unit/equality_test.rb
@@ -1,10 +1,6 @@
require_relative '../test_helper'
class EqualityTest < Test::Unit::TestCase
-
- # turn off the ridiculous warnings
- $VERBOSE=nil
-
EXAMPLE_ROOT = File.join('lib', 'fhir_models', 'examples')
ERROR_DIR = File.join('tmp', 'errors', 'EqualityTest')
@@ -16,21 +12,21 @@ class EqualityTest < Test::Unit::TestCase
example_json_files = File.join(EXAMPLE_ROOT, '**', '*.json')
example_xml_files = File.join(EXAMPLE_ROOT, '**', '*.xml')
- Dir.glob(example_json_files).each do | example_file |
+ Dir.glob(example_json_files).each do |example_file|
example_name = File.basename(example_file, '.json')
define_method("test_equality_#{example_name}_json") do
run_json_equality_test(example_file, example_name)
end
end
- Dir.glob(example_xml_files).each do | example_file |
+ Dir.glob(example_xml_files).each do |example_file|
example_name = File.basename(example_file, '.xml')
define_method("test_equality_#{example_name}_xml") do
run_xml_equality_test(example_file, example_name)
end
end
- Dir.glob(example_json_files).each do | example_json_file |
+ Dir.glob(example_json_files).each do |example_json_file|
example_name = File.basename(example_json_file, '.json')
example_xml_file = File.join(EXAMPLE_ROOT, 'xml', "#{example_name}.xml")
define_method("test_equality_#{example_name}") do
@@ -40,57 +36,56 @@ class EqualityTest < Test::Unit::TestCase
def run_json_equality_test(example_file, example_name)
input_json = File.read(example_file)
- instanceA = FHIR::Json.from_json(input_json)
- instanceB = FHIR::Json.from_json(input_json)
- if(!instanceA.equals?(instanceB) || !instanceB.equals?(instanceA))
- File.open("#{ERROR_DIR}/#{example_name}.json", 'w:UTF-8') {|file| file.write(input_json)}
+ instance_a = FHIR::Json.from_json(input_json)
+ instance_b = FHIR::Json.from_json(input_json)
+ if !instance_a.equals?(instance_b) || !instance_b.equals?(instance_a)
+ File.open("#{ERROR_DIR}/#{example_name}.json", 'w:UTF-8') { |file| file.write(input_json) }
end
- assert instanceA.equals?(instanceB), 'Instance A should be equal to instance B.'
- assert instanceB.equals?(instanceA), 'Instance B should be equal to instance A.'
+ assert instance_a.equals?(instance_b), 'Instance A should be equal to instance B.'
+ assert instance_b.equals?(instance_a), 'Instance B should be equal to instance A.'
end
def run_json_mismatch_test(example_file, example_name)
input_json = File.read(example_file)
- instanceA = FHIR::Json.from_json(input_json)
- instanceB = FHIR::Json.from_json(input_json)
- if !instanceA.mismatch(instanceB).empty?
- File.open("#{ERROR_DIR}/#{example_name}.json", 'w:UTF-8') {|file| file.write(input_json)}
+ instance_a = FHIR::Json.from_json(input_json)
+ instance_b = FHIR::Json.from_json(input_json)
+ unless instance_a.mismatch(instance_b).empty?
+ File.open("#{ERROR_DIR}/#{example_name}.json", 'w:UTF-8') { |file| file.write(input_json) }
end
- assert instanceA.mismatch(instanceB).empty?, 'Instance A should match instance B.'
+ assert instance_a.mismatch(instance_b).empty?, 'Instance A should match instance B.'
end
def run_xml_equality_test(example_file, example_name)
input_xml = File.read(example_file)
- instanceA = FHIR::Xml.from_xml(input_xml)
- instanceB = FHIR::Xml.from_xml(input_xml)
- if(!instanceA.equals?(instanceB) || !instanceB.equals?(instanceA))
- File.open("#{ERROR_DIR}/#{example_name}.xml", 'w:UTF-8') {|file| file.write(input_xml)}
+ instance_a = FHIR::Xml.from_xml(input_xml)
+ instance_b = FHIR::Xml.from_xml(input_xml)
+ if !instance_a.equals?(instance_b) || !instance_b.equals?(instance_a)
+ File.open("#{ERROR_DIR}/#{example_name}.xml", 'w:UTF-8') { |file| file.write(input_xml) }
end
- assert instanceA.equals?(instanceB), 'Instance A should be equal to instance B.'
- assert instanceB.equals?(instanceA), 'Instance B should be equal to instance A.'
+ assert instance_a.equals?(instance_b), 'Instance A should be equal to instance B.'
+ assert instance_b.equals?(instance_a), 'Instance B should be equal to instance A.'
end
def run_equality_test(example_json_file, example_xml_file, example_name)
input_json = File.read(example_json_file)
input_xml = File.read(example_xml_file)
- instanceA = FHIR::Json.from_json(input_json)
- instanceB = FHIR::Xml.from_xml(input_xml)
+ instance_a = FHIR::Json.from_json(input_json)
+ instance_b = FHIR::Xml.from_xml(input_xml)
exclude = ['div']
- if(!instanceA.equals?(instanceB, exclude) || !instanceB.equals?(instanceA, exclude))
- File.open("#{ERROR_DIR}/#{example_name}_A.json", 'w:UTF-8') {|file| file.write(instanceA.to_json)}
- File.open("#{ERROR_DIR}/#{example_name}_B.json", 'w:UTF-8') {|file| file.write(instanceB.to_json)}
- File.open("#{ERROR_DIR}/#{example_name}_A.xml", 'w:UTF-8') {|file| file.write(instanceA.to_xml)}
- File.open("#{ERROR_DIR}/#{example_name}_B.xml", 'w:UTF-8') {|file| file.write(instanceB.to_xml)}
+ if !instance_a.equals?(instance_b, exclude) || !instance_b.equals?(instance_a, exclude)
+ File.open("#{ERROR_DIR}/#{example_name}_A.json", 'w:UTF-8') { |file| file.write(instance_a.to_json) }
+ File.open("#{ERROR_DIR}/#{example_name}_B.json", 'w:UTF-8') { |file| file.write(instance_b.to_json) }
+ File.open("#{ERROR_DIR}/#{example_name}_A.xml", 'w:UTF-8') { |file| file.write(instance_a.to_xml) }
+ File.open("#{ERROR_DIR}/#{example_name}_B.xml", 'w:UTF-8') { |file| file.write(instance_b.to_xml) }
end
- assert instanceA.equals?(instanceB, exclude), 'Instance A should be equal to instance B.'
- assert instanceB.equals?(instanceA, exclude), 'Instance B should be equal to instance A.'
+ assert instance_a.equals?(instance_b, exclude), 'Instance A should be equal to instance B.'
+ assert instance_b.equals?(instance_a, exclude), 'Instance B should be equal to instance A.'
end
def test_mismatch
- x = FHIR::Patient.new({'id'=>'foo','gender'=>'male'})
- y = FHIR::Patient.new({'id'=>'foo','gender'=>'female'})
+ x = FHIR::Patient.new('id' => 'foo', 'gender' => 'male')
+ y = FHIR::Patient.new('id' => 'foo', 'gender' => 'female')
misses = x.mismatch(y)
- assert misses.first=='FHIR::Patient::gender', 'Mismatch did not detect differences.'
+ assert misses.first == 'FHIR::Patient::gender', 'Mismatch did not detect differences.'
end
-
end
diff --git a/test/unit/expansions_test.rb b/test/unit/expansions_test.rb
index 5e90adf40..01e3f8d45 100644
--- a/test/unit/expansions_test.rb
+++ b/test/unit/expansions_test.rb
@@ -1,10 +1,6 @@
require_relative '../test_helper'
class ExpansionsTest < Test::Unit::TestCase
-
- # turn off the ridiculous warnings
- $VERBOSE=nil
-
def test_expansion
codes = FHIR::Definitions.get_codes('http://hl7.org/fhir/ValueSet/relatedperson-relationshiptype')
assert (!codes.nil? && !codes.empty?), 'Expansions did not return expected codes.'
@@ -14,5 +10,4 @@ def test_missing_expansion
codes = FHIR::Definitions.get_codes('http://projectcrucible.org/nonexisting/valueset')
assert (codes.nil? || codes.empty?), 'Expansions returned unexpected codes.'
end
-
end
diff --git a/test/unit/extension_by_name_test.rb b/test/unit/extension_by_name_test.rb
index 891cff8f1..f0595258d 100644
--- a/test/unit/extension_by_name_test.rb
+++ b/test/unit/extension_by_name_test.rb
@@ -1,45 +1,41 @@
require_relative '../test_helper'
class ExtensionByNameTest < Test::Unit::TestCase
-
- # turn off the ridiculous warnings
- $VERBOSE=nil
-
def test_extension_by_name
patient = FHIR::Patient.new
- patient.extension << FHIR::Extension.new({ url: 'http://projectcrucible.org/extensions/foobar', valueInteger: 42 })
+ patient.extension << FHIR::Extension.new(url: 'http://projectcrucible.org/extensions/foobar', valueInteger: 42)
assert ( patient.foobar == 42), 'Method missing did not correctly find the extension value.'
end
def test_modifier_extension_by_name
patient = FHIR::Patient.new
- patient.modifierExtension << FHIR::Extension.new({ url: 'http://projectcrucible.org/extensions/foobar', valueInteger: 42 })
+ patient.modifierExtension << FHIR::Extension.new(url: 'http://projectcrucible.org/extensions/foobar', valueInteger: 42)
assert ( patient.foobar == 42), 'Method missing did not correctly find the extension value.'
end
def test_extension_by_anchor
patient = FHIR::Patient.new
- patient.extension << FHIR::Extension.new({ url: 'http://projectcrucible.org/extensions/foo#bar', valueInteger: 42 })
+ patient.extension << FHIR::Extension.new(url: 'http://projectcrucible.org/extensions/foo#bar', valueInteger: 42)
assert ( patient.bar == 42), 'Method missing did not correctly find the extension value.'
end
def test_modifier_extension_by_anchor
patient = FHIR::Patient.new
- patient.modifierExtension << FHIR::Extension.new({ url: 'http://projectcrucible.org/extensions/foo#bar', valueInteger: 42 })
+ patient.modifierExtension << FHIR::Extension.new(url: 'http://projectcrucible.org/extensions/foo#bar', valueInteger: 42)
assert ( patient.bar == 42), 'Method missing did not correctly find the modifier extension value.'
end
def test_nested_extension_by_name
patient = FHIR::Patient.new
- patient.extension << FHIR::Extension.new({ url: 'http://projectcrucible.org/extensions/foo' })
- patient.extension.first.extension << FHIR::Extension.new({ url: '#bar', valueInteger: 42 })
+ patient.extension << FHIR::Extension.new(url: 'http://projectcrucible.org/extensions/foo')
+ patient.extension.first.extension << FHIR::Extension.new(url: '#bar', valueInteger: 42)
assert ( patient.foo.bar == 42), 'Method missing did not correctly find the extension value.'
end
def test_nested_modifier_extension_by_name
patient = FHIR::Patient.new
- patient.modifierExtension << FHIR::Extension.new({ url: 'http://projectcrucible.org/extensions/foo' })
- patient.modifierExtension.first.extension << FHIR::Extension.new({ url: '#bar', valueInteger: 42 })
+ patient.modifierExtension << FHIR::Extension.new(url: 'http://projectcrucible.org/extensions/foo')
+ patient.modifierExtension.first.extension << FHIR::Extension.new(url: '#bar', valueInteger: 42)
assert ( patient.foo.bar == 42), 'Method missing did not correctly find the modifier extension value.'
end
end
diff --git a/test/unit/fluentpath/and_or_not_test.rb b/test/unit/fluentpath/and_or_not_test.rb
index 782d6f4ee..a6fdf2fd5 100644
--- a/test/unit/fluentpath/and_or_not_test.rb
+++ b/test/unit/fluentpath/and_or_not_test.rb
@@ -1,17 +1,16 @@
require_relative '../../test_helper'
class AndOrNotTest < Test::Unit::TestCase
-
def test_and_or_not
data = {
'name' => {
- 'given' => [ 'Joe', 'John' ]
+ 'given' => %w(Joe John)
},
'gender' => 'male',
'deceased' => false
}
- result = FluentPath.evaluate('deceased.not() and ((name.given or name.family) and gender) and deceased.not()',data)
- assert result==true, 'Failed and_or_not test.'
+ result = FluentPath.evaluate('deceased.not() and ((name.given or name.family) and gender) and deceased.not()', data)
+ assert result == true, 'Failed and_or_not test.'
end
def test_xor_tt
@@ -19,8 +18,8 @@ def test_xor_tt
'a' => true,
'b' => true
}
- result = FluentPath.evaluate('a xor b',data)
- assert result==false, 'Failed xor test.'
+ result = FluentPath.evaluate('a xor b', data)
+ assert result == false, 'Failed xor test.'
end
def test_xor_tf
@@ -28,8 +27,8 @@ def test_xor_tf
'a' => true,
'b' => false
}
- result = FluentPath.evaluate('a xor b',data)
- assert result==true, 'Failed xor test.'
+ result = FluentPath.evaluate('a xor b', data)
+ assert result == true, 'Failed xor test.'
end
def test_xor_ft
@@ -37,8 +36,8 @@ def test_xor_ft
'a' => false,
'b' => true
}
- result = FluentPath.evaluate('a xor b',data)
- assert result==true, 'Failed xor test.'
+ result = FluentPath.evaluate('a xor b', data)
+ assert result == true, 'Failed xor test.'
end
def test_xor_ff
@@ -46,24 +45,23 @@ def test_xor_ff
'a' => false,
'b' => false
}
- result = FluentPath.evaluate('a xor b',data)
- assert result==false, 'Failed xor test.'
+ result = FluentPath.evaluate('a xor b', data)
+ assert result == false, 'Failed xor test.'
end
def test_xor_tnil
data = {
'a' => true
}
- result = FluentPath.evaluate('a xor b',data)
- assert result==true, 'Failed xor test.'
+ result = FluentPath.evaluate('a xor b', data)
+ assert result == true, 'Failed xor test.'
end
def test_xor_nilstring
data = {
'b' => 'foo'
}
- result = FluentPath.evaluate('a xor b',data)
- assert result==true, 'Failed xor test.'
- end
-
-end
\ No newline at end of file
+ result = FluentPath.evaluate('a xor b', data)
+ assert result == true, 'Failed xor test.'
+ end
+end
diff --git a/test/unit/fluentpath/convert_to_boolean_test.rb b/test/unit/fluentpath/convert_to_boolean_test.rb
index f99a6dec9..3fab16699 100644
--- a/test/unit/fluentpath/convert_to_boolean_test.rb
+++ b/test/unit/fluentpath/convert_to_boolean_test.rb
@@ -1,59 +1,57 @@
require_relative '../../test_helper'
class ConvertToBooleanTest < Test::Unit::TestCase
-
def test_true_is_true
data = true
result = FluentPath.convert_to_boolean(data)
- assert result==true, 'true should be converted to true.'
+ assert result == true, 'true should be converted to true.'
end
def test_false_is_false
data = false
result = FluentPath.convert_to_boolean(data)
- assert result==false, 'false should be converted to false.'
+ assert result == false, 'false should be converted to false.'
end
def test_nil_is_false
data = nil
result = FluentPath.convert_to_boolean(data)
- assert result==false, 'nil should be converted to false.'
+ assert result == false, 'nil should be converted to false.'
end
def test_empty_hash_is_false
data = {}
result = FluentPath.convert_to_boolean(data)
- assert result==false, 'An empty hash should be converted to false.'
+ assert result == false, 'An empty hash should be converted to false.'
end
def test_empty_array_is_false
data = []
result = FluentPath.convert_to_boolean(data)
- assert result==false, 'An empty array should be converted to false.'
+ assert result == false, 'An empty array should be converted to false.'
end
def test_array_with_contents_is_true
- data = [false,true,false]
+ data = [false, true, false]
result = FluentPath.convert_to_boolean(data)
- assert result==true, 'An array with contents should be converted to true.'
+ assert result == true, 'An array with contents should be converted to true.'
end
def test_hash_with_contents_is_true
- data = {'foo'=>'bar'}
+ data = { 'foo' => 'bar' }
result = FluentPath.convert_to_boolean(data)
- assert result==true, 'A hash with contents should be converted to true.'
+ assert result == true, 'A hash with contents should be converted to true.'
end
def test_string_is_true
data = 'foo'
result = FluentPath.convert_to_boolean(data)
- assert result==true, 'A string should be converted to true.'
+ assert result == true, 'A string should be converted to true.'
end
def test_empty_string_is_true
data = ''
result = FluentPath.convert_to_boolean(data)
- assert result==true, 'An empty string should be converted to true.'
+ assert result == true, 'An empty string should be converted to true.'
end
-
-end
\ No newline at end of file
+end
diff --git a/test/unit/fluentpath/existence_test.rb b/test/unit/fluentpath/existence_test.rb
index 76cd287ed..09bc8b3b7 100644
--- a/test/unit/fluentpath/existence_test.rb
+++ b/test/unit/fluentpath/existence_test.rb
@@ -1,91 +1,91 @@
require_relative '../../test_helper'
class ExistenceTest < Test::Unit::TestCase
-
- NAMES = ['Bob','Robert']
+ NAMES = %w(Bob Robert)
PATIENT = {
'name' => {
'given' => NAMES
},
'deceasedBoolean' => false,
'aliveBoolean' => true,
- 'all_true' => [true,true,true],
- 'some_true' => [false,true,false],
- 'codes' => ['A','A','B'],
- 'numbers' => [0.1,0.1,1,-2],
+ 'all_true' => [true, true, true],
+ 'some_true' => [false, true, false],
+ 'codes' => %w(A A B),
+ 'numbers' => [0.1, 0.1, 1, -2],
'resourceType' => 'Patient'
}
# ------------------------- empty() -------------------------------------
def test_empty_false
- result = FluentPath.evaluate('Patient.name.given.empty()',PATIENT)
- assert result==false, 'Failed empty test.'
+ result = FluentPath.evaluate('Patient.name.given.empty()', PATIENT)
+ assert result == false, 'Failed empty test.'
end
def test_empty_true
- result = FluentPath.evaluate('Patient.name.family.empty()',PATIENT)
- assert result==true, 'Failed empty test.'
+ result = FluentPath.evaluate('Patient.name.family.empty()', PATIENT)
+ assert result == true, 'Failed empty test.'
end
# ------------------------- not() -------------------------------------
def test_not_true
- result = FluentPath.evaluate('Patient.deceasedBoolean.not()',PATIENT)
- assert result==true, 'Failed not test.'
+ result = FluentPath.evaluate('Patient.deceasedBoolean.not()', PATIENT)
+ assert result == true, 'Failed not test.'
end
def test_not_false
- result = FluentPath.evaluate('Patient.aliveBoolean.not()',PATIENT)
- assert result==false, 'Failed not test.'
+ result = FluentPath.evaluate('Patient.aliveBoolean.not()', PATIENT)
+ assert result == false, 'Failed not test.'
end
def test_not_empty
- result = FluentPath.evaluate('Patient.doesNotExist.not()',PATIENT)
- assert result==true, 'Failed not test.'
+ result = FluentPath.evaluate('Patient.doesNotExist.not()', PATIENT)
+ assert result == true, 'Failed not test.'
end
+
def test_not_notempty
- result = FluentPath.evaluate('Patient.name.given.not()',PATIENT)
- assert result==false, 'Failed not test.'
+ result = FluentPath.evaluate('Patient.name.given.not()', PATIENT)
+ assert result == false, 'Failed not test.'
end
# ------------------------- exists() -------------------------------------
def test_exists_true
- result = FluentPath.evaluate('Patient.name.given.exists()',PATIENT)
- assert result==true, 'Failed exists test.'
+ result = FluentPath.evaluate('Patient.name.given.exists()', PATIENT)
+ assert result == true, 'Failed exists test.'
end
def test_exists_false
- result = FluentPath.evaluate('Patient.name.family.exists()',PATIENT)
- assert result==false, 'Failed exists test.'
+ result = FluentPath.evaluate('Patient.name.family.exists()', PATIENT)
+ assert result == false, 'Failed exists test.'
end
# ------------------------- all() -------------------------------------
def test_all_true
- result = FluentPath.evaluate('Patient.all_true.all()',PATIENT)
- assert result==true, 'Failed all test.'
+ result = FluentPath.evaluate('Patient.all_true.all()', PATIENT)
+ assert result == true, 'Failed all test.'
end
def test_all_false
- result = FluentPath.evaluate('Patient.some_true.all()',PATIENT)
- assert result==false, 'Failed all test.'
+ result = FluentPath.evaluate('Patient.some_true.all()', PATIENT)
+ assert result == false, 'Failed all test.'
end
-
+
# ------------------------- subsetOf([]) -------------------------------------
# ------------------------- supersetOf([]) -------------------------------------
# ------------------------- isDistinct() -------------------------------------
# ------------------------- distinct() -------------------------------------
def test_distinct_strings
- result = FluentPath.evaluate('Patient.codes.distinct()',PATIENT)
- assert result==['A','B'], 'Failed distinct test.'
+ result = FluentPath.evaluate('Patient.codes.distinct()', PATIENT)
+ assert result == %w(A B), 'Failed distinct test.'
end
def test_distinct_numbers
- result = FluentPath.evaluate('Patient.numbers.distinct()',PATIENT)
- assert result==[0.1,1,-2], 'Failed distinct test.'
- end
+ result = FluentPath.evaluate('Patient.numbers.distinct()', PATIENT)
+ assert result == [0.1, 1, -2], 'Failed distinct test.'
+ end
def test_distinct_booleans
- result = FluentPath.evaluate('Patient.some_true.distinct()',PATIENT)
- assert result==[false,true], 'Failed distinct test.'
+ result = FluentPath.evaluate('Patient.some_true.distinct()', PATIENT)
+ assert result == [false, true], 'Failed distinct test.'
end
# ------------------------- count() -------------------------------------
@@ -93,62 +93,61 @@ def test_distinct_booleans
def test_in_numbers
data = {
'foo' => 2,
- 'set' => [1,2,3]
+ 'set' => [1, 2, 3]
}
- result = FluentPath.evaluate('foo in set',data)
- assert result==true, 'Failed in_numbers test.'
+ result = FluentPath.evaluate('foo in set', data)
+ assert result == true, 'Failed in_numbers test.'
end
def test_in_strings
data = {
'foo' => 'B',
- 'set' => ['A','B','C']
+ 'set' => %w(A B C)
}
- result = FluentPath.evaluate('foo in set',data)
- assert result==true, 'Failed in_strings test.'
+ result = FluentPath.evaluate('foo in set', data)
+ assert result == true, 'Failed in_strings test.'
end
def test_in_booleans
data = {
'foo' => true,
- 'set' => [false,true,false]
+ 'set' => [false, true, false]
}
- result = FluentPath.evaluate('foo in set',data)
- assert result==true, 'Failed in_booleans test.'
+ result = FluentPath.evaluate('foo in set', data)
+ assert result == true, 'Failed in_booleans test.'
end
def test_in_numbers_false
data = {
'foo' => 5,
- 'set' => [1,2,3]
+ 'set' => [1, 2, 3]
}
- result = FluentPath.evaluate('foo in set',data)
- assert result==false, 'Failed in_numbers_false test.'
+ result = FluentPath.evaluate('foo in set', data)
+ assert result == false, 'Failed in_numbers_false test.'
end
def test_in_strings_false
data = {
'foo' => 'D',
- 'set' => ['A','B','C']
+ 'set' => %w(A B C)
}
- result = FluentPath.evaluate('foo in set',data)
- assert result==false, 'Failed in_strings_false test.'
+ result = FluentPath.evaluate('foo in set', data)
+ assert result == false, 'Failed in_strings_false test.'
end
def test_in_booleans_false
data = {
'foo' => true,
- 'set' => [false,false]
+ 'set' => [false, false]
}
- result = FluentPath.evaluate('foo in set',data)
- assert result==false, 'Failed in_booleans_false test.'
+ result = FluentPath.evaluate('foo in set', data)
+ assert result == false, 'Failed in_booleans_false test.'
end
# ------------------- set creation ------------------------
def test_set_creation
data = {}
- result = FluentPath.evaluate("('A'|'B'|'C')",data)
- assert result==['A','B','C'], 'Failed set creation.'
+ result = FluentPath.evaluate("('A'|'B'|'C')", data)
+ assert result == %w(A B C), 'Failed set creation.'
end
-
end
diff --git a/test/unit/fluentpath/extension_test.rb b/test/unit/fluentpath/extension_test.rb
index 40420fa84..94d165557 100644
--- a/test/unit/fluentpath/extension_test.rb
+++ b/test/unit/fluentpath/extension_test.rb
@@ -1,18 +1,17 @@
require_relative '../../test_helper'
class ExtensionTest < Test::Unit::TestCase
-
CONCEPT = {
- 'coding'=> [{
- 'system'=> 'http://hl7.org/fhir/v3/Race',
- 'code'=> '1096-7'
- }]
+ 'coding' => [{
+ 'system' => 'http://hl7.org/fhir/v3/Race',
+ 'code' => '1096-7'
+ }]
}
EXT = {
- 'url'=>'http://hl7.org/fhir/StructureDefinition/us-core-race',
- 'valueCodeableConcept'=> CONCEPT
+ 'url' => 'http://hl7.org/fhir/StructureDefinition/us-core-race',
+ 'valueCodeableConcept' => CONCEPT
}
- EXT_ARRAY = [ EXT ]
+ EXT_ARRAY = [EXT]
PATIENT = {
'name' => {
'given' => ['Foo']
@@ -22,33 +21,32 @@ class ExtensionTest < Test::Unit::TestCase
}
def test_extension
- result = FluentPath.evaluate('Patient.extension',PATIENT)
- assert result==EXT_ARRAY, 'Failed to resolve extension.'
+ result = FluentPath.evaluate('Patient.extension', PATIENT)
+ assert result == EXT_ARRAY, 'Failed to resolve extension.'
end
def test_extension_with_block
- result = FluentPath.evaluate("Patient.extension('http://hl7.org/fhir/StructureDefinition/us-core-race')",PATIENT)
- assert result==EXT, 'Failed to resolve extension by name.'
+ result = FluentPath.evaluate("Patient.extension('http://hl7.org/fhir/StructureDefinition/us-core-race')", PATIENT)
+ assert result == EXT, 'Failed to resolve extension by name.'
end
def test_extension_item
- result = FluentPath.evaluate('Patient.extension[0]',PATIENT)
- assert result==EXT, 'Failed to resolve extension by index.'
+ result = FluentPath.evaluate('Patient.extension[0]', PATIENT)
+ assert result == EXT, 'Failed to resolve extension by index.'
end
def test_extension_value
- result = FluentPath.evaluate('Patient.extension[0].value',PATIENT)
- assert result==CONCEPT, 'Failed to resolve extension value.'
+ result = FluentPath.evaluate('Patient.extension[0].value', PATIENT)
+ assert result == CONCEPT, 'Failed to resolve extension value.'
end
def test_extension_with_block_value
- result = FluentPath.evaluate("Patient.extension('http://hl7.org/fhir/StructureDefinition/us-core-race').value",PATIENT)
- assert result==CONCEPT, 'Failed to resolve named extension value.'
+ result = FluentPath.evaluate("Patient.extension('http://hl7.org/fhir/StructureDefinition/us-core-race').value", PATIENT)
+ assert result == CONCEPT, 'Failed to resolve named extension value.'
end
def test_extension_missing
- result = FluentPath.evaluate("Patient.extension('http://hl7.org/fhir/StructureDefinition/us-core-ethnicity')",PATIENT)
- assert result==nil, 'Failed to resolve missing extension.'
+ result = FluentPath.evaluate("Patient.extension('http://hl7.org/fhir/StructureDefinition/us-core-ethnicity')", PATIENT)
+ assert result.nil?, 'Failed to resolve missing extension.'
end
-
-end
\ No newline at end of file
+end
diff --git a/test/unit/fluentpath/fluent_equality_test.rb b/test/unit/fluentpath/fluent_equality_test.rb
index 6fafb0ac8..f7ebf5c5f 100644
--- a/test/unit/fluentpath/fluent_equality_test.rb
+++ b/test/unit/fluentpath/fluent_equality_test.rb
@@ -1,14 +1,13 @@
require_relative '../../test_helper'
class FluentEqualityTest < Test::Unit::TestCase
-
def test_equals_strings
data = {
'a' => 'foo',
'b' => 'foo'
}
- result = FluentPath.evaluate('a = b',data)
- assert result==true, 'Failed equals test.'
+ result = FluentPath.evaluate('a = b', data)
+ assert result == true, 'Failed equals test.'
end
def test_equals_numbers
@@ -16,8 +15,8 @@ def test_equals_numbers
'a' => 1.0,
'b' => 1.0
}
- result = FluentPath.evaluate('a = b',data)
- assert result==true, 'Failed equals test.'
+ result = FluentPath.evaluate('a = b', data)
+ assert result == true, 'Failed equals test.'
end
def test_equals_dateTimes
@@ -25,8 +24,8 @@ def test_equals_dateTimes
'a' => '2016-06-06T10:55:34+01:00',
'b' => '2016-06-06T10:55:34+01:00'
}
- result = FluentPath.evaluate('a = b',data)
- assert result==true, 'Failed equals test.'
+ result = FluentPath.evaluate('a = b', data)
+ assert result == true, 'Failed equals test.'
end
def test_not_equals_strings
@@ -34,8 +33,8 @@ def test_not_equals_strings
'a' => 'foo',
'b' => 'bar'
}
- result = FluentPath.evaluate('a != b',data)
- assert result==true, 'Failed not_equals test.'
+ result = FluentPath.evaluate('a != b', data)
+ assert result == true, 'Failed not_equals test.'
end
def test_not_equals_numbers
@@ -43,8 +42,8 @@ def test_not_equals_numbers
'a' => 1.0,
'b' => 1.5
}
- result = FluentPath.evaluate('a != b',data)
- assert result==true, 'Failed not_equals test.'
+ result = FluentPath.evaluate('a != b', data)
+ assert result == true, 'Failed not_equals test.'
end
def test_not_equals_dateTimes
@@ -52,8 +51,8 @@ def test_not_equals_dateTimes
'a' => '2016-06-06T10:55:34+01:00',
'b' => '2016-06-09T10:55:34+01:00'
}
- result = FluentPath.evaluate('a != b',data)
- assert result==true, 'Failed not_equals test.'
+ result = FluentPath.evaluate('a != b', data)
+ assert result == true, 'Failed not_equals test.'
end
def test_less_than_strings
@@ -61,8 +60,8 @@ def test_less_than_strings
'a' => 'a',
'b' => 'b'
}
- result = FluentPath.evaluate('a < b',data)
- assert result==true, 'Failed less_than test.'
+ result = FluentPath.evaluate('a < b', data)
+ assert result == true, 'Failed less_than test.'
end
def test_less_than_numbers
@@ -70,8 +69,8 @@ def test_less_than_numbers
'a' => 1.0,
'b' => 1.5
}
- result = FluentPath.evaluate('a < b',data)
- assert result==true, 'Failed less_than test.'
+ result = FluentPath.evaluate('a < b', data)
+ assert result == true, 'Failed less_than test.'
end
def test_less_than_dateTimes
@@ -79,17 +78,17 @@ def test_less_than_dateTimes
'a' => '2016-06-06T10:55:34+01:00',
'b' => '2016-06-09T10:55:34+01:00'
}
- result = FluentPath.evaluate('a < b',data)
- assert result==true, 'Failed less_than test.'
- end
+ result = FluentPath.evaluate('a < b', data)
+ assert result == true, 'Failed less_than test.'
+ end
def test_greater_than_strings
data = {
'a' => 'd',
'b' => 'b'
}
- result = FluentPath.evaluate('a > b',data)
- assert result==true, 'Failed greater_than test.'
+ result = FluentPath.evaluate('a > b', data)
+ assert result == true, 'Failed greater_than test.'
end
def test_greater_than_numbers
@@ -97,8 +96,8 @@ def test_greater_than_numbers
'a' => 1.9,
'b' => 1.5
}
- result = FluentPath.evaluate('a > b',data)
- assert result==true, 'Failed greater_than test.'
+ result = FluentPath.evaluate('a > b', data)
+ assert result == true, 'Failed greater_than test.'
end
def test_greater_than_dateTimes
@@ -106,17 +105,17 @@ def test_greater_than_dateTimes
'a' => '2016-06-18T10:55:34+01:00',
'b' => '2016-06-09T10:55:34+01:00'
}
- result = FluentPath.evaluate('a > b',data)
- assert result==true, 'Failed greater_than test.'
- end
+ result = FluentPath.evaluate('a > b', data)
+ assert result == true, 'Failed greater_than test.'
+ end
def test_greater_than_equals_strings
data = {
'a' => 'd',
'b' => 'b'
}
- result = FluentPath.evaluate('a >= b',data)
- assert result==true, 'Failed greater_than_equals test.'
+ result = FluentPath.evaluate('a >= b', data)
+ assert result == true, 'Failed greater_than_equals test.'
end
def test_greater_than_equals_numbers
@@ -124,8 +123,8 @@ def test_greater_than_equals_numbers
'a' => 1.55,
'b' => 1.5
}
- result = FluentPath.evaluate('a >= b',data)
- assert result==true, 'Failed greater_than_equals test.'
+ result = FluentPath.evaluate('a >= b', data)
+ assert result == true, 'Failed greater_than_equals test.'
end
def test_greater_than_equals_dateTimes
@@ -133,9 +132,9 @@ def test_greater_than_equals_dateTimes
'a' => '2016-06-18T10:55:34+01:00',
'b' => '2016-06-09T10:55:34+01:00'
}
- result = FluentPath.evaluate('a >= b',data)
- assert result==true, 'Failed greater_than_equals test.'
- end
+ result = FluentPath.evaluate('a >= b', data)
+ assert result == true, 'Failed greater_than_equals test.'
+ end
# -------------------------------------- negative tests -----------------------------------------
@@ -144,8 +143,8 @@ def test_equals_strings_false
'a' => 'foo',
'b' => 'bar'
}
- result = FluentPath.evaluate('a = b',data)
- assert result==false, 'Failed equals test.'
+ result = FluentPath.evaluate('a = b', data)
+ assert result == false, 'Failed equals test.'
end
def test_equals_numbers_false
@@ -153,8 +152,8 @@ def test_equals_numbers_false
'a' => 1.0,
'b' => 1.2
}
- result = FluentPath.evaluate('a = b',data)
- assert result==false, 'Failed equals test.'
+ result = FluentPath.evaluate('a = b', data)
+ assert result == false, 'Failed equals test.'
end
def test_equals_dateTimes_false
@@ -162,8 +161,8 @@ def test_equals_dateTimes_false
'a' => '2016-06-06T10:55:34+01:00',
'b' => '2016-03-06T10:55:34+01:00'
}
- result = FluentPath.evaluate('a = b',data)
- assert result==false, 'Failed equals test.'
+ result = FluentPath.evaluate('a = b', data)
+ assert result == false, 'Failed equals test.'
end
def test_not_equals_strings_false
@@ -171,8 +170,8 @@ def test_not_equals_strings_false
'a' => 'foo',
'b' => 'foo'
}
- result = FluentPath.evaluate('a != b',data)
- assert result==false, 'Failed not_equals test.'
+ result = FluentPath.evaluate('a != b', data)
+ assert result == false, 'Failed not_equals test.'
end
def test_not_equals_numbers_false
@@ -180,8 +179,8 @@ def test_not_equals_numbers_false
'a' => 1.0,
'b' => 1.0
}
- result = FluentPath.evaluate('a != b',data)
- assert result==false, 'Failed not_equals test.'
+ result = FluentPath.evaluate('a != b', data)
+ assert result == false, 'Failed not_equals test.'
end
def test_not_equals_dateTimes_false
@@ -189,8 +188,8 @@ def test_not_equals_dateTimes_false
'a' => '2016-06-06T10:55:34+01:00',
'b' => '2016-06-06T10:55:34+01:00'
}
- result = FluentPath.evaluate('a != b',data)
- assert result==false, 'Failed not_equals test.'
+ result = FluentPath.evaluate('a != b', data)
+ assert result == false, 'Failed not_equals test.'
end
def test_less_than_strings_false
@@ -198,8 +197,8 @@ def test_less_than_strings_false
'a' => 'b',
'b' => 'a'
}
- result = FluentPath.evaluate('a < b',data)
- assert result==false, 'Failed less_than test.'
+ result = FluentPath.evaluate('a < b', data)
+ assert result == false, 'Failed less_than test.'
end
def test_less_than_numbers_false
@@ -207,8 +206,8 @@ def test_less_than_numbers_false
'a' => 1.5,
'b' => 1.0
}
- result = FluentPath.evaluate('a < b',data)
- assert result==false, 'Failed less_than test.'
+ result = FluentPath.evaluate('a < b', data)
+ assert result == false, 'Failed less_than test.'
end
def test_less_than_dateTimes_false
@@ -216,17 +215,17 @@ def test_less_than_dateTimes_false
'a' => '2016-06-09T10:55:34+01:00',
'b' => '2016-06-06T10:55:34+01:00'
}
- result = FluentPath.evaluate('a < b',data)
- assert result==false, 'Failed less_than test.'
- end
+ result = FluentPath.evaluate('a < b', data)
+ assert result == false, 'Failed less_than test.'
+ end
def test_greater_than_strings_false
data = {
'a' => 'a',
'b' => 'b'
}
- result = FluentPath.evaluate('a > b',data)
- assert result==false, 'Failed greater_than test.'
+ result = FluentPath.evaluate('a > b', data)
+ assert result == false, 'Failed greater_than test.'
end
def test_greater_than_numbers_false
@@ -234,8 +233,8 @@ def test_greater_than_numbers_false
'a' => 0.9,
'b' => 1.5
}
- result = FluentPath.evaluate('a > b',data)
- assert result==false, 'Failed greater_than test.'
+ result = FluentPath.evaluate('a > b', data)
+ assert result == false, 'Failed greater_than test.'
end
def test_greater_than_dateTimes_false
@@ -243,7 +242,7 @@ def test_greater_than_dateTimes_false
'a' => '2016-06-09T10:55:34+01:00',
'b' => '2016-06-18T10:55:34+01:00'
}
- result = FluentPath.evaluate('a > b',data)
- assert result==false, 'Failed greater_than test.'
- end
-end
\ No newline at end of file
+ result = FluentPath.evaluate('a > b', data)
+ assert result == false, 'Failed greater_than test.'
+ end
+end
diff --git a/test/unit/fluentpath/invariants_test.rb b/test/unit/fluentpath/invariants_test.rb
index 46263bafb..14e57faa9 100644
--- a/test/unit/fluentpath/invariants_test.rb
+++ b/test/unit/fluentpath/invariants_test.rb
@@ -1,15 +1,14 @@
require_relative '../../test_helper'
class InvariantsTest < Test::Unit::TestCase
-
def test_tim3_true
expression = "((period or frequency) and when).not()"
data = {
'period' => '2016-2017',
'frequency' => 'daily'
}
- result = FluentPath.evaluate(expression,data)
- assert result==true, 'Failed tim-3 test.'
+ result = FluentPath.evaluate(expression, data)
+ assert result == true, 'Failed tim-3 test.'
end
def test_tim3_false
@@ -19,8 +18,8 @@ def test_tim3_false
'frequency' => 'daily',
'when' => 'noon'
}
- result = FluentPath.evaluate(expression,data)
- assert result==false, 'Failed tim-3 test.'
+ result = FluentPath.evaluate(expression, data)
+ assert result == false, 'Failed tim-3 test.'
end
def test_imm1_true
@@ -33,8 +32,8 @@ def test_imm1_true
'reasonNotGiven' => []
}
}
- result = FluentPath.evaluate(expression,data)
- assert result==true, 'Failed imm-1 test.'
+ result = FluentPath.evaluate(expression, data)
+ assert result == true, 'Failed imm-1 test.'
end
def test_imm1_false
@@ -47,8 +46,8 @@ def test_imm1_false
'reasonNotGiven' => ['Refusal']
}
}
- result = FluentPath.evaluate(expression,data)
- assert result==false, 'Failed imm-1 test.'
+ result = FluentPath.evaluate(expression, data)
+ assert result == false, 'Failed imm-1 test.'
end
def test_imm2_true
@@ -60,8 +59,8 @@ def test_imm2_true
'reasonNotGiven' => []
}
}
- result = FluentPath.evaluate(expression,data)
- assert result==true, 'Failed imm-2 test.'
+ result = FluentPath.evaluate(expression, data)
+ assert result == true, 'Failed imm-2 test.'
end
def test_imm2_false
@@ -73,8 +72,8 @@ def test_imm2_false
'reasonNotGiven' => ['Refusal']
}
}
- result = FluentPath.evaluate(expression,data)
- assert result==false, 'Failed imm-2 test.'
+ result = FluentPath.evaluate(expression, data)
+ assert result == false, 'Failed imm-2 test.'
end
def test_nsd2
@@ -83,11 +82,11 @@ def test_nsd2
'resourceType' => 'NamingSystem',
'uniqueId' => {
'preferred' => true,
- 'type' => ['A','B','B']
+ 'type' => %w(A B B)
}
}
- result = FluentPath.evaluate(expression,data)
- assert result==['A','B'], 'Failed nsd-2 test.'
+ result = FluentPath.evaluate(expression, data)
+ assert result == %w(A B), 'Failed nsd-2 test.'
end
def test_obs7_false
@@ -95,18 +94,18 @@ def test_obs7_false
data = {
'resourceType' => 'Observation',
'code' => {
- 'coding' => [{'code'=>'foo'},{'code'=>'bar'}]
+ 'coding' => [{ 'code' => 'foo' }, { 'code' => 'bar' }]
},
- 'component' => [
+ 'component' => [
{
- 'code' => {'coding' => [{'code'=>'foo'},{'code'=>'bar'}]}
- },{
- 'code' => {'coding' => [{'code'=>'baz'},{'code'=>'boz'}]}
+ 'code' => { 'coding' => [{ 'code' => 'foo' }, { 'code' => 'bar' }] }
+ }, {
+ 'code' => { 'coding' => [{ 'code' => 'baz' }, { 'code' => 'boz' }] }
}
]
}
- result = FluentPath.evaluate(expression,data)
- assert result==false, 'Failed obs-7 test.'
+ result = FluentPath.evaluate(expression, data)
+ assert result == false, 'Failed obs-7 test.'
end
def test_obs7_true
@@ -114,16 +113,16 @@ def test_obs7_true
data = {
'resourceType' => 'Observation',
'code' => {
- 'coding' => [{'code'=>'foo'},{'code'=>'bar'}]
+ 'coding' => [{ 'code' => 'foo' }, { 'code' => 'bar' }]
},
- 'component' => [
+ 'component' => [
{
- 'code' => {'coding' => [{'code'=>'baz'},{'code'=>'boz'}]}
+ 'code' => { 'coding' => [{ 'code' => 'baz' }, { 'code' => 'boz' }] }
}
]
}
- result = FluentPath.evaluate(expression,data)
- assert result==true, 'Failed obs-7 test.'
+ result = FluentPath.evaluate(expression, data)
+ assert result == true, 'Failed obs-7 test.'
end
def test_per1_date_true
@@ -132,8 +131,8 @@ def test_per1_date_true
'start' => '2016-06-06',
'end' => '2016-06-16'
}
- result = FluentPath.evaluate(expression,data)
- assert result==true, 'Failed per-1 test.'
+ result = FluentPath.evaluate(expression, data)
+ assert result == true, 'Failed per-1 test.'
end
def test_per1_date_false
@@ -142,9 +141,9 @@ def test_per1_date_false
'start' => '2016-06-06',
'end' => '2016-06-01'
}
- result = FluentPath.evaluate(expression,data)
- assert result==false, 'Failed per-1 test.'
- end
+ result = FluentPath.evaluate(expression, data)
+ assert result == false, 'Failed per-1 test.'
+ end
def test_per1_dateTime_true
expression = "start.empty() or end.empty() or (start <= end)"
@@ -152,8 +151,8 @@ def test_per1_dateTime_true
'start' => '2016-06-06T10:55:34+01:00',
'end' => '2016-06-16T09:44:23+01:00'
}
- result = FluentPath.evaluate(expression,data)
- assert result==true, 'Failed per-1 test.'
+ result = FluentPath.evaluate(expression, data)
+ assert result == true, 'Failed per-1 test.'
end
def test_per1_dateTime_false
@@ -162,8 +161,8 @@ def test_per1_dateTime_false
'start' => '2016-06-06T10:55:34+01:00',
'end' => '2016-06-01T09:44:23+01:00'
}
- result = FluentPath.evaluate(expression,data)
- assert result==false, 'Failed per-1 test.'
+ result = FluentPath.evaluate(expression, data)
+ assert result == false, 'Failed per-1 test.'
end
def test_dis1_true
@@ -173,8 +172,8 @@ def test_dis1_true
'system' => 'http://unitsofmeasure.org',
'value' => 300
}
- result = FluentPath.evaluate(expression,data)
- assert result==true, 'Failed dis-1 test.'
+ result = FluentPath.evaluate(expression, data)
+ assert result == true, 'Failed dis-1 test.'
end
def test_dis1_false
@@ -184,8 +183,8 @@ def test_dis1_false
'system' => 'foobar',
'value' => 300
}
- result = FluentPath.evaluate(expression,data)
- assert result==false, 'Failed dis-1 test.'
+ result = FluentPath.evaluate(expression, data)
+ assert result == false, 'Failed dis-1 test.'
end
def test_que10_true
@@ -194,17 +193,17 @@ def test_que10_true
'type' => 'string',
'maxLength' => 300
}
- result = FluentPath.evaluate(expression,data)
- assert result==true, 'Failed que-10 test.'
+ result = FluentPath.evaluate(expression, data)
+ assert result == true, 'Failed que-10 test.'
end
def test_eld14_true
expression = "constraint.select(key).distinct()"
data = {
- 'constraint' => [{'key'=>'A'},{'key'=>'B'},{'key'=>'A'}]
+ 'constraint' => [{ 'key' => 'A' }, { 'key' => 'B' }, { 'key' => 'A' }]
}
- result = FluentPath.evaluate(expression,data)
- assert result==['A','B'], 'Failed eld-14 test.'
+ result = FluentPath.evaluate(expression, data)
+ assert result == %w(A B), 'Failed eld-14 test.'
end
def test_eld2_true
@@ -213,8 +212,8 @@ def test_eld2_true
'min' => 1,
'max' => '2'
}
- result = FluentPath.evaluate(expression,data)
- assert result==true, 'Failed eld-2 test.'
+ result = FluentPath.evaluate(expression, data)
+ assert result == true, 'Failed eld-2 test.'
end
def test_eld2_false
@@ -223,8 +222,8 @@ def test_eld2_false
'min' => 1,
'max' => '0'
}
- result = FluentPath.evaluate(expression,data)
- assert result==false, 'Failed eld-2 test.'
+ result = FluentPath.evaluate(expression, data)
+ assert result == false, 'Failed eld-2 test.'
end
def test_eld2_unlimited
@@ -233,8 +232,8 @@ def test_eld2_unlimited
'min' => 1,
'max' => '*'
}
- result = FluentPath.evaluate(expression,data)
- assert result==true, 'Failed eld-2 test.'
+ result = FluentPath.evaluate(expression, data)
+ assert result == true, 'Failed eld-2 test.'
end
def test_opd2
@@ -243,8 +242,8 @@ def test_opd2
'searchType' => 'number',
'type' => 'string'
}
- result = FluentPath.evaluate(expression,data)
- assert result==true, 'Failed opd-2 test.'
+ result = FluentPath.evaluate(expression, data)
+ assert result == true, 'Failed opd-2 test.'
end
def test_sdf12
@@ -255,8 +254,8 @@ def test_sdf12
'element' => [{ 'base' => 'Patient' }]
}
}
- result = FluentPath.evaluate(expression,data)
- assert result==true, 'Failed sdf-12 test.'
+ result = FluentPath.evaluate(expression, data)
+ assert result == true, 'Failed sdf-12 test.'
end
def test_sdf9_true
@@ -264,13 +263,13 @@ def test_sdf9_true
data = {
'baseType' => 'Patient',
'snapshot' => {
- 'element' => [{
+ 'element' => [{
'base' => 'Patient'
}]
}
}
- result = FluentPath.evaluate(expression,data)
- assert result==true, 'Failed sdf-9 test.'
+ result = FluentPath.evaluate(expression, data)
+ assert result == true, 'Failed sdf-9 test.'
end
def test_sdf9_false
@@ -278,16 +277,15 @@ def test_sdf9_false
data = {
'baseType' => 'Patient',
'snapshot' => {
- 'element' => [{
+ 'element' => [{
'base' => 'Patient',
'label' => 'Foo',
- 'code' => [{'code'=>'Bar'}],
+ 'code' => [{ 'code' => 'Bar' }],
'requirements' => 'Baz'
}]
}
}
- result = FluentPath.evaluate(expression,data)
- assert result==false, 'Failed sdf-9 test.'
+ result = FluentPath.evaluate(expression, data)
+ assert result == false, 'Failed sdf-9 test.'
end
-
-end
\ No newline at end of file
+end
diff --git a/test/unit/fluentpath/math_test.rb b/test/unit/fluentpath/math_test.rb
index 70d38df28..02023252b 100644
--- a/test/unit/fluentpath/math_test.rb
+++ b/test/unit/fluentpath/math_test.rb
@@ -1,7 +1,6 @@
require_relative '../../test_helper'
class MathTest < Test::Unit::TestCase
-
DATA = {
'x' => 7,
'y' => 3,
@@ -10,53 +9,52 @@ class MathTest < Test::Unit::TestCase
}
def test_plus_vars
- result = FluentPath.evaluate('x + y',DATA)
- assert result==10, 'Failed addition test.'
+ result = FluentPath.evaluate('x + y', DATA)
+ assert result == 10, 'Failed addition test.'
end
def test_plus_nums
- result = FluentPath.evaluate('5 + 5',DATA)
- assert result==10, 'Failed addition test.'
+ result = FluentPath.evaluate('5 + 5', DATA)
+ assert result == 10, 'Failed addition test.'
end
def test_plus_vars_strings
- result = FluentPath.evaluate("foo + bar",DATA)
- assert result=='ab', 'Failed addition test.'
+ result = FluentPath.evaluate('foo + bar', DATA)
+ assert result == 'ab', 'Failed addition test.'
end
def test_plus_strings
- result = FluentPath.evaluate("'Joe' + ' ' + 'Smith'",DATA)
- assert result=='Joe Smith', 'Failed addition test.'
+ result = FluentPath.evaluate("'Joe' + ' ' + 'Smith'", DATA)
+ assert result == 'Joe Smith', 'Failed addition test.'
end
def test_minus_vars
- result = FluentPath.evaluate('x - y',DATA)
- assert result==4, 'Failed subtraction test.'
+ result = FluentPath.evaluate('x - y', DATA)
+ assert result == 4, 'Failed subtraction test.'
end
def test_minus_nums
- result = FluentPath.evaluate('5 - 1',DATA)
- assert result==4, 'Failed subtraction test.'
+ result = FluentPath.evaluate('5 - 1', DATA)
+ assert result == 4, 'Failed subtraction test.'
end
def test_divide_vars
- result = FluentPath.evaluate('x / y',DATA)
- assert result==2, 'Failed division test.'
+ result = FluentPath.evaluate('x / y', DATA)
+ assert result == 2, 'Failed division test.'
end
def test_divide_nums
- result = FluentPath.evaluate('6 / 3',DATA)
- assert result==2, 'Failed division test.'
+ result = FluentPath.evaluate('6 / 3', DATA)
+ assert result == 2, 'Failed division test.'
end
def test_multiply_vars
- result = FluentPath.evaluate('x * y',DATA)
- assert result==21, 'Failed multiplication test.'
+ result = FluentPath.evaluate('x * y', DATA)
+ assert result == 21, 'Failed multiplication test.'
end
def test_multiply_nums
- result = FluentPath.evaluate('6 * 3',DATA)
- assert result==18, 'Failed multiplication test.'
+ result = FluentPath.evaluate('6 * 3', DATA)
+ assert result == 18, 'Failed multiplication test.'
end
-
end
diff --git a/test/unit/fluentpath/path_test.rb b/test/unit/fluentpath/path_test.rb
index 7f4f48136..ca3aab85f 100644
--- a/test/unit/fluentpath/path_test.rb
+++ b/test/unit/fluentpath/path_test.rb
@@ -1,14 +1,13 @@
require_relative '../../test_helper'
class PathTest < Test::Unit::TestCase
-
- NAMES = ['Bob','Robert']
- WTF = ['Bobert','Rob']
+ NAMES = %w(Bob Robert)
+ WTF = %w(Bobert Rob)
PID1 = 99
PATIENT = {
'name' => [{
'given' => NAMES
- },{
+ }, {
'given' => WTF
}],
'resourceType' => 'Patient'
@@ -19,100 +18,99 @@ class PathTest < Test::Unit::TestCase
}
}
ARRAY = {
- 'Array' => ['A','B'],
+ 'Array' => %w(A B),
'index' => 1
}
ITEM = { 'base' => 'Patient' }
- LIST = [5,4,3,2,1]
+ LIST = [5, 4, 3, 2, 1]
def test_path_without_type
- result = FluentPath.evaluate('name.given',PATIENT)
- assert result==NAMES+WTF, 'Failed to navigate path.'
+ result = FluentPath.evaluate('name.given', PATIENT)
+ assert result == NAMES + WTF, 'Failed to navigate path.'
end
def test_path_with_type
- result = FluentPath.evaluate('Patient.name.given',PATIENT)
- assert result==NAMES+WTF, 'Failed to navigate path.'
+ result = FluentPath.evaluate('Patient.name.given', PATIENT)
+ assert result == NAMES + WTF, 'Failed to navigate path.'
end
def test_path_conversion_2args
- result = FluentPath.evaluate('Patient.name.given.select(substring(0,3))',PATIENT)
- assert result==['Bob','Rob','Bob','Rob'], 'Failed to navigate path.'
+ result = FluentPath.evaluate('Patient.name.given.select(substring(0,3))', PATIENT)
+ assert result == %w(Bob Rob Bob Rob), 'Failed to navigate path.'
end
def test_path_conversion_1args
- result = FluentPath.evaluate('Patient.name.given.select(substring(1))',PATIENT)
- assert result==['ob','obert','obert','ob'], 'Failed to navigate path.'
+ result = FluentPath.evaluate('Patient.name.given.select(substring(1))', PATIENT)
+ assert result == %w(ob obert obert ob), 'Failed to navigate path.'
end
def test_path_with_quotes
- result = FluentPath.evaluate('Message."PID-1"',MESSAGE)
- assert result==PID1, 'Failed to navigate path.'
+ result = FluentPath.evaluate('Message."PID-1"', MESSAGE)
+ assert result == PID1, 'Failed to navigate path.'
end
def test_array_access
- result = FluentPath.evaluate('Array[0]',ARRAY)
- assert result=='A', 'Failed to navigate path.'
- end
+ result = FluentPath.evaluate('Array[0]', ARRAY)
+ assert result == 'A', 'Failed to navigate path.'
+ end
def test_array_access_with_variable
- result = FluentPath.evaluate('Array[index]',ARRAY)
- assert result=='B', 'Failed to navigate path.'
+ result = FluentPath.evaluate('Array[index]', ARRAY)
+ assert result == 'B', 'Failed to navigate path.'
end
def test_children_first
- expression = "children().element.first()"#.label.empty() and children().element.first().code.empty() and children().element.first().requirements.empty()"
+ expression = 'children().element.first()' # .label.empty() and children().element.first().code.empty() and children().element.first().requirements.empty()
data = {
'baseType' => 'Patient',
'snapshot' => {
'element' => [ITEM]
}
}
- result = FluentPath.evaluate(expression,data)
- assert result==ITEM, 'Failed to navigate children.'
+ result = FluentPath.evaluate(expression, data)
+ assert result == ITEM, 'Failed to navigate children.'
end
def test_first
expression = "list.first"
data = { 'list' => LIST }
- result = FluentPath.evaluate(expression,data)
- assert result==LIST.first, 'Failed to access first element.'
+ result = FluentPath.evaluate(expression, data)
+ assert result == LIST.first, 'Failed to access first element.'
end
def test_last
expression = "list.last"
data = { 'list' => LIST }
- result = FluentPath.evaluate(expression,data)
- assert result==LIST.last, 'Failed to access last element.'
+ result = FluentPath.evaluate(expression, data)
+ assert result == LIST.last, 'Failed to access last element.'
end
def test_tail
expression = "list.tail"
data = { 'list' => LIST }
- result = FluentPath.evaluate(expression,data)
- assert result==LIST.last(LIST.length-1), 'Failed to access tail elements.'
+ result = FluentPath.evaluate(expression, data)
+ assert result == LIST.last(LIST.length - 1), 'Failed to access tail elements.'
end
def test_count
expression = "list.count"
data = { 'list' => LIST }
- result = FluentPath.evaluate(expression,data)
- assert result==LIST.length, 'Failed to count elements.'
+ result = FluentPath.evaluate(expression, data)
+ assert result == LIST.length, 'Failed to count elements.'
end
def test_parent
expression = "$parent.type='integer' or $parent.type='decimal'"
data = {}
parent = { 'type' => 'integer' }
- result = FluentPath.evaluate(expression,data,parent)
- assert result==true, 'Failed to access parent.'
+ result = FluentPath.evaluate(expression, data, parent)
+ assert result == true, 'Failed to access parent.'
end
def test_parent_nil
expression = "$parent.type='integer' or $parent.type='decimal'"
data = {}
- result = FluentPath.evaluate(expression,data)
- assert result==false, 'Failed to gracefully handle no $parent.'
+ result = FluentPath.evaluate(expression, data)
+ assert result == false, 'Failed to gracefully handle no $parent.'
end
-
-end
\ No newline at end of file
+end
diff --git a/test/unit/fluentpath/string_test.rb b/test/unit/fluentpath/string_test.rb
index d7692846f..03ca88a65 100644
--- a/test/unit/fluentpath/string_test.rb
+++ b/test/unit/fluentpath/string_test.rb
@@ -1,7 +1,6 @@
require_relative '../../test_helper'
class StringTest < Test::Unit::TestCase
-
DATA = {
'name' => 'John Doe',
'reference' => '#uri',
@@ -10,29 +9,28 @@ class StringTest < Test::Unit::TestCase
# ------------------------- startsWith() -------------------------------------
def test_startsWith_true
- result = FluentPath.evaluate("name.startsWith('John')",DATA)
- assert result==true, 'Failed startsWith test.'
+ result = FluentPath.evaluate("name.startsWith('John')", DATA)
+ assert result == true, 'Failed startsWith test.'
end
def test_startsWith_anchor
- result = FluentPath.evaluate("reference.startsWith('#')",DATA)
- assert result==true, 'Failed startsWith test.'
+ result = FluentPath.evaluate("reference.startsWith('#')", DATA)
+ assert result == true, 'Failed startsWith test.'
end
def test_startsWith_false
- result = FluentPath.evaluate("name.startsWith('Zoo')",DATA)
- assert result==false, 'Failed startsWith test.'
+ result = FluentPath.evaluate("name.startsWith('Zoo')", DATA)
+ assert result == false, 'Failed startsWith test.'
end
# ------------------------- contains() -------------------------------------
def test_contains_true
- result = FluentPath.evaluate("name.contains('hn')",DATA)
- assert result==true, 'Failed contains test.'
+ result = FluentPath.evaluate("name.contains('hn')", DATA)
+ assert result == true, 'Failed contains test.'
end
def test_contains_false
- result = FluentPath.evaluate("name.contains('.')",DATA)
- assert result==false, 'Failed contains test.'
+ result = FluentPath.evaluate("name.contains('.')", DATA)
+ assert result == false, 'Failed contains test.'
end
-
-end
\ No newline at end of file
+end
diff --git a/test/unit/json_format_test.rb b/test/unit/json_format_test.rb
index 7cbae1ce3..00e4a2557 100644
--- a/test/unit/json_format_test.rb
+++ b/test/unit/json_format_test.rb
@@ -1,10 +1,6 @@
require_relative '../test_helper'
class JsonFormatTest < Test::Unit::TestCase
-
- # turn off the ridiculous warnings
- $VERBOSE=nil
-
ERROR_DIR = File.join('tmp', 'errors', 'JsonFormatTest')
ERROR_LOSSY_DIR = File.join('tmp', 'errors', 'JsonLossinessTest')
EXAMPLE_ROOT = File.join('lib', 'fhir_models', 'examples', 'json')
@@ -19,7 +15,7 @@ class JsonFormatTest < Test::Unit::TestCase
FileUtils.rm_rf(ERROR_LOSSY_DIR) if File.directory?(ERROR_LOSSY_DIR)
FileUtils.mkdir_p ERROR_LOSSY_DIR
- Dir.glob(example_files).each do | example_file |
+ Dir.glob(example_files).each do |example_file|
example_name = File.basename(example_file, '.json')
define_method("test_json_format_#{example_name}") do
run_json_roundtrip_test(example_file, example_name)
@@ -39,10 +35,10 @@ def run_json_roundtrip_test(example_file, example_name)
errors = compare(input_hash, output_hash)
- if !errors.empty?
- File.open("#{ERROR_DIR}/#{example_name}.err", 'w:UTF-8') {|file| file.write(errors.join("\n"))}
- File.open("#{ERROR_DIR}/#{example_name}_PRODUCED.json", 'w:UTF-8') {|file| file.write(output_json)}
- File.open("#{ERROR_DIR}/#{example_name}_ORIGINAL.json", 'w:UTF-8') {|file| file.write(input_json)}
+ unless errors.empty?
+ File.open("#{ERROR_DIR}/#{example_name}.err", 'w:UTF-8') { |file| file.write(errors.join("\n")) }
+ File.open("#{ERROR_DIR}/#{example_name}_PRODUCED.json", 'w:UTF-8') { |file| file.write(output_json) }
+ File.open("#{ERROR_DIR}/#{example_name}_ORIGINAL.json", 'w:UTF-8') { |file| file.write(input_json) }
end
assert errors.empty?, 'Differences in generated JSON vs original'
@@ -60,11 +56,11 @@ def run_json_xml_json_lossiness_test(example_file, example_name)
errors = compare(input_hash, output_hash)
- if !errors.empty?
- File.open("#{ERROR_LOSSY_DIR}/#{example_name}.err", 'w:UTF-8') {|file| file.write(errors.join("\n"))}
- File.open("#{ERROR_LOSSY_DIR}/#{example_name}_PRODUCED.xml", 'w:UTF-8') {|file| file.write(output_xml)}
- File.open("#{ERROR_LOSSY_DIR}/#{example_name}_PRODUCED.json", 'w:UTF-8') {|file| file.write(output_json)}
- File.open("#{ERROR_LOSSY_DIR}/#{example_name}_ORIGINAL.json", 'w:UTF-8') {|file| file.write(input_json)}
+ unless errors.empty?
+ File.open("#{ERROR_LOSSY_DIR}/#{example_name}.err", 'w:UTF-8') { |file| file.write(errors.join("\n")) }
+ File.open("#{ERROR_LOSSY_DIR}/#{example_name}_PRODUCED.xml", 'w:UTF-8') { |file| file.write(output_xml) }
+ File.open("#{ERROR_LOSSY_DIR}/#{example_name}_PRODUCED.json", 'w:UTF-8') { |file| file.write(output_json) }
+ File.open("#{ERROR_LOSSY_DIR}/#{example_name}_ORIGINAL.json", 'w:UTF-8') { |file| file.write(input_json) }
end
assert errors.empty?, 'Differences in generated JSON vs original'
@@ -80,10 +76,10 @@ def compare(hash_input, hash_output)
errors = []
added = hash_output.keys - hash_input.keys
- errors << "Added extra fields: #{added.join(', ')}" if !added.empty?
+ errors << "Added extra fields: #{added.join(', ')}" unless added.empty?
dropped = hash_input.keys - hash_output.keys
- errors << "Dropped fields: #{dropped.join(', ')}" if !dropped.empty?
+ errors << "Dropped fields: #{dropped.join(', ')}" unless dropped.empty?
shared_keys = hash_input.keys - dropped
shared_keys.each do |key|
@@ -99,19 +95,19 @@ def compare(hash_input, hash_output)
end
if input.is_a?(Array)
input.each_with_index do |item, index|
- itemB = output[index]
+ item_b = output[index]
if item.is_a?(Hash)
- errors += compare(item, itemB)
- elsif input!=output
+ errors += compare(item, item_b)
+ elsif input != output
errors << "#{key}[#{index}]: #{input} != #{output}"
end
end
- errors << "#{key}:\n - INPUT: #{input}\n - OUTPUT: #{output}" if input.size!=output.size
+ errors << "#{key}:\n - INPUT: #{input}\n - OUTPUT: #{output}" if input.size != output.size
elsif input.is_a?(Hash)
errors += compare(input, output)
elsif is_a_date_or_time(input) || is_a_date_or_time(output)
- # ignore date time formatting
- elsif input!=output
+ # ignore date time formatting
+ elsif input != output
errors << "#{key}:\n - INPUT: #{input}\n - OUTPUT: #{output}"
end
end
@@ -124,7 +120,7 @@ def strip_out_unsupported!(hash)
hash.each do |key, value|
delete_key = false
# delete fhir_comments and primitive extensions
- if key=='fhir_comments' || key.start_with?('_')
+ if key == 'fhir_comments' || key.start_with?('_')
delete_key = true
elsif value.is_a?(Array)
value.each do |thing|
@@ -139,18 +135,18 @@ def strip_out_unsupported!(hash)
end
def is_a_date_or_time(value)
- return false if !value.is_a?(String)
+ return false unless value.is_a?(String)
- ['date', 'dateTime', 'time'].each do |type|
+ %w(date dateTime time).each do |type|
meta = FHIR::PRIMITIVES[type]
expression = meta['regex']
regex = Regexp.new(expression)
- return true if !(regex =~ value).nil?
+ return true unless (regex =~ value).nil?
end
# when 'instant'
regex = /-?[0-9]{4}(-(0[1-9]|1[0-2])(-(0[0-9]|[1-2][0-9]|3[0-1])(T([01][0-9]|2[0-3]):[0-5][0-9]:[0-5][0-9](\.[0-9]+)?(Z|(\+|-)((0[0-9]|1[0-3]):[0-5][0-9]|14:00)))))/
- return true if !(regex =~ value).nil?
+ return true unless (regex =~ value).nil?
false
@@ -168,5 +164,4 @@ def is_a_date_or_time(value)
# return true if !(regex =~ value).nil?
# false
end
-
end
diff --git a/test/unit/json_validation_test.rb b/test/unit/json_validation_test.rb
index 37af45d66..948854360 100644
--- a/test/unit/json_validation_test.rb
+++ b/test/unit/json_validation_test.rb
@@ -1,10 +1,6 @@
require_relative '../test_helper'
class JsonValidationTest < Test::Unit::TestCase
-
- # turn off the ridiculous warnings
- $VERBOSE=nil
-
ERROR_DIR = File.join('tmp', 'errors', 'JsonValidationTest')
EXAMPLE_ROOT = File.join('lib', 'fhir_models', 'examples', 'json')
@@ -16,7 +12,7 @@ class JsonValidationTest < Test::Unit::TestCase
FileUtils.rm_rf(ERROR_DIR) if File.directory?(ERROR_DIR)
FileUtils.mkdir_p ERROR_DIR
- Dir.glob(example_files).each do | example_file |
+ Dir.glob(example_files).each do |example_file|
example_name = File.basename(example_file, '.json')
define_method("test_json_validation_#{example_name}") do
run_json_validation_test(example_file, example_name)
@@ -27,11 +23,10 @@ def run_json_validation_test(example_file, example_name)
input_json = File.read(example_file)
resource = FHIR::Json.from_json(input_json)
errors = resource.validate
- if !errors.empty?
- File.open("#{ERROR_DIR}/#{example_name}.err", 'w:UTF-8') {|file| file.write(JSON.pretty_unparse(errors))}
- File.open("#{ERROR_DIR}/#{example_name}.json", 'w:UTF-8') {|file| file.write(input_json)}
+ unless errors.empty?
+ File.open("#{ERROR_DIR}/#{example_name}.err", 'w:UTF-8') { |file| file.write(JSON.pretty_unparse(errors)) }
+ File.open("#{ERROR_DIR}/#{example_name}.json", 'w:UTF-8') { |file| file.write(input_json) }
end
assert errors.empty?, 'Resource failed to validate.'
end
-
end
diff --git a/test/unit/multiple_types_test.rb b/test/unit/multiple_types_test.rb
index 281b11451..78183722c 100644
--- a/test/unit/multiple_types_test.rb
+++ b/test/unit/multiple_types_test.rb
@@ -1,10 +1,6 @@
require_relative '../test_helper'
class MultipleTypesTest < Test::Unit::TestCase
-
- # turn off the ridiculous warnings
- $VERBOSE=nil
-
# move this flag around multiple types and assert that it is found in the correct place
FLAG = 123
@@ -60,7 +56,7 @@ def test_multiple_types
def test_non_existing_multiple_types
obs = FHIR::Observation.new
- assert ((obs.foo rescue FLAG)==FLAG), 'Observation.foo should not exist'
+ assert ((obs.foo rescue FLAG) == FLAG), 'Observation.foo should not exist'
end
def test_multiple_cardinality
@@ -72,5 +68,4 @@ def test_multiple_cardinality
end
end
end
-
end
diff --git a/test/unit/profile_validation_test.rb b/test/unit/profile_validation_test.rb
index 2b14218f9..51cdcc3f7 100644
--- a/test/unit/profile_validation_test.rb
+++ b/test/unit/profile_validation_test.rb
@@ -1,10 +1,6 @@
require_relative '../test_helper'
class ProfileValidationTest < Test::Unit::TestCase
-
- # turn off the ridiculous warnings
- $VERBOSE=nil
-
ERROR_DIR = File.join('tmp', 'errors', 'ProfileValidationTest')
EXAMPLE_ROOT = File.join('lib', 'fhir_models', 'examples', 'json')
@@ -16,7 +12,7 @@ class ProfileValidationTest < Test::Unit::TestCase
FileUtils.rm_rf(ERROR_DIR) if File.directory?(ERROR_DIR)
FileUtils.mkdir_p ERROR_DIR
- Dir.glob(example_files).each do | example_file |
+ Dir.glob(example_files).each do |example_file|
example_name = File.basename(example_file, '.json')
define_method("test_profile_validation_#{example_name}") do
run_profile_validation_test(example_file, example_name)
@@ -30,9 +26,9 @@ def run_profile_validation_test(example_file, example_name)
profile = FHIR::Definitions.get_profile(profile_uri)
assert profile.is_a?(FHIR::StructureDefinition), 'Profile is not a valid StructureDefinition.'
errors = profile.validate_resource(resource)
- if !errors.empty?
- File.open("#{ERROR_DIR}/#{example_name}.err", 'w:UTF-8') {|file| errors.each{|e| file.write("#{e}\n")}}
- File.open("#{ERROR_DIR}/#{example_name}.json", 'w:UTF-8') {|file| file.write(input_json)}
+ unless errors.empty?
+ File.open("#{ERROR_DIR}/#{example_name}.err", 'w:UTF-8') { |file| errors.each { |e| file.write("#{e}\n") } }
+ File.open("#{ERROR_DIR}/#{example_name}.json", 'w:UTF-8') { |file| file.write(input_json) }
end
assert errors.empty?, 'Resource failed to validate.'
end
@@ -40,9 +36,8 @@ def run_profile_validation_test(example_file, example_name)
def test_language_binding_validation
binding_strength = FHIR::Resource::METADATA['language']['binding']['strength']
FHIR::Resource::METADATA['language']['binding']['strength'] = 'required'
- model = FHIR::Resource.new({'language'=>'en-US'})
+ model = FHIR::Resource.new('language' => 'en-US')
assert model.is_valid?, 'Language validation failed.'
- FHIR::Resource::METADATA['language']['binding']['strength'] = binding_strength
+ FHIR::Resource::METADATA['language']['binding']['strength'] = binding_strength
end
-
end
diff --git a/test/unit/xml_format_test.rb b/test/unit/xml_format_test.rb
index b51e93abd..d1debc035 100644
--- a/test/unit/xml_format_test.rb
+++ b/test/unit/xml_format_test.rb
@@ -1,10 +1,6 @@
require_relative '../test_helper'
class XmlFormatTest < Test::Unit::TestCase
-
- # turn off the ridiculous warnings
- $VERBOSE=nil
-
ERROR_DIR = File.join('tmp', 'errors', 'XmlFormatTest')
ERROR_LOSSY_DIR = File.join('tmp', 'errors', 'XmlLossinessTest')
EXAMPLE_ROOT = File.join('lib', 'fhir_models', 'examples', 'xml')
@@ -19,7 +15,7 @@ class XmlFormatTest < Test::Unit::TestCase
FileUtils.rm_rf(ERROR_LOSSY_DIR) if File.directory?(ERROR_LOSSY_DIR)
FileUtils.mkdir_p ERROR_LOSSY_DIR
- Dir.glob(example_files).each do | example_file |
+ Dir.glob(example_files).each do |example_file|
example_name = File.basename(example_file, '.xml')
define_method("test_xml_format_#{example_name}") do
run_xml_roundtrip_test(example_file, example_name)
@@ -41,10 +37,10 @@ def run_xml_roundtrip_test(example_file, example_name)
clean_nodes(output_nodes.root)
errors = calculate_errors(input_nodes, output_nodes)
- if !errors.empty?
- File.open("#{ERROR_DIR}/#{example_name}.err", 'w:UTF-8') {|file| file.write(errors.map{|x| "#{x.first} #{x.last.to_xml}"}.join("\n"))}
- File.open("#{ERROR_DIR}/#{example_name}_PRODUCED.xml", 'w:UTF-8') {|file| file.write(output_xml)}
- File.open("#{ERROR_DIR}/#{example_name}_ORIGINAL.xml", 'w:UTF-8') {|file| file.write(input_xml)}
+ unless errors.empty?
+ File.open("#{ERROR_DIR}/#{example_name}.err", 'w:UTF-8') { |file| file.write(errors.map { |x| "#{x.first} #{x.last.to_xml}" }.join("\n")) }
+ File.open("#{ERROR_DIR}/#{example_name}_PRODUCED.xml", 'w:UTF-8') { |file| file.write(output_xml) }
+ File.open("#{ERROR_DIR}/#{example_name}_ORIGINAL.xml", 'w:UTF-8') { |file| file.write(input_xml) }
end
assert errors.empty?, 'Differences in generated XML vs original'
@@ -64,10 +60,10 @@ def run_xml_json_xml_lossiness_test(example_file, example_name)
clean_nodes(output_nodes.root)
errors = calculate_errors(input_nodes, output_nodes)
- if !errors.empty?
- File.open("#{ERROR_LOSSY_DIR}/#{example_name}.err", 'w:UTF-8') {|file| file.write(errors.map{|x| "#{x.first} #{x.last.to_xml}"}.join("\n"))}
- File.open("#{ERROR_LOSSY_DIR}/#{example_name}_PRODUCED.xml", 'w:UTF-8') {|file| file.write(output_xml)}
- File.open("#{ERROR_LOSSY_DIR}/#{example_name}_ORIGINAL.xml", 'w:UTF-8') {|file| file.write(input_xml)}
+ unless errors.empty?
+ File.open("#{ERROR_LOSSY_DIR}/#{example_name}.err", 'w:UTF-8') { |file| file.write(errors.map { |x| "#{x.first} #{x.last.to_xml}" }.join("\n")) }
+ File.open("#{ERROR_LOSSY_DIR}/#{example_name}_PRODUCED.xml", 'w:UTF-8') { |file| file.write(output_xml) }
+ File.open("#{ERROR_LOSSY_DIR}/#{example_name}_ORIGINAL.xml", 'w:UTF-8') { |file| file.write(input_xml) }
end
assert errors.empty?, 'Differences in generated XML vs original'
@@ -77,9 +73,9 @@ def calculate_errors(input_nodes, output_nodes)
errors = input_nodes.diff(output_nodes, added: true, removed: true).to_a
errors.keep_if do |error|
# we do not support the preservation of comments, ignore them
- is_comment = (error.last.class==Nokogiri::XML::Comment)
+ is_comment = (error.last.class == Nokogiri::XML::Comment)
# we do not care about empty whitespace
- is_empty_text = (error.last.class==Nokogiri::XML::Text && error.last.text.strip=='')
+ is_empty_text = (error.last.class == Nokogiri::XML::Text && error.last.text.strip == '')
!(is_comment || is_empty_text)
end
# we do not care about preservation of trailing zeros
@@ -87,7 +83,7 @@ def calculate_errors(input_nodes, output_nodes)
left = []
right = []
errors.each do |error|
- if error.first=='-'
+ if error.first == '-'
left << error
else
right << error
@@ -96,7 +92,7 @@ def calculate_errors(input_nodes, output_nodes)
regex = /-?([0]|([1-9][0-9]*))(\\.[0-9]+)?/
left.each_with_index do |error, index|
right_error = right[index]
- two_numerics = ( (error.last.value =~ regex) && (right_error.last.value =~regex) )
+ two_numerics = ((error.last.value =~ regex) && (right_error.last.value =~ regex))
if two_numerics && (error.last.value.to_f == right_error.last.value.to_f)
errors.delete(error)
errors.delete(right_error)
@@ -109,13 +105,12 @@ def calculate_errors(input_nodes, output_nodes)
# process input to remove leading and trailing newlines and whitespace around text
def clean_nodes(node)
node.children.each do |child|
- child.content = child.content.strip if(child.is_a?(Nokogiri::XML::Text))
+ child.content = child.content.strip if child.is_a?(Nokogiri::XML::Text)
if child.has_attribute?('value')
# remove all the children -- these will be primitive extensions which we do not support.
child.children = ''
end
- clean_nodes(child) if !child.children.empty?
+ clean_nodes(child) unless child.children.empty?
end
end
-
end
diff --git a/test/unit/xml_schema_validation_test.rb b/test/unit/xml_schema_validation_test.rb
index 3fa65910e..4740d5671 100644
--- a/test/unit/xml_schema_validation_test.rb
+++ b/test/unit/xml_schema_validation_test.rb
@@ -1,10 +1,6 @@
require_relative '../test_helper'
class XmlSchemaValidationTest < Test::Unit::TestCase
-
- # turn off the ridiculous warnings
- $VERBOSE=nil
-
ERROR_DIR = File.join('tmp', 'errors', 'XmlSchemaValidationTest')
EXAMPLE_ROOT = File.join('lib', 'fhir_models', 'examples', 'xml')
@@ -19,7 +15,7 @@ class XmlSchemaValidationTest < Test::Unit::TestCase
FileUtils.rm_rf(ERROR_DIR) if File.directory?(ERROR_DIR)
FileUtils.mkdir_p ERROR_DIR
- Dir.glob(example_files).each do | example_file |
+ Dir.glob(example_files).each do |example_file|
example_name = File.basename(example_file, '.xml')
define_method("test_xml_schema_validation_#{example_name}") do
run_xml_schema_validation_test(example_file, example_name)
@@ -32,37 +28,37 @@ def run_xml_schema_validation_test(example_file, example_name)
assert !resource.nil?
output_xml = resource.to_xml
- assert output_xml.length > 0
+ assert !output_xml.empty?
errors_input = XSD.validate(Nokogiri::XML(input_xml))
errors_output = XSD.validate(Nokogiri::XML(output_xml))
original_errors = false
- if (!errors_input.empty?)
+ unless errors_input.empty?
puts " WARNING: validation errors in example: #{example_name}"
- if (errors_input.length == errors_output.length)
+ if errors_input.length == errors_output.length
errors_match = true
- (0...(errors_input.length)).each {|i| errors_match &&= (errors_output[i].message == errors_input[i].message)}
+ (0...(errors_input.length)).each { |i| errors_match &&= (errors_output[i].message == errors_input[i].message) }
original_errors = errors_match
end
end
if !errors_output.empty? && !original_errors
- File.open("#{ERROR_DIR}/#{example_name}.err", 'w:UTF-8') do | file |
+ File.open("#{ERROR_DIR}/#{example_name}.err", 'w:UTF-8') do |file|
file.write "#{example_name}: #{errors_output.length} errors\n\n"
errors_output.each do |error|
file.write(sprintf("%-8d %s\n", error.line, error.message))
end
- if !errors_input.empty?
+ unless errors_input.empty?
file.write('ORIGINAL ERRORS: ')
errors_input.each do |error|
file.write(sprintf("%-8d %s\n", error.line, error.message))
end
end
end
- File.open("#{ERROR_DIR}/#{example_name}_PRODUCED.xml", 'w:UTF-8') {|file| file.write(output_xml)}
- File.open("#{ERROR_DIR}/#{example_name}_ORIGINAL.xml", 'w:UTF-8') {|file| file.write(input_xml)}
+ File.open("#{ERROR_DIR}/#{example_name}_PRODUCED.xml", 'w:UTF-8') { |file| file.write(output_xml) }
+ File.open("#{ERROR_DIR}/#{example_name}_ORIGINAL.xml", 'w:UTF-8') { |file| file.write(input_xml) }
end
assert errors_output.empty? || original_errors, "Schema Validation errors: \n #{errors_output.join('\n')}"
diff --git a/test/unit/xml_validation_test.rb b/test/unit/xml_validation_test.rb
index a91eb6398..e08a7d62a 100644
--- a/test/unit/xml_validation_test.rb
+++ b/test/unit/xml_validation_test.rb
@@ -1,10 +1,6 @@
require_relative '../test_helper'
class XmlValidationTest < Test::Unit::TestCase
-
- # turn off the ridiculous warnings
- $VERBOSE=nil
-
ERROR_DIR = File.join('tmp', 'errors', 'XmlValidationTest')
EXAMPLE_ROOT = File.join('lib', 'fhir_models', 'examples', 'xml')
@@ -16,7 +12,7 @@ class XmlValidationTest < Test::Unit::TestCase
FileUtils.rm_rf(ERROR_DIR) if File.directory?(ERROR_DIR)
FileUtils.mkdir_p ERROR_DIR
- Dir.glob(example_files).each do | example_file |
+ Dir.glob(example_files).each do |example_file|
example_name = File.basename(example_file, '.xml')
define_method("test_xml_validation_#{example_name}") do
run_xml_validation_test(example_file, example_name)
@@ -27,9 +23,9 @@ def run_xml_validation_test(example_file, example_name)
input_xml = File.read(example_file)
resource = FHIR::Xml.from_xml(input_xml)
errors = resource.validate
- if !errors.empty?
- File.open("#{ERROR_DIR}/#{example_name}.err", 'w:UTF-8') {|file| file.write(JSON.pretty_unparse(errors))}
- File.open("#{ERROR_DIR}/#{example_name}.xml", 'w:UTF-8') {|file| file.write(input_xml)}
+ unless errors.empty?
+ File.open("#{ERROR_DIR}/#{example_name}.err", 'w:UTF-8') { |file| file.write(JSON.pretty_unparse(errors)) }
+ File.open("#{ERROR_DIR}/#{example_name}.xml", 'w:UTF-8') { |file| file.write(input_xml) }
end
assert errors.empty?, 'Resource failed to validate.'
end
@@ -46,5 +42,4 @@ def test_resource_is_valid
resource = FHIR::Xml.from_xml(xml)
assert resource.is_valid?, 'Resource failed to validate.'
end
-
end