Merge pull request #1781 from hathach/update-unit-test

Update unit test
Ha Thach 2022-12-08 10:11:21 +07:00 committed by GitHub
commit ab8cfb3d5b
261 changed files with 5827 additions and 7766 deletions

View File

@ -7,4 +7,4 @@ exclude-file = .codespell/exclude-file.txt
check-filenames =
check-hidden =
count =
skip = .cproject,./.git,./hw/mcu,./lib,./examples/*/*/_build,./examples/*/*/ses,./examples/*/*/ozone,./hw/mcu,./test/vendor,./tests_obsolete,./tools/uf2
skip = .cproject,./.git,./hw/mcu,./lib,./examples/*/*/_build,./examples/*/*/ses,./examples/*/*/ozone,./hw/mcu,./test/unit-test/vendor,./tests_obsolete,./tools/uf2

View File

@ -1,11 +1,19 @@
name: Build AArch64
on:
pull_request:
push:
release:
types:
- created
paths:
- 'src'
- 'examples'
- 'lib'
- 'hw'
pull_request:
branches: [ master ]
paths:
- 'src'
- 'examples'
- 'lib'
- 'hw'
concurrency:
group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}

View File

@ -1,11 +1,19 @@
name: Build ARM
on:
pull_request:
push:
release:
types:
- created
paths:
- 'src'
- 'examples'
- 'lib'
- 'hw'
pull_request:
branches: [ master ]
paths:
- 'src'
- 'examples'
- 'lib'
- 'hw'
concurrency:
group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}

View File

@ -1,11 +1,19 @@
name: Build ESP
on:
pull_request:
push:
release:
types:
- created
paths:
- 'src'
- 'examples'
- 'lib'
- 'hw'
pull_request:
branches: [ master ]
paths:
- 'src'
- 'examples'
- 'lib'
- 'hw'
concurrency:
group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}

View File

@ -1,11 +1,19 @@
name: Build MSP430
on:
pull_request:
push:
release:
types:
- created
paths:
- 'src'
- 'examples'
- 'lib'
- 'hw'
pull_request:
branches: [ master ]
paths:
- 'src'
- 'examples'
- 'lib'
- 'hw'
concurrency:
group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}

View File

@ -1,11 +1,19 @@
name: Build Renesas
on:
pull_request:
push:
release:
types:
- created
paths:
- 'src'
- 'examples'
- 'lib'
- 'hw'
pull_request:
branches: [ master ]
paths:
- 'src'
- 'examples'
- 'lib'
- 'hw'
concurrency:
group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}

View File

@ -1,11 +1,19 @@
name: Build RISC-V
on:
pull_request:
push:
release:
types:
- created
paths:
- 'src'
- 'examples'
- 'lib'
- 'hw'
pull_request:
branches: [ master ]
paths:
- 'src'
- 'examples'
- 'lib'
- 'hw'
concurrency:
group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}

View File

@ -1,8 +1,9 @@
name: pre-commit
on:
pull_request:
push:
pull_request:
branches: [ master ]
concurrency:
group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
@ -20,7 +21,7 @@ jobs:
- name: Setup Ruby
uses: ruby/setup-ruby@v1
with:
ruby-version: '2.7'
ruby-version: '3.0'
- name: Checkout TinyUSB
uses: actions/checkout@v3
@ -32,5 +33,5 @@ jobs:
run: |
# Install Ceedling
gem install ceedling
cd test
cd test/unit-test
ceedling test:all

View File

@ -1,10 +1,19 @@
name: Hardware Test
on:
pull_request:
push:
release:
types:
- created
paths:
- 'src'
- 'examples'
- 'lib'
- 'hw'
pull_request:
branches: [ master ]
paths:
- 'src'
- 'examples'
- 'lib'
- 'hw'
# Hardware in the loop (HIL)
# The current self-hosted instance runs on an EPYC 7232 server hosted by the user HiFiPhile

View File

@ -15,6 +15,7 @@
# :release_build: TRUE
:test_file_prefix: test_
:which_ceedling: vendor/ceedling
:ceedling_version: 0.31.1
:default_tasks:
- test:all
@ -35,7 +36,7 @@
- +:test/**
- -:test/support
:source:
- ../src/**
- ../../src/**
:support:
- test/support

View File

@ -51,7 +51,7 @@
// CFG_TUSB_DEBUG is defined by compiler in DEBUG build
#ifndef CFG_TUSB_DEBUG
#define CFG_TUSB_DEBUG 0
#define CFG_TUSB_DEBUG 1
#endif
/* USB DMA on some MCUs can only access a specific SRAM region with restriction on alignment.

View File

@ -49,16 +49,16 @@ unless (project_found)
end
desc "upgrade PROJECT_NAME", "upgrade ceedling for a project (not req'd if gem used)"
method_option :docs, :type => :boolean, :default => false, :desc => "Add docs in project vendor directory"
method_option :local, :type => :boolean, :default => false, :desc => "Create a copy of Ceedling in the project vendor directory"
method_option :no_configs, :type => :boolean, :default => false, :desc => "Don't install starter configuration files"
method_option :noconfigs, :type => :boolean, :default => false
#deprecated:
method_option :no_docs, :type => :boolean, :default => false
method_option :nodocs, :type => :boolean, :default => false
def upgrade(name, silent = false)
copy_assets_and_create_structure(name, silent, true, options || {:upgrade => true})
as_local = true
begin
require "yaml"
as_local = (YAML.load_file(File.join(name, "project.yml"))[:project][:which_ceedling] != 'gem')
rescue
raise "ERROR: Could not find valid project file '#{yaml_path}'"
end
found_docs = File.exists?( File.join(name, "docs", "CeedlingPacket.md") )
copy_assets_and_create_structure(name, silent, true, {:upgrade => true, :no_configs => true, :local => as_local, :docs => found_docs})
end
no_commands do
@ -90,26 +90,30 @@ unless (project_found)
FileUtils.touch(File.join(test_support_path, '.gitkeep'))
# If documentation requested, create a place to dump them and do so
doc_path = ""
if use_docs
doc_path = File.join(ceedling_path, 'docs')
doc_path = use_gem ? File.join(name, 'docs') : File.join(ceedling_path, 'docs')
FileUtils.mkdir_p doc_path
in_doc_path = lambda {|f| File.join(doc_path, f)}
doc_files = [
'docs/CeedlingPacket.md',
'vendor/c_exception/docs/CException.md',
'vendor/cmock/docs/CMock_Summary.md',
'vendor/unity/docs/UnityAssertionsCheatSheetSuitableforPrintingandPossiblyFraming.pdf',
'vendor/unity/docs/UnityAssertionsReference.md',
'vendor/unity/docs/UnityConfigurationGuide.md',
'vendor/unity/docs/UnityGettingStartedGuide.md',
'vendor/unity/docs/UnityHelperScriptsGuide.md',
'vendor/unity/docs/ThrowTheSwitchCodingStandard.md',
]
# Add documentation from main projects to list
doc_files = {}
['docs','vendor/unity/docs','vendor/cmock/docs','vendor/cexception/docs'].each do |p|
Dir[ File.expand_path(File.join(here, p, '*.md')) ].each do |f|
doc_files[ File.basename(f) ] = f unless(doc_files.include? f)
end
end
doc_files.each do |f|
copy_file(f, in_doc_path.call(File.basename(f)), :force => force)
# Add documentation from plugins to list
Dir[ File.join(here, 'plugins/**/README.md') ].each do |plugin_path|
k = "plugin_" + plugin_path.split(/\\|\//)[-2] + ".md"
doc_files[ k ] = File.expand_path(plugin_path)
end
# Copy all documentation
doc_files.each_pair do |k, v|
copy_file(v, in_doc_path.call(k), :force => force)
end
end
@ -133,7 +137,6 @@ unless (project_found)
{:src => 'vendor/cmock/config/', :dst => 'vendor/cmock/config'},
{:src => 'vendor/cmock/lib/', :dst => 'vendor/cmock/lib'},
{:src => 'vendor/cmock/src/', :dst => 'vendor/cmock/src'},
{:src => 'vendor/deep_merge/lib/', :dst => 'vendor/deep_merge/lib'},
{:src => 'vendor/diy/lib', :dst => 'vendor/diy/lib'},
{:src => 'vendor/unity/auto/', :dst => 'vendor/unity/auto'},
{:src => 'vendor/unity/src/', :dst => 'vendor/unity/src'},
@ -146,16 +149,24 @@ unless (project_found)
# We're copying in a configuration file if we haven't said not to
if (use_configs)
if use_gem
copy_file(File.join('assets', 'project_as_gem.yml'), File.join(name, 'project.yml'), :force => force)
dst_yaml = File.join(name, 'project.yml')
src_yaml = if use_gem
File.join(here, 'assets', 'project_as_gem.yml')
else
copy_file(File.join('assets', 'project_with_guts.yml'), File.join(name, 'project.yml'), :force => force)
if is_windows?
copy_file(File.join('assets', 'ceedling.cmd'), File.join(name, 'ceedling.cmd'), :force => force)
else
copy_file(File.join('assets', 'ceedling'), File.join(name, 'ceedling'), :force => force)
File.chmod(0755, File.join(name, 'ceedling'))
end
File.join(here, 'assets', 'project_with_guts.yml')
end
# Perform the actual clone of the config file, while updating the version
File.open(dst_yaml,'w') do |dst|
require File.expand_path(File.join(File.dirname(__FILE__),"..","lib","ceedling","version.rb"))
dst << File.read(src_yaml).gsub(":ceedling_version: '?'",":ceedling_version: #{Ceedling::Version::CEEDLING}")
puts " create #{dst_yaml}"
end
end
@ -167,8 +178,8 @@ unless (project_found)
unless silent
puts "\n"
puts "Project '#{name}' #{force ? "upgraded" : "created"}!"
puts " - Tool documentation is located in vendor/ceedling/docs" if use_docs
puts " - Execute 'ceedling help' to view available test & build tasks"
puts " - Tool documentation is located in #{doc_path}" if use_docs
puts " - Execute 'ceedling help' from #{name} to view available test & build tasks"
puts ''
end
end
@ -206,10 +217,10 @@ unless (project_found)
desc "version", "return the version of the tools installed"
def version()
require 'ceedling/version.rb'
puts " Ceedling:: #{Ceedling::Version::CEEDLING}"
puts " CMock:: #{Ceedling::Version::CMOCK}"
puts " Unity:: #{Ceedling::Version::UNITY}"
require File.expand_path(File.join(File.dirname(__FILE__),"..","lib","ceedling","version.rb"))
puts " Ceedling:: #{Ceedling::Version::CEEDLING}"
puts " CMock:: #{Ceedling::Version::CMOCK}"
puts " Unity:: #{Ceedling::Version::UNITY}"
puts " CException:: #{Ceedling::Version::CEXCEPTION}"
end
end
@ -287,6 +298,8 @@ else
options[:list_tasks] = true
when /^-T$/
options[:list_tasks] = true
when /^--tasks$/
options[:list_tasks] = true
when /^project:(\w+)/
ENV['CEEDLING_USER_PROJECT_FILE'] = "#{$1}.yml"
else

View File

@ -9,7 +9,7 @@ class BuildInvokerUtils
##
# Processes exceptions and tries to display a useful message for the user.
#
# ==== Attriboops...utes
# ==== Attributes
#
# * _exception_: The exception given by a rescue statement.
# * _context_: A symbol representing where in the build the exception

View File

@ -0,0 +1,35 @@
class CacheinatorHelper
constructor :file_wrapper, :yaml_wrapper
def diff_cached_config?(cached_filepath, hash)
return false if ( not @file_wrapper.exist?(cached_filepath) )
return true if (@yaml_wrapper.load(cached_filepath) != hash)
return false
end
def diff_cached_defines?(cached_filepath, files)
changed_defines = false
current_defines = COLLECTION_DEFINES_TEST_AND_VENDOR.reject(&:empty?)
current_dependencies = Hash[files.collect { |source| [source, current_defines.dup] }]
if not @file_wrapper.exist?(cached_filepath)
@yaml_wrapper.dump(cached_filepath, current_dependencies)
return changed_defines
end
dependencies = @yaml_wrapper.load(cached_filepath)
common_dependencies = current_dependencies.select { |file, defines| dependencies.has_key?(file) }
if dependencies.values_at(*common_dependencies.keys) != common_dependencies.values
changed_defines = true
end
dependencies.merge!(current_dependencies)
@yaml_wrapper.dump(cached_filepath, dependencies)
return changed_defines
end
end

View File

@ -54,6 +54,7 @@ class Configurator
:test_fixture,
:test_includes_preprocessor,
:test_file_preprocessor,
:test_file_preprocessor_directives,
:test_dependencies_generator,
:release_compiler,
:release_assembler,
@ -183,17 +184,22 @@ class Configurator
@rake_plugins = @configurator_plugins.find_rake_plugins(config, paths_hash)
@script_plugins = @configurator_plugins.find_script_plugins(config, paths_hash)
config_plugins = @configurator_plugins.find_config_plugins(config, paths_hash)
plugin_defaults = @configurator_plugins.find_plugin_defaults(config, paths_hash)
plugin_yml_defaults = @configurator_plugins.find_plugin_yml_defaults(config, paths_hash)
plugin_hash_defaults = @configurator_plugins.find_plugin_hash_defaults(config, paths_hash)
config_plugins.each do |plugin|
plugin_config = @yaml_wrapper.load(plugin)
config.deep_merge(plugin_config)
end
plugin_defaults.each do |defaults|
plugin_yml_defaults.each do |defaults|
@configurator_builder.populate_defaults( config, @yaml_wrapper.load(defaults) )
end
plugin_hash_defaults.each do |defaults|
@configurator_builder.populate_defaults( config, defaults )
end
# special plugin setting for results printing
config[:plugins][:display_raw_test_results] = true if (config[:plugins][:display_raw_test_results].nil?)
@ -203,10 +209,19 @@ class Configurator
def merge_imports(config)
if config[:import]
until config[:import].empty?
path = config[:import].shift
path = @system_wrapper.module_eval(path) if (path =~ RUBY_STRING_REPLACEMENT_PATTERN)
config.deep_merge!(@yaml_wrapper.load(path))
if config[:import].is_a? Array
until config[:import].empty?
path = config[:import].shift
path = @system_wrapper.module_eval(path) if (path =~ RUBY_STRING_REPLACEMENT_PATTERN)
config.deep_merge!(@yaml_wrapper.load(path))
end
else
config[:import].each_value do |path|
if !path.nil?
path = @system_wrapper.module_eval(path) if (path =~ RUBY_STRING_REPLACEMENT_PATTERN)
config.deep_merge!(@yaml_wrapper.load(path))
end
end
end
end
config.delete(:import)
@ -222,7 +237,11 @@ class Configurator
interstitial = ((key == :path) ? File::PATH_SEPARATOR : '')
items = ((value.class == Array) ? hash[key] : [value])
items.each { |item| item.replace( @system_wrapper.module_eval( item ) ) if (item =~ RUBY_STRING_REPLACEMENT_PATTERN) }
items.each do |item|
if item.is_a? String and item =~ RUBY_STRING_REPLACEMENT_PATTERN
item.replace( @system_wrapper.module_eval( item ) )
end
end
hash[key] = items.join( interstitial )
@system_wrapper.env_set( key.to_s.upcase, hash[key] )

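To make the control flow above easier to follow, here is a minimal standalone sketch (not Ceedling code; the configuration fragments and paths are invented) of the two shapes of :import the reworked merge_imports now handles: an array of YAML paths, or a hash whose non-nil values are YAML paths.

    # Hypothetical project configuration fragments; both forms are now accepted.
    array_form = { :import => ['cfg/common.yml', 'cfg/ci.yml'] }
    hash_form  = { :import => { :common => 'cfg/common.yml', :unused => nil } }

    [array_form, hash_form].each do |config|
      imports = config[:import]
      paths = imports.is_a?(Array) ? imports.dup : imports.values.compact
      paths.each { |path| puts "would deep_merge #{path} into the project config" }
    end
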
View File

@ -250,8 +250,8 @@ class ConfiguratorBuilder
def collect_test_support_source_include_vendor_paths(in_hash)
return {
:collection_paths_test_support_source_include_vendor =>
in_hash[:collection_paths_test_support_source_include] +
get_vendor_paths(in_hash)
get_vendor_paths(in_hash) +
in_hash[:collection_paths_test_support_source_include]
}
end
@ -384,14 +384,26 @@ class ConfiguratorBuilder
end
def get_vendor_defines(in_hash)
defines = in_hash[:unity_defines].clone
defines.concat(in_hash[:cmock_defines]) if (in_hash[:project_use_mocks])
defines.concat(in_hash[:cexception_defines]) if (in_hash[:project_use_exceptions])
return defines
end
def collect_vendor_defines(in_hash)
return {:collection_defines_vendor => get_vendor_defines(in_hash)}
end
def collect_test_and_vendor_defines(in_hash)
test_defines = in_hash[:defines_test].clone
defines = in_hash[:defines_test].clone
vendor_defines = get_vendor_defines(in_hash)
defines.concat(vendor_defines) if vendor_defines
test_defines.concat(in_hash[:unity_defines])
test_defines.concat(in_hash[:cmock_defines]) if (in_hash[:project_use_mocks])
test_defines.concat(in_hash[:cexception_defines]) if (in_hash[:project_use_exceptions])
return {:collection_defines_test_and_vendor => test_defines}
return {:collection_defines_test_and_vendor => defines}
end
@ -418,28 +430,33 @@ class ConfiguratorBuilder
# Note: Symbols passed to compiler at command line can change Unity and CException behavior / configuration;
# we also handle those dependencies elsewhere in compilation dependencies
objects = [UNITY_C_FILE]
sources = [UNITY_C_FILE]
in_hash[:files_support].each { |file| objects << File.basename(file) }
in_hash[:files_support].each { |file| sources << file }
# we don't include paths here because use of plugins or mixing different compilers may require different build paths
objects << CEXCEPTION_C_FILE if (in_hash[:project_use_exceptions])
objects << CMOCK_C_FILE if (in_hash[:project_use_mocks])
sources << CEXCEPTION_C_FILE if (in_hash[:project_use_exceptions])
sources << CMOCK_C_FILE if (in_hash[:project_use_mocks])
# if we're using mocks & a unity helper is defined & that unity helper includes a source file component (not only a header of macros),
# then link in the unity_helper object file too
if ( in_hash[:project_use_mocks] and in_hash[:cmock_unity_helper] )
in_hash[:cmock_unity_helper].each do |helper|
if @file_wrapper.exist?(helper.ext(in_hash[:extension_source]))
objects << File.basename(helper)
sources << helper
end
end
end
# create object files from all the sources
objects = sources.map { |file| File.basename(file) }
# no build paths here so plugins can remap if necessary (i.e. path mapping happens at runtime)
objects.map! { |object| object.ext(in_hash[:extension_object]) }
return { :collection_test_fixture_extra_link_objects => objects }
return { :collection_all_support => sources,
:collection_test_fixture_extra_link_objects => objects
}
end

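For context, a small standalone sketch (invented file names; String#ext comes from Rake, which Ceedling already depends on) of the source-to-object mapping shown above: full source paths are kept for :collection_all_support, while the link objects are reduced to basenames carrying the object extension.

    require 'rake'   # provides String#ext, used below

    sources = ['vendor/unity/src/unity.c', 'test/support/helper.c']   # hypothetical
    objects = sources.map { |file| File.basename(file) }
    objects.map! { |object| object.ext('.o') }
    puts objects.inspect   # => ["unity.o", "helper.o"]
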
View File

@ -26,6 +26,7 @@ class ConfiguratorPlugins
if is_script_plugin
@system_wrapper.add_load_path( File.join( path, 'lib') )
@system_wrapper.add_load_path( File.join( path, 'config') )
end
break
end
@ -92,7 +93,7 @@ class ConfiguratorPlugins
# gather up and return default .yml filepaths that exist on-disk
def find_plugin_defaults(config, plugin_paths)
def find_plugin_yml_defaults(config, plugin_paths)
defaults_with_path = []
config[:plugins][:enabled].each do |plugin|
@ -108,4 +109,23 @@ class ConfiguratorPlugins
return defaults_with_path
end
# gather up and return default config hashes provided by plugin defaults_<plugin>.rb files that exist on-disk
def find_plugin_hash_defaults(config, plugin_paths)
defaults_hash= []
config[:plugins][:enabled].each do |plugin|
if path = plugin_paths[(plugin + '_path').to_sym]
default_path = File.join(path, "config", "defaults_#{plugin}.rb")
if @file_wrapper.exist?(default_path)
@system_wrapper.require_file( "defaults_#{plugin}.rb")
object = eval("get_default_config()")
defaults_hash << object
end
end
end
return defaults_hash
end
end

View File

@ -39,6 +39,7 @@ class ConfiguratorSetup
flattened_config.merge!(@configurator_builder.collect_headers(flattened_config))
flattened_config.merge!(@configurator_builder.collect_release_existing_compilation_input(flattened_config))
flattened_config.merge!(@configurator_builder.collect_all_existing_compilation_input(flattened_config))
flattened_config.merge!(@configurator_builder.collect_vendor_defines(flattened_config))
flattened_config.merge!(@configurator_builder.collect_test_and_vendor_defines(flattened_config))
flattened_config.merge!(@configurator_builder.collect_release_and_vendor_defines(flattened_config))
flattened_config.merge!(@configurator_builder.collect_release_artifact_extra_link_objects(flattened_config))

View File

@ -95,3 +95,5 @@ NULL_FILE_PATH = '/dev/null'
TESTS_BASE_PATH = TEST_ROOT_NAME
RELEASE_BASE_PATH = RELEASE_ROOT_NAME
VENDORS_FILES = %w(unity UnityHelper cmock CException).freeze

View File

@ -7,17 +7,20 @@ CEEDLING_VENDOR = File.expand_path(File.dirname(__FILE__) + '/../../vendor') unl
CEEDLING_PLUGINS = [] unless defined? CEEDLING_PLUGINS
DEFAULT_TEST_COMPILER_TOOL = {
:executable => FilePathUtils.os_executable_ext('gcc').freeze,
:executable => ENV['CC'].nil? ? FilePathUtils.os_executable_ext('gcc').freeze : ENV['CC'].split[0],
:name => 'default_test_compiler'.freeze,
:stderr_redirect => StdErrRedirect::NONE.freeze,
:background_exec => BackgroundExec::NONE.freeze,
:optional => false.freeze,
:arguments => [
ENV['CC'].nil? ? "" : ENV['CC'].split[1..-1],
ENV['CPPFLAGS'].nil? ? "" : ENV['CPPFLAGS'].split,
{"-I\"$\"" => 'COLLECTION_PATHS_TEST_SUPPORT_SOURCE_INCLUDE_VENDOR'}.freeze,
{"-I\"$\"" => 'COLLECTION_PATHS_TEST_TOOLCHAIN_INCLUDE'}.freeze,
{"-D$" => 'COLLECTION_DEFINES_TEST_AND_VENDOR'}.freeze,
"-DGNU_COMPILER".freeze,
"-g".freeze,
ENV['CFLAGS'].nil? ? "" : ENV['CFLAGS'].split,
"-c \"${1}\"".freeze,
"-o \"${2}\"".freeze,
# gcc's list file output options are complex; no use of ${3} parameter in default config
@ -27,16 +30,21 @@ DEFAULT_TEST_COMPILER_TOOL = {
}
DEFAULT_TEST_LINKER_TOOL = {
:executable => FilePathUtils.os_executable_ext('gcc').freeze,
:executable => ENV['CCLD'].nil? ? FilePathUtils.os_executable_ext('gcc').freeze : ENV['CCLD'].split[0],
:name => 'default_test_linker'.freeze,
:stderr_redirect => StdErrRedirect::NONE.freeze,
:background_exec => BackgroundExec::NONE.freeze,
:optional => false.freeze,
:arguments => [
ENV['CCLD'].nil? ? "" : ENV['CCLD'].split[1..-1],
ENV['CFLAGS'].nil? ? "" : ENV['CFLAGS'].split,
ENV['LDFLAGS'].nil? ? "" : ENV['LDFLAGS'].split,
"\"${1}\"".freeze,
"${5}".freeze,
"-o \"${2}\"".freeze,
"".freeze,
"${4}".freeze
"${4}".freeze,
ENV['LDLIBS'].nil? ? "" : ENV['LDLIBS'].split
].freeze
}
@ -50,12 +58,14 @@ DEFAULT_TEST_FIXTURE_TOOL = {
}
DEFAULT_TEST_INCLUDES_PREPROCESSOR_TOOL = {
:executable => FilePathUtils.os_executable_ext('gcc').freeze,
:executable => ENV['CC'].nil? ? FilePathUtils.os_executable_ext('gcc').freeze : ENV['CC'].split[0],
:name => 'default_test_includes_preprocessor'.freeze,
:stderr_redirect => StdErrRedirect::NONE.freeze,
:background_exec => BackgroundExec::NONE.freeze,
:optional => false.freeze,
:arguments => [
ENV['CC'].nil? ? "" : ENV['CC'].split[1..-1],
ENV['CPPFLAGS'].nil? ? "" : ENV['CPPFLAGS'].split,
'-E'.freeze, # OSX clang
'-MM'.freeze,
'-MG'.freeze,
@ -67,18 +77,38 @@ DEFAULT_TEST_INCLUDES_PREPROCESSOR_TOOL = {
{"-D$" => 'COLLECTION_DEFINES_TEST_AND_VENDOR'}.freeze,
{"-D$" => 'DEFINES_TEST_PREPROCESS'}.freeze,
"-DGNU_COMPILER".freeze, # OSX clang
'-w'.freeze,
# '-nostdinc'.freeze, # disabled temporarily due to stdio access violations on OSX
"\"${1}\"".freeze
].freeze
}
DEFAULT_TEST_FILE_PREPROCESSOR_TOOL = {
:executable => FilePathUtils.os_executable_ext('gcc').freeze,
:executable => ENV['CC'].nil? ? FilePathUtils.os_executable_ext('gcc').freeze : ENV['CC'].split[0],
:name => 'default_test_file_preprocessor'.freeze,
:stderr_redirect => StdErrRedirect::NONE.freeze,
:background_exec => BackgroundExec::NONE.freeze,
:optional => false.freeze,
:arguments => [
ENV['CC'].nil? ? "" : ENV['CC'].split[1..-1],
ENV['CPPFLAGS'].nil? ? "" : ENV['CPPFLAGS'].split,
'-E'.freeze,
{"-I\"$\"" => 'COLLECTION_PATHS_TEST_SUPPORT_SOURCE_INCLUDE_VENDOR'}.freeze,
{"-I\"$\"" => 'COLLECTION_PATHS_TEST_TOOLCHAIN_INCLUDE'}.freeze,
{"-D$" => 'COLLECTION_DEFINES_TEST_AND_VENDOR'}.freeze,
{"-D$" => 'DEFINES_TEST_PREPROCESS'}.freeze,
"-DGNU_COMPILER".freeze,
# '-nostdinc'.freeze, # disabled temporarily due to stdio access violations on OSX
"\"${1}\"".freeze,
"-o \"${2}\"".freeze
].freeze
}
DEFAULT_TEST_FILE_PREPROCESSOR_DIRECTIVES_TOOL = {
:executable => FilePathUtils.os_executable_ext('gcc').freeze,
:name => 'default_test_file_preprocessor_directives'.freeze,
:stderr_redirect => StdErrRedirect::NONE.freeze,
:background_exec => BackgroundExec::NONE.freeze,
:optional => false.freeze,
:arguments => [
'-E'.freeze,
{"-I\"$\"" => 'COLLECTION_PATHS_TEST_SUPPORT_SOURCE_INCLUDE_VENDOR'}.freeze,
@ -86,6 +116,7 @@ DEFAULT_TEST_FILE_PREPROCESSOR_TOOL = {
{"-D$" => 'COLLECTION_DEFINES_TEST_AND_VENDOR'}.freeze,
{"-D$" => 'DEFINES_TEST_PREPROCESS'}.freeze,
"-DGNU_COMPILER".freeze,
'-fdirectives-only'.freeze,
# '-nostdinc'.freeze, # disabled temporarily due to stdio access violations on OSX
"\"${1}\"".freeze,
"-o \"${2}\"".freeze
@ -100,12 +131,14 @@ else
end
DEFAULT_TEST_DEPENDENCIES_GENERATOR_TOOL = {
:executable => FilePathUtils.os_executable_ext('gcc').freeze,
:executable => ENV['CC'].nil? ? FilePathUtils.os_executable_ext('gcc').freeze : ENV['CC'].split[0],
:name => 'default_test_dependencies_generator'.freeze,
:stderr_redirect => StdErrRedirect::NONE.freeze,
:background_exec => BackgroundExec::NONE.freeze,
:optional => false.freeze,
:arguments => [
ENV['CC'].nil? ? "" : ENV['CC'].split[1..-1],
ENV['CPPFLAGS'].nil? ? "" : ENV['CPPFLAGS'].split,
'-E'.freeze,
{"-I\"$\"" => 'COLLECTION_PATHS_TEST_SUPPORT_SOURCE_INCLUDE_VENDOR'}.freeze,
{"-I\"$\"" => 'COLLECTION_PATHS_TEST_TOOLCHAIN_INCLUDE'}.freeze,
@ -123,12 +156,14 @@ DEFAULT_TEST_DEPENDENCIES_GENERATOR_TOOL = {
}
DEFAULT_RELEASE_DEPENDENCIES_GENERATOR_TOOL = {
:executable => FilePathUtils.os_executable_ext('gcc').freeze,
:executable => ENV['CC'].nil? ? FilePathUtils.os_executable_ext('gcc').freeze : ENV['CC'].split[0],
:name => 'default_release_dependencies_generator'.freeze,
:stderr_redirect => StdErrRedirect::NONE.freeze,
:background_exec => BackgroundExec::NONE.freeze,
:optional => false.freeze,
:arguments => [
ENV['CC'].nil? ? "" : ENV['CC'].split[1..-1],
ENV['CPPFLAGS'].nil? ? "" : ENV['CPPFLAGS'].split,
'-E'.freeze,
{"-I\"$\"" => 'COLLECTION_PATHS_SOURCE_INCLUDE_VENDOR'}.freeze,
{"-I\"$\"" => 'COLLECTION_PATHS_RELEASE_TOOLCHAIN_INCLUDE'}.freeze,
@ -147,16 +182,19 @@ DEFAULT_RELEASE_DEPENDENCIES_GENERATOR_TOOL = {
DEFAULT_RELEASE_COMPILER_TOOL = {
:executable => FilePathUtils.os_executable_ext('gcc').freeze,
:executable => ENV['CC'].nil? ? FilePathUtils.os_executable_ext('gcc').freeze : ENV['CC'].split[0],
:name => 'default_release_compiler'.freeze,
:stderr_redirect => StdErrRedirect::NONE.freeze,
:background_exec => BackgroundExec::NONE.freeze,
:optional => false.freeze,
:arguments => [
ENV['CC'].nil? ? "" : ENV['CC'].split[1..-1],
ENV['CPPFLAGS'].nil? ? "" : ENV['CPPFLAGS'].split,
{"-I\"$\"" => 'COLLECTION_PATHS_SOURCE_INCLUDE_VENDOR'}.freeze,
{"-I\"$\"" => 'COLLECTION_PATHS_RELEASE_TOOLCHAIN_INCLUDE'}.freeze,
{"-D$" => 'COLLECTION_DEFINES_RELEASE_AND_VENDOR'}.freeze,
"-DGNU_COMPILER".freeze,
ENV['CFLAGS'].nil? ? "" : ENV['CFLAGS'].split,
"-c \"${1}\"".freeze,
"-o \"${2}\"".freeze,
# gcc's list file output options are complex; no use of ${3} parameter in default config
@ -166,12 +204,14 @@ DEFAULT_RELEASE_COMPILER_TOOL = {
}
DEFAULT_RELEASE_ASSEMBLER_TOOL = {
:executable => FilePathUtils.os_executable_ext('as').freeze,
:executable => ENV['AS'].nil? ? FilePathUtils.os_executable_ext('as').freeze : ENV['AS'].split[0],
:name => 'default_release_assembler'.freeze,
:stderr_redirect => StdErrRedirect::NONE.freeze,
:background_exec => BackgroundExec::NONE.freeze,
:optional => false.freeze,
:arguments => [
ENV['AS'].nil? ? "" : ENV['AS'].split[1..-1],
ENV['ASFLAGS'].nil? ? "" : ENV['ASFLAGS'].split,
{"-I\"$\"" => 'COLLECTION_PATHS_SOURCE_AND_INCLUDE'}.freeze,
"\"${1}\"".freeze,
"-o \"${2}\"".freeze,
@ -179,16 +219,21 @@ DEFAULT_RELEASE_ASSEMBLER_TOOL = {
}
DEFAULT_RELEASE_LINKER_TOOL = {
:executable => FilePathUtils.os_executable_ext('gcc').freeze,
:executable => ENV['CCLD'].nil? ? FilePathUtils.os_executable_ext('gcc').freeze : ENV['CCLD'].split[0],
:name => 'default_release_linker'.freeze,
:stderr_redirect => StdErrRedirect::NONE.freeze,
:background_exec => BackgroundExec::NONE.freeze,
:optional => false.freeze,
:arguments => [
ENV['CCLD'].nil? ? "" : ENV['CCLD'].split[1..-1],
ENV['CFLAGS'].nil? ? "" : ENV['CFLAGS'].split,
ENV['LDFLAGS'].nil? ? "" : ENV['LDFLAGS'].split,
"\"${1}\"".freeze,
"${5}".freeze,
"-o \"${2}\"".freeze,
"".freeze,
"${4}".freeze
"${4}".freeze,
ENV['LDLIBS'].nil? ? "" : ENV['LDLIBS'].split
].freeze
}
@ -205,6 +250,7 @@ DEFAULT_TOOLS_TEST_PREPROCESSORS = {
:tools => {
:test_includes_preprocessor => DEFAULT_TEST_INCLUDES_PREPROCESSOR_TOOL,
:test_file_preprocessor => DEFAULT_TEST_FILE_PREPROCESSOR_TOOL,
:test_file_preprocessor_directives => DEFAULT_TEST_FILE_PREPROCESSOR_DIRECTIVES_TOOL,
}
}
@ -245,8 +291,10 @@ DEFAULT_CEEDLING_CONFIG = {
:compile_threads => 1,
:test_threads => 1,
:use_test_preprocessor => false,
:use_preprocessor_directives => false,
:use_deep_dependencies => false,
:generate_deep_dependencies => true, # only applicable if use_deep_dependencies is true
:auto_link_deep_dependencies => false,
:test_file_prefix => 'test_',
:options_paths => [],
:release_build => false,
@ -263,6 +311,7 @@ DEFAULT_CEEDLING_CONFIG = {
:source => [], # must be populated by user
:support => [],
:include => [],
:libraries => [],
:test_toolchain_include => [],
:release_toolchain_include => [],
},
@ -290,6 +339,8 @@ DEFAULT_CEEDLING_CONFIG = {
},
:libraries => {
:flag => '-l${1}',
:path_flag => '-L ${1}',
:test => [],
:test_preprocess => [],
:release => [],
@ -303,6 +354,7 @@ DEFAULT_CEEDLING_CONFIG = {
:source => '.c',
:assembly => '.s',
:object => '.o',
:libraries => ['.a','.so'],
:executable => ( SystemWrapper.windows? ? EXTENSION_WIN_EXE : EXTENSION_NONWIN_EXE ),
:map => '.map',
:list => '.lst',
@ -345,6 +397,7 @@ DEFAULT_CEEDLING_CONFIG = {
},
:test_includes_preprocessor => { :arguments => [] },
:test_file_preprocessor => { :arguments => [] },
:test_file_preprocessor_directives => { :arguments => [] },
:test_dependencies_generator => { :arguments => [] },
:release_compiler => { :arguments => [] },
:release_linker => { :arguments => [] },

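A minimal sketch of how the environment-variable hooks above consume CC (the value is hypothetical): the first whitespace-separated word becomes the tool executable and the remaining words are prepended to the argument list; CPPFLAGS, CFLAGS, LDFLAGS and LDLIBS are spliced into the argument lists the same way when set.

    ENV['CC'] = 'ccache gcc'                                       # hypothetical value
    executable = ENV['CC'].nil? ? 'gcc' : ENV['CC'].split[0]       # => "ccache"
    extra_args = ENV['CC'].nil? ? ''    : ENV['CC'].split[1..-1]   # => ["gcc"]
    cflags     = ENV['CFLAGS'].nil? ? '' : ENV['CFLAGS'].split     # => "" when unset
    puts [executable, extra_args, cflags].inspect
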
View File

@ -86,13 +86,12 @@ class Dependinator
def enhance_results_dependencies(result_filepath)
@rake_wrapper[result_filepath].enhance( [@configurator.project_test_force_rebuild_filepath] ) if (@project_config_manager.test_config_changed ||
@project_config_manager.test_defines_changed)
@rake_wrapper[result_filepath].enhance( [@configurator.project_test_force_rebuild_filepath] ) if @project_config_manager.test_config_changed
end
def setup_test_executable_dependencies(test, objects)
@rake_wrapper.create_file_task( @file_path_utils.form_test_executable_filepath(test), objects )
def enhance_test_executable_dependencies(test, objects)
@rake_wrapper[ @file_path_utils.form_test_executable_filepath(test) ].enhance( objects )
end
end

View File

@ -25,10 +25,12 @@ class FileFinderHelper
end
case (complain)
when :error then blow_up(file_name, extra_message) if (file_to_find.nil?)
when :warn then gripe(file_name, extra_message) if (file_to_find.nil?)
#when :ignore then
if file_to_find.nil?
case (complain)
when :error then blow_up(file_name, extra_message)
when :warn then gripe(file_name, extra_message)
#when :ignore then
end
end
return file_to_find

View File

@ -21,9 +21,11 @@ class FilePathUtils
# standardize path to use '/' path separator & have no trailing path separator
def self.standardize(path)
path.strip!
path.gsub!(/\\/, '/')
path.chomp!('/')
if path.is_a? String
path.strip!
path.gsub!(/\\/, '/')
path.chomp!('/')
end
return path
end

View File

@ -33,15 +33,15 @@ class FileWrapper
end
def rm_f(filepath, options={})
FileUtils.rm_f(filepath, options)
FileUtils.rm_f(filepath, **options)
end
def rm_r(filepath, options={})
FileUtils.rm_r(filepath, options={})
FileUtils.rm_r(filepath, **options={})
end
def cp(source, destination, options={})
FileUtils.cp(source, destination, options)
FileUtils.cp(source, destination, **options)
end
def compare(from, to)
@ -59,7 +59,7 @@ class FileWrapper
end
def touch(filepath, options={})
FileUtils.touch(filepath, options)
FileUtils.touch(filepath, **options)
end
def write(filepath, contents, flags='w')

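The double-splat changes above go hand in hand with the Ruby upgrade in the pre-commit workflow (2.7 to 3.0): Ruby 3 no longer converts a trailing positional hash into keyword arguments. A minimal sketch, using only options FileUtils.rm_f actually accepts:

    require 'fileutils'

    options = { :noop => true, :verbose => true }
    FileUtils.rm_f('build/tmp.o', **options)    # fine on Ruby 3.x
    # FileUtils.rm_f('build/tmp.o', options)    # ArgumentError on Ruby 3.x
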
View File

@ -101,19 +101,21 @@ class Generator
shell_result = ex.shell_result
raise ex
ensure
arg_hash[:shell_command] = command[:line]
arg_hash[:shell_result] = shell_result
@plugin_manager.post_compile_execute(arg_hash)
end
end
def generate_executable_file(tool, context, objects, executable, map='', libraries=[])
def generate_executable_file(tool, context, objects, executable, map='', libraries=[], libpaths=[])
shell_result = {}
arg_hash = { :tool => tool,
:context => context,
:objects => objects,
:executable => executable,
:map => map,
:libraries => libraries
:libraries => libraries,
:libpaths => libpaths
}
@plugin_manager.pre_link_execute(arg_hash)
@ -125,7 +127,8 @@ class Generator
arg_hash[:objects],
arg_hash[:executable],
arg_hash[:map],
arg_hash[:libraries]
arg_hash[:libraries],
arg_hash[:libpaths]
)
@streaminator.stdout_puts("Command: #{command}", Verbosity::DEBUG)

View File

@ -37,6 +37,10 @@ class GeneratorTestResults
elements = extract_line_elements(line, results[:source][:file])
results[:successes] << elements[0]
results[:stdout] << elements[1] if (!elements[1].nil?)
when /(:PASS \(.* ms\)$)/
elements = extract_line_elements(line, results[:source][:file])
results[:successes] << elements[0]
results[:stdout] << elements[1] if (!elements[1].nil?)
when /(:FAIL)/
elements = extract_line_elements(line, results[:source][:file])
results[:failures] << elements[0]
@ -73,6 +77,7 @@ class GeneratorTestResults
# handle anything preceding filename in line as extra output to be collected
stdout = nil
stdout_regex = /(.+)#{Regexp.escape(filename)}.+/i
unity_test_time = 0
if (line =~ stdout_regex)
stdout = $1.clone
@ -82,8 +87,14 @@ class GeneratorTestResults
# collect up test results minus any extra output
elements = (line.strip.split(':'))[1..-1]
return {:test => elements[1], :line => elements[0].to_i, :message => (elements[3..-1].join(':')).strip}, stdout if elements.size >= 3
return {:test => '???', :line => -1, :message => nil} #fallback safe option. TODO better handling
# find timestamp if available
if (elements[-1] =~ / \((\d*(?:\.\d*)?) ms\)/)
unity_test_time = $1.to_f / 1000
elements[-1].sub!(/ \((\d*(?:\.\d*)?) ms\)/, '')
end
return {:test => elements[1], :line => elements[0].to_i, :message => (elements[3..-1].join(':')).strip, :unity_test_time => unity_test_time}, stdout if elements.size >= 3
return {:test => '???', :line => -1, :message => nil, :unity_test_time => unity_test_time} #fallback safe option. TODO better handling
end
end

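A sketch of the timing extraction added above, applied to a fabricated Unity result line: the captured milliseconds are converted to seconds and stripped from the trailing element before the result hash is built.

    line = 'test_demo.c:42:test_adds_numbers:PASS (1.5 ms)'   # fabricated output line
    elements = (line.strip.split(':'))[1..-1]
    unity_test_time = 0
    if elements[-1] =~ / \((\d*(?:\.\d*)?) ms\)/
      unity_test_time = $1.to_f / 1000                        # => 0.0015 seconds
      elements[-1].sub!(/ \((\d*(?:\.\d*)?) ms\)/, '')
    end
    puts({ :test => elements[1], :line => elements[0].to_i,
           :unity_test_time => unity_test_time }.inspect)
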
View File

@ -44,13 +44,15 @@ class GeneratorTestRunner
def generate(module_name, runner_filepath, test_cases, mock_list, test_file_includes=[])
require 'generate_test_runner.rb'
header_extension = @configurator.extension_header
#actually build the test runner using Unity's test runner generator
#(there is no need to use preprocessor here because we've already looked up test cases and are passing them in here)
@test_runner_generator ||= UnityTestRunnerGenerator.new( @configurator.get_runner_config )
@test_runner_generator.generate( module_name,
runner_filepath,
test_cases,
mock_list,
test_file_includes)
mock_list.map{|f| File.basename(f,'.*')+header_extension},
test_file_includes.map{|f| File.basename(f,'.*')+header_extension})
end
end

View File

@ -17,11 +17,11 @@ reportinator:
rake_utils:
compose:
- rake_wrapper
- rake_wrapper
system_utils:
compose:
- system_wrapper
- system_wrapper
file_path_utils:
compose:
@ -203,13 +203,13 @@ generator_helper:
generator_test_results:
compose:
- configurator
- configurator
- generator_test_results_sanity_checker
- yaml_wrapper
- yaml_wrapper
generator_test_results_sanity_checker:
compose:
- configurator
- configurator
- streaminator
generator_test_runner:
@ -223,43 +223,46 @@ dependinator:
- configurator
- project_config_manager
- test_includes_extractor
- file_path_utils
- file_path_utils
- rake_wrapper
- file_wrapper
preprocessinator:
compose:
- preprocessinator_helper
- preprocessinator_helper
- preprocessinator_includes_handler
- preprocessinator_file_handler
- task_invoker
- task_invoker
- file_path_utils
- yaml_wrapper
- project_config_manager
- configurator
preprocessinator_helper:
compose:
- configurator
- test_includes_extractor
- task_invoker
- file_finder
- file_path_utils
compose:
- configurator
- test_includes_extractor
- task_invoker
- file_finder
- file_path_utils
preprocessinator_includes_handler:
compose:
- configurator
- tool_executor
- task_invoker
- file_path_utils
- yaml_wrapper
- file_wrapper
- configurator
- tool_executor
- task_invoker
- file_path_utils
- yaml_wrapper
- file_wrapper
- file_finder
preprocessinator_file_handler:
compose:
- preprocessinator_extractor
- configurator
- tool_executor
- configurator
- tool_executor
- file_path_utils
- file_wrapper
- file_wrapper
preprocessinator_extractor:

View File

@ -0,0 +1,56 @@
class Preprocessinator
constructor :preprocessinator_helper, :preprocessinator_includes_handler, :preprocessinator_file_handler, :task_invoker, :file_path_utils, :yaml_wrapper, :project_config_manager, :configurator
def setup
# fashion ourselves callbacks @preprocessinator_helper can use
@preprocess_includes_proc = Proc.new { |filepath| self.preprocess_shallow_includes(filepath) }
@preprocess_mock_file_proc = Proc.new { |filepath| self.preprocess_file(filepath) }
@preprocess_test_file_directives_proc = Proc.new { |filepath| self.preprocess_file_directives(filepath) }
@preprocess_test_file_proc = Proc.new { |filepath| self.preprocess_file(filepath) }
end
def preprocess_shallow_source_includes(test)
@preprocessinator_helper.preprocess_source_includes(test)
end
def preprocess_test_and_invoke_test_mocks(test)
@preprocessinator_helper.preprocess_includes(test, @preprocess_includes_proc)
mocks_list = @preprocessinator_helper.assemble_mocks_list(test)
@project_config_manager.process_test_defines_change(mocks_list)
@preprocessinator_helper.preprocess_mockable_headers(mocks_list, @preprocess_mock_file_proc)
@task_invoker.invoke_test_mocks(mocks_list)
if (@configurator.project_use_preprocessor_directives)
@preprocessinator_helper.preprocess_test_file(test, @preprocess_test_file_directives_proc)
else
@preprocessinator_helper.preprocess_test_file(test, @preprocess_test_file_proc)
end
return mocks_list
end
def preprocess_shallow_includes(filepath)
includes = @preprocessinator_includes_handler.extract_includes(filepath)
@preprocessinator_includes_handler.write_shallow_includes_list(
@file_path_utils.form_preprocessed_includes_list_filepath(filepath), includes)
end
def preprocess_file(filepath)
@preprocessinator_includes_handler.invoke_shallow_includes_list(filepath)
@preprocessinator_file_handler.preprocess_file( filepath, @yaml_wrapper.load(@file_path_utils.form_preprocessed_includes_list_filepath(filepath)) )
end
def preprocess_file_directives(filepath)
@preprocessinator_includes_handler.invoke_shallow_includes_list( filepath )
@preprocessinator_file_handler.preprocess_file_directives( filepath,
@yaml_wrapper.load( @file_path_utils.form_preprocessed_includes_list_filepath( filepath ) ) )
end
end

View File

@ -16,6 +16,7 @@ class PreprocessinatorExtractor
lines = []
File.readlines(filepath).each do |line|
line.encode!('UTF-8', 'binary', invalid: :replace, undef: :replace, replace: '')
if found_file and not line =~ not_pragma
lines << line
else
@ -27,4 +28,28 @@ class PreprocessinatorExtractor
return lines
end
def extract_base_file_from_preprocessed_directives(filepath)
# preprocessing via the toolchain preprocessor eliminates only directives
# such as #ifdefs and leaves the rest of the code intact
# iterate through all lines and keep only the last chunk of the file, i.e.
# everything after the last '#' line marker that names our filepath
base_name = File.basename(filepath)
pattern = /^#.*(\s|\/|\\|\")#{Regexp.escape(base_name)}/
found_file = false # have we found the file we care about?
lines = []
File.readlines(filepath).each do |line|
line.encode!('UTF-8', 'binary', invalid: :replace, undef: :replace, replace: '')
lines << line
if line =~ pattern
lines = []
end
end
return lines
end
end

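A sketch of extract_base_file_from_preprocessed_directives above, run on fabricated preprocessor output: the collected lines are reset every time a '#' line marker naming the file appears, so only the chunk after the last marker survives.

    base_name = 'test_example.c'                                # hypothetical file
    pattern   = /^#.*(\s|\/|\\|\")#{Regexp.escape(base_name)}/
    lines = []
    [ '# 1 "test_example.c"',
      'int discarded_early_chunk;',
      '# 5 "test_example.c"',
      'void test_something(void) {}' ].each do |line|
      lines << line
      lines = [] if line =~ pattern                             # reset at each marker
    end
    puts lines.inspect   # => ["void test_something(void) {}"]
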
View File

@ -18,4 +18,17 @@ class PreprocessinatorFileHandler
@file_wrapper.write(preprocessed_filepath, contents.join("\n"))
end
def preprocess_file_directives(filepath, includes)
preprocessed_filepath = @file_path_utils.form_preprocessed_file_filepath(filepath)
command = @tool_executor.build_command_line(@configurator.tools_test_file_preprocessor_directives, [], filepath, preprocessed_filepath)
@tool_executor.exec(command[:line], command[:options])
contents = @preprocessinator_extractor.extract_base_file_from_preprocessed_directives(preprocessed_filepath)
includes.each{|include| contents.unshift("#include \"#{include}\"")}
@file_wrapper.write(preprocessed_filepath, contents.join("\n"))
end
end

View File

@ -15,6 +15,10 @@ class PreprocessinatorHelper
end
end
def preprocess_source_includes(test)
@test_includes_extractor.parse_test_file_source_include(test)
end
def assemble_mocks_list(test)
return @file_path_utils.form_mocks_source_filelist( @test_includes_extractor.lookup_raw_mock_list(test) )
end

View File

@ -2,7 +2,7 @@
class PreprocessinatorIncludesHandler
constructor :configurator, :tool_executor, :task_invoker, :file_path_utils, :yaml_wrapper, :file_wrapper
constructor :configurator, :tool_executor, :task_invoker, :file_path_utils, :yaml_wrapper, :file_wrapper, :file_finder
@@makefile_cache = {}
# shallow includes: only those headers a source file explicitly includes
@ -65,6 +65,7 @@ class PreprocessinatorIncludesHandler
to_process = [filepath]
ignore_list = []
list = []
all_mocks = []
include_paths = @configurator.project_config_hash[:collection_paths_include]
include_paths = [] if include_paths.nil?
@ -73,12 +74,10 @@ class PreprocessinatorIncludesHandler
while to_process.length > 0
target = to_process.shift()
ignore_list << target
# puts "[HELL] Processing: \t\t#{target}"
new_deps, new_to_process = extract_includes_helper(target, include_paths, ignore_list)
new_deps, new_to_process, all_mocks = extract_includes_helper(target, include_paths, ignore_list, all_mocks)
list += new_deps
to_process += new_to_process
if (!@configurator.project_config_hash.has_key?(:project_auto_link_deep_dependencies) or
!@configurator.project_config_hash[:project_auto_link_deep_dependencies])
if !@configurator.project_config_hash[:project_auto_link_deep_dependencies]
break
else
list = list.uniq()
@ -89,93 +88,102 @@ class PreprocessinatorIncludesHandler
return list
end
def extract_includes_helper(filepath, include_paths, ignore_list)
def extract_includes_helper(filepath, include_paths, ignore_list, mocks)
# Extract the dependencies from the make rule
hdr_ext = @configurator.extension_header
make_rule = self.form_shallow_dependencies_rule(filepath)
dependencies = make_rule.split.find_all {|path| path.end_with?(hdr_ext) }.uniq
dependencies.map! {|hdr| hdr.gsub('\\','/') }
target_file = make_rule.split[0].gsub(':', '').gsub('\\','/')
base = File.basename(target_file, File.extname(target_file))
make_rule_dependencies = make_rule.gsub(/.*\b#{Regexp.escape(base)}\S*/, '').gsub(/\\$/, '')
# Extract the headers dependencies from the make rule
hdr_ext = @configurator.extension_header
headers_dependencies = make_rule_dependencies.split.find_all {|path| path.end_with?(hdr_ext) }.uniq
headers_dependencies.map! {|hdr| hdr.gsub('\\','/') }
full_path_headers_dependencies = extract_full_path_dependencies(headers_dependencies)
# Separate the real files from the annotated ones and remove the '@@@@'
annotated_headers, real_headers = dependencies.partition {|hdr| hdr =~ /^@@@@/ }
annotated_headers.map! {|hdr| hdr.gsub('@@@@','') }
# Matching annotated_headers values against real_headers to ensure that
# annotated_headers contain full path entries (as returned by make rule)
annotated_headers.map! {|hdr| real_headers.find {|real_hdr| !real_hdr.match(/(.*\/)?#{Regexp.escape(hdr)}/).nil? } }
annotated_headers = annotated_headers.compact
# Extract the sources dependencies from the make rule
src_ext = @configurator.extension_source
sources_dependencies = make_rule_dependencies.split.find_all {|path| path.end_with?(src_ext) }.uniq
sources_dependencies.map! {|src| src.gsub('\\','/') }
full_path_sources_dependencies = extract_full_path_dependencies(sources_dependencies)
# Find which of our annotated headers are "real" dependencies. This is
# intended to weed out dependencies that have been removed due to build
# options defined in the project yaml and/or in the headers themselves.
list = annotated_headers.find_all do |annotated_header|
# find the index of the "real" include that matches the annotated one.
idx = real_headers.find_index do |real_header|
real_header =~ /^(.*\/)?#{Regexp.escape(annotated_header)}$/
end
# If we found a real include, delete it from the array and return it,
# otherwise return nil. Since nil is falsy this has the effect of making
# find_all return only the annotated headers for which a real include was
# found/deleted
idx ? real_headers.delete_at(idx) : nil
list = full_path_headers_dependencies + full_path_sources_dependencies
mock_prefix = @configurator.project_config_hash[:cmock_mock_prefix]
# Creating list of mocks
mocks += full_path_headers_dependencies.find_all do |header|
File.basename(header) =~ /^#{mock_prefix}.*$/
end.compact
# Extract direct dependencies that were also added
src_ext = @configurator.extension_source
sdependencies = make_rule.split.find_all {|path| path.end_with?(src_ext) }.uniq
sdependencies.map! {|hdr| hdr.gsub('\\','/') }
list += sdependencies
# ignore real file when both mock and real file exist
mocks.each do |mock|
list.each do |filename|
if File.basename(filename) == File.basename(mock).sub(mock_prefix, '')
ignore_list << filename
end
end
end.compact
# Filtering list of final includes to only include mocks and anything that is NOT in the ignore_list
list = list.select do |item|
mocks.include? item or !(ignore_list.any? { |ignore_item| !item.match(/^(.*\/)?#{Regexp.escape(ignore_item)}$/).nil? })
end
to_process = []
if @configurator.project_config_hash.has_key?(:project_auto_link_deep_dependencies) && @configurator.project_config_hash[:project_auto_link_deep_dependencies]
# Creating list of mocks
mocks = annotated_headers.find_all do |annotated_header|
File.basename(annotated_header) =~ /^#{@configurator.project_config_hash[:cmock_mock_prefix]}.*$/
end.compact
if @configurator.project_config_hash[:project_auto_link_deep_dependencies]
# Creating list of headers that should be recursively pre-processed
# Skipping mocks and unity.h
headers_to_deep_link = annotated_headers.select do |annotated_header|
!(mocks.include? annotated_header) and (annotated_header.match(/^(.*\/)?unity\.h$/).nil?)
end
headers_to_deep_link.map! {|hdr| File.expand_path(hdr)}
mocks.each do |mock|
dirname = File.dirname(mock)
#basename = File.basename(mock).delete_prefix(@configurator.project_config_hash[:cmock_mock_prefix])
basename = File.basename(mock).sub(@configurator.project_config_hash[:cmock_mock_prefix], '')
if dirname != "."
ignore_list << File.join(dirname, basename)
else
ignore_list << basename
end
end.compact
# Filtering list of final includes to only include mocks and anything that is NOT in the ignore_list
list = list.select do |item|
mocks.include? item or !(ignore_list.any? { |ignore_item| !item.match(/^(.*\/)?#{Regexp.escape(ignore_item)}$/).nil? })
# Skipping mocks and vendor headers
headers_to_deep_link = full_path_headers_dependencies.select do |hdr|
!(mocks.include? hdr) and (hdr.match(/^(.*\/)(#{VENDORS_FILES.join('|')}) + #{Regexp.escape(hdr_ext)}$/).nil?)
end
headers_to_deep_link.map! {|hdr| File.expand_path(hdr) }
headers_to_deep_link.compact!
headers_to_deep_link.each do |hdr|
if (ignore_list.none? {|ignore_header| hdr.match(/^(.*\/)?#{Regexp.escape(ignore_header)}$/)} and
include_paths.none? {|include_path| hdr =~ /^#{include_path}\.*/})
if File.exist?(hdr)
to_process << hdr
#source_file = hdr.delete_suffix(hdr_ext) + src_ext
source_file = hdr.chomp(hdr_ext) + src_ext
if source_file != hdr and File.exist?(source_file)
to_process << source_file
end
src = @file_finder.find_compilation_input_file(hdr, :ignore)
to_process << src if src
end
end
end
end
return list, to_process
return list, to_process, mocks
end
def write_shallow_includes_list(filepath, list)
@yaml_wrapper.dump(filepath, list)
end
private
def extract_full_path_dependencies(dependencies)
# Separate the real files from the annotated ones and remove the '@@@@'
annotated_files, real_files = dependencies.partition {|file| file =~ /^@@@@/}
annotated_files.map! {|file| file.gsub('@@@@','') }
# Matching annotated_files values against real_files to ensure that
# annotated_files contain full path entries (as returned by make rule)
annotated_files.map! {|file| real_files.find {|real| !real.match(/^(.*\/)?#{Regexp.escape(file)}$/).nil?}}
annotated_files = annotated_files.compact
# Find which of our annotated files are "real" dependencies. This is
# intended to weed out dependencies that have been removed due to build
# options defined in the project yaml and/or in the files themselves.
return annotated_files.find_all do |annotated_file|
# find the index of the "real" file that matches the annotated one.
idx = real_files.find_index do |real_file|
real_file =~ /^(.*\/)?#{Regexp.escape(annotated_file)}$/
end
# If we found a real file, delete it from the array and return it,
# otherwise return nil. Since nil is falsy this has the effect of making
# find_all return only the annotated files for which a real file was
# found/deleted
idx ? real_files.delete_at(idx) : nil
end.compact
end
end

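A sketch of the extract_full_path_dependencies helper above with invented make-rule entries: the '@@@@'-annotated names are matched back against the full-path entries, so only dependencies still present in the rule survive (the real code additionally deletes each matched entry via find_all/delete_at).

    dependencies = ['@@@@foo.h', 'src/foo.h', '@@@@mock_bar.h', 'mocks/mock_bar.h']
    annotated, real = dependencies.partition { |file| file =~ /^@@@@/ }
    annotated.map! { |file| file.gsub('@@@@', '') }
    annotated.map! { |file| real.find { |r| !r.match(/^(.*\/)?#{Regexp.escape(file)}$/).nil? } }
    puts annotated.compact.inspect   # => ["src/foo.h", "mocks/mock_bar.h"]
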
View File

@ -21,9 +21,15 @@ class ProjectConfigManager
@options_files << File.basename( option_filepath )
config_hash.deep_merge!( @yaml_wrapper.load( option_filepath ) )
end
def filter_internal_sources(sources)
filtered_sources = sources.clone
filtered_sources.delete_if { |item| item =~ /#{CMOCK_MOCK_PREFIX}.+#{Regexp.escape(EXTENSION_SOURCE)}$/ }
filtered_sources.delete_if { |item| item =~ /#{VENDORS_FILES.map{|source| '\b' + Regexp.escape(source.ext(EXTENSION_SOURCE)) + '\b'}.join('|')}$/ }
return filtered_sources
end
def process_release_config_change
# has project configuration changed since last release build
@release_config_changed = @cacheinator.diff_cached_release_config?( @config_hash )
@ -40,7 +46,7 @@ class ProjectConfigManager
@test_defines_changed = @cacheinator.diff_cached_test_defines?( files )
if @test_defines_changed
# update timestamp for rake task prerequisites
@file_wrapper.touch( @configurator.project_test_force_rebuild_filepath )
@file_wrapper.touch( @configurator.project_test_force_rebuild_filepath, :mtime => Time.now + 10 )
end
end
end

View File

@ -10,7 +10,6 @@ $LOAD_PATH.unshift( CEEDLING_LIB )
$LOAD_PATH.unshift( File.join(CEEDLING_VENDOR, 'unity/auto') )
$LOAD_PATH.unshift( File.join(CEEDLING_VENDOR, 'diy/lib') )
$LOAD_PATH.unshift( File.join(CEEDLING_VENDOR, 'cmock/lib') )
$LOAD_PATH.unshift( File.join(CEEDLING_VENDOR, 'deep_merge/lib') )
require 'rake'

View File

@ -56,15 +56,40 @@ class ReleaseInvoker
end
def convert_libraries_to_arguments(libraries)
args = (libraries || []) + ((defined? LIBRARIES_SYSTEM) ? LIBRARIES_SYSTEM : [])
args = ((libraries || []) + ((defined? LIBRARIES_SYSTEM) ? LIBRARIES_SYSTEM : [])).flatten
if (defined? LIBRARIES_FLAG)
args.map! {|v| LIBRARIES_FLAG.gsub(/\$\{1\}/, v) }
end
return args
end
def get_library_paths_to_arguments()
paths = (defined? PATHS_LIBRARIES) ? (PATHS_LIBRARIES || []).clone : []
if (defined? LIBRARIES_PATH_FLAG)
paths.map! {|v| LIBRARIES_PATH_FLAG.gsub(/\$\{1\}/, v) }
end
return paths
end
def sort_objects_and_libraries(both)
extension = "\\" + ((defined? EXTENSION_SUBPROJECTS) ? EXTENSION_SUBPROJECTS : ".LIBRARY")
extension = if ((defined? EXTENSION_SUBPROJECTS) && (defined? EXTENSION_LIBRARIES))
extension_libraries = if (EXTENSION_LIBRARIES.class == Array)
EXTENSION_LIBRARIES.join(")|(?:\\")
else
EXTENSION_LIBRARIES
end
"(?:\\#{EXTENSION_SUBPROJECTS})|(?:\\#{extension_libraries})"
elsif (defined? EXTENSION_SUBPROJECTS)
"\\#{EXTENSION_SUBPROJECTS}"
elsif (defined? EXTENSION_LIBRARIES)
if (EXTENSION_LIBRARIES.class == Array)
"(?:\\#{EXTENSION_LIBRARIES.join(")|(?:\\")})"
else
"\\#{EXTENSION_LIBRARIES}"
end
else
"\\.LIBRARY"
end
sorted_objects = both.group_by {|v| v.match(/.+#{extension}$/) ? :libraries : :objects }
libraries = sorted_objects[:libraries] || []
objects = sorted_objects[:objects] || []

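A sketch of how library names and search paths are turned into linker arguments here; the flag templates mirror the new :libraries defaults (:flag and :path_flag) shown earlier, while the library names and path are invented.

    libraries_flag = '-l${1}'     # default :libraries -> :flag
    path_flag      = '-L ${1}'    # default :libraries -> :path_flag

    libs  = ['m', 'pthread'].map { |v| libraries_flag.gsub(/\$\{1\}/, v) }
    paths = ['vendor/libs'].map  { |v| path_flag.gsub(/\$\{1\}/, v) }
    puts((paths + libs).inspect)  # => ["-L vendor/libs", "-lm", "-lpthread"]
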
View File

@ -2,6 +2,17 @@
RELEASE_COMPILE_TASK_ROOT = RELEASE_TASK_ROOT + 'compile:' unless defined?(RELEASE_COMPILE_TASK_ROOT)
RELEASE_ASSEMBLE_TASK_ROOT = RELEASE_TASK_ROOT + 'assemble:' unless defined?(RELEASE_ASSEMBLE_TASK_ROOT)
# If GCC and Releasing a Library, Update Tools to Automatically Have Necessary Tags
if (TOOLS_RELEASE_COMPILER[:executable] == DEFAULT_RELEASE_COMPILER_TOOL[:executable])
if (File.extname(PROJECT_RELEASE_BUILD_TARGET) == '.so')
TOOLS_RELEASE_COMPILER[:arguments] << "-fPIC" unless TOOLS_RELEASE_COMPILER[:arguments].include?("-fPIC")
TOOLS_RELEASE_LINKER[:arguments] << "-shared" unless TOOLS_RELEASE_LINKER[:arguments].include?("-shared")
elsif (File.extname(PROJECT_RELEASE_BUILD_TARGET) == '.a')
TOOLS_RELEASE_COMPILER[:arguments] << "-fPIC" unless TOOLS_RELEASE_COMPILER[:arguments].include?("-fPIC")
TOOLS_RELEASE_LINKER[:executable] = 'ar'
TOOLS_RELEASE_LINKER[:arguments] = ['rcs', '${2}', '${1}'].compact
end
end
if (RELEASE_BUILD_USE_ASSEMBLY)
rule(/#{PROJECT_RELEASE_BUILD_OUTPUT_ASM_PATH}\/#{'.+\\'+EXTENSION_OBJECT}$/ => [
@ -37,16 +48,18 @@ end
rule(/#{PROJECT_RELEASE_BUILD_TARGET}/) do |bin_file|
objects, libraries = @ceedling[:release_invoker].sort_objects_and_libraries(bin_file.prerequisites)
tool = TOOLS_RELEASE_LINKER.clone
lib_args = @ceedling[:release_invoker].convert_libraries_to_arguments(libraries)
map_file = @ceedling[:configurator].project_release_build_map
tool = TOOLS_RELEASE_LINKER.clone
lib_args = @ceedling[:release_invoker].convert_libraries_to_arguments(libraries)
lib_paths = @ceedling[:release_invoker].get_library_paths_to_arguments()
map_file = @ceedling[:configurator].project_release_build_map
@ceedling[:generator].generate_executable_file(
tool,
RELEASE_SYM,
objects,
bin_file.name,
map_file,
lib_args )
lib_args,
lib_paths )
@ceedling[:release_invoker].artifactinate( bin_file.name, map_file, @ceedling[:configurator].release_build_artifacts )
end

View File

@ -34,16 +34,16 @@ end
rule(/#{PROJECT_TEST_BUILD_OUTPUT_PATH}\/#{'.+\\'+EXTENSION_EXECUTABLE}$/) do |bin_file|
lib_args = @ceedling[:test_invoker].convert_libraries_to_arguments()
lib_paths = @ceedling[:test_invoker].get_library_paths_to_arguments()
@ceedling[:generator].generate_executable_file(
TOOLS_TEST_LINKER,
TEST_SYM,
bin_file.prerequisites,
bin_file.name,
@ceedling[:file_path_utils].form_test_build_map_filepath( bin_file.name ),
lib_args )
lib_args,
lib_paths )
end
@ -66,8 +66,7 @@ namespace TEST_SYM do
@ceedling[:file_finder].find_test_from_file_path(test)
end
]) do |test|
@ceedling[:rake_wrapper][:directories].reenable if @ceedling[:task_invoker].first_run == false && @ceedling[:project_config_manager].test_defines_changed
@ceedling[:rake_wrapper][:directories].invoke
@ceedling[:rake_wrapper][:test_deps].invoke
@ceedling[:test_invoker].setup_and_invoke([test.source])
end
end

View File

@ -25,8 +25,8 @@ class Setupinator
@ceedling[:configurator].populate_cmock_defaults( config_hash )
@ceedling[:configurator].find_and_merge_plugins( config_hash )
@ceedling[:configurator].merge_imports( config_hash )
@ceedling[:configurator].tools_setup( config_hash )
@ceedling[:configurator].eval_environment_variables( config_hash )
@ceedling[:configurator].tools_setup( config_hash )
@ceedling[:configurator].eval_paths( config_hash )
@ceedling[:configurator].standardize_paths( config_hash )
@ceedling[:configurator].validate( config_hash )

View File

@ -46,25 +46,31 @@ class TaskInvoker
return @rake_utils.task_invoked?(regex)
end
def reset_rake_task_for_changed_defines(file)
if !(file =~ /#{VENDORS_FILES.map{|ignore| '\b' + ignore.ext(File.extname(file)) + '\b'}.join('|')}$/)
@rake_wrapper[file].clear_actions if @first_run == false && @project_config_manager.test_defines_changed
@rake_wrapper[file].reenable if @first_run == false && @project_config_manager.test_defines_changed
end
end
def invoke_test_mocks(mocks)
@dependinator.enhance_mock_dependencies( mocks )
mocks.each { |mock|
@rake_wrapper[mock].reenable if @first_run == false && @project_config_manager.test_defines_changed
reset_rake_task_for_changed_defines( mock )
@rake_wrapper[mock].invoke
}
end
def invoke_test_runner(runner)
@dependinator.enhance_runner_dependencies( runner )
@rake_wrapper[runner].reenable if @first_run == false && @project_config_manager.test_defines_changed
reset_rake_task_for_changed_defines( runner )
@rake_wrapper[runner].invoke
end
def invoke_test_shallow_include_lists(files)
@dependinator.enhance_shallow_include_lists_dependencies( files )
par_map(PROJECT_COMPILE_THREADS, files) do |file|
@rake_wrapper[file].reenable if @first_run == false && @project_config_manager.test_defines_changed
reset_rake_task_for_changed_defines( file )
@rake_wrapper[file].invoke
end
end
@ -72,7 +78,7 @@ class TaskInvoker
def invoke_test_preprocessed_files(files)
@dependinator.enhance_preprocesed_file_dependencies( files )
par_map(PROJECT_COMPILE_THREADS, files) do |file|
@rake_wrapper[file].reenable if @first_run == false && @project_config_manager.test_defines_changed
reset_rake_task_for_changed_defines( file )
@rake_wrapper[file].invoke
end
end
@ -80,14 +86,14 @@ class TaskInvoker
def invoke_test_dependencies_files(files)
@dependinator.enhance_dependencies_dependencies( files )
par_map(PROJECT_COMPILE_THREADS, files) do |file|
@rake_wrapper[file].reenable if @first_run == false && @project_config_manager.test_defines_changed
reset_rake_task_for_changed_defines( file )
@rake_wrapper[file].invoke
end
end
def invoke_test_objects(objects)
par_map(PROJECT_COMPILE_THREADS, objects) do |object|
@rake_wrapper[object].reenable if @first_run == false && @project_config_manager.test_defines_changed
reset_rake_task_for_changed_defines( object )
@rake_wrapper[object].invoke
end
end
@ -98,7 +104,6 @@ class TaskInvoker
def invoke_test_results(result)
@dependinator.enhance_results_dependencies( result )
@rake_wrapper[result].reenable if @first_run == false && @project_config_manager.test_defines_changed
@rake_wrapper[result].invoke
end

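A sketch of the vendor filter inside reset_rake_task_for_changed_defines: build products of the vendored sources (VENDORS_FILES from the constants change earlier) are never reset when test defines change. The object path is made up, and String#ext comes from Rake.

    require 'rake'   # String#ext

    vendors = %w(unity UnityHelper cmock CException)              # VENDORS_FILES
    file    = 'build/test/out/unity.o'                            # hypothetical task name
    vendor_regex = /#{vendors.map { |v| '\b' + v.ext(File.extname(file)) + '\b' }.join('|')}$/
    puts !!(file =~ vendor_regex)   # => true, so this task is left untouched
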
View File

@ -4,28 +4,10 @@ require 'ceedling/version'
desc "Display build environment version info."
task :version do
puts " Ceedling:: #{Ceedling::Version::CEEDLING}"
[
['CException', File.join( CEEDLING_VENDOR, CEXCEPTION_ROOT_PATH)],
[' CMock', File.join( CEEDLING_VENDOR, CMOCK_ROOT_PATH)],
[' Unity', File.join( CEEDLING_VENDOR, UNITY_ROOT_PATH)],
].each do |tool|
name = tool[0]
base_path = tool[1]
version_string = begin
@ceedling[:file_wrapper].read( File.join(base_path, 'release', 'version.info') ).strip
rescue
"UNKNOWN"
end
build_string = begin
@ceedling[:file_wrapper].read( File.join(base_path, 'release', 'build.info') ).strip
rescue
"UNKNOWN"
end
puts "#{name}:: #{version_string.empty? ? '#.#.' : (version_string + '.')}#{build_string.empty? ? '?' : build_string}"
end
puts " Ceedling:: #{Ceedling::Version::CEEDLING}"
puts " Unity:: #{Ceedling::Version::UNITY}"
puts " CMock:: #{Ceedling::Version::CMOCK}"
puts " CException:: #{Ceedling::Version::CEXCEPTION}"
end
desc "Set verbose output (silent:[#{Verbosity::SILENT}] - obnoxious:[#{Verbosity::OBNOXIOUS}])."
@ -65,6 +47,12 @@ task :sanity_checks, :level do |t, args|
@ceedling[:configurator].sanity_checks = check_level
end
# non advertised catch for calling upgrade in the wrong place
task :upgrade do
puts "WARNING: You're currently IN your project directory. Take a step out and try"
puts "again if you'd like to perform an upgrade."
end
# list expanded environment variables
if (not ENVIRONMENT.empty?)
desc "List all configured environment variables."
@ -73,7 +61,7 @@ task :environment do
ENVIRONMENT.each do |env|
env.each_key do |key|
name = key.to_s.upcase
env_list.push(" - #{name}: \"#{env[key]}\"")
env_list.push(" - #{name}: \"#{env[key]}\"")
end
end
env_list.sort.each do |env_line|
@ -88,7 +76,7 @@ namespace :options do
option = File.basename(option_path, '.yml')
desc "Merge #{option} project options."
task option.downcase.to_sym do
task option.to_sym do
hash = @ceedling[:project_config_manager].merge_options( @ceedling[:setupinator].config_hash, option_path )
@ceedling[:setupinator].do_setup( hash )
if @ceedling[:configurator].project_release_build
@ -97,6 +85,23 @@ namespace :options do
end
end
# This is to give nice errors when typing options
rule /^options:.*/ do |t, args|
filename = t.to_s.split(':')[-1] + '.yml'
filelist = COLLECTION_PROJECT_OPTIONS.map{|s| File.basename(s) }
@ceedling[:file_finder].find_file_from_list(filename, filelist, :error)
end
# This will output the fully-merged tools options to their own project.yml file
desc "Export tools options to a new project file"
task :export, :filename do |t, args|
outfile = args.filename || 'tools.yml'
toolcfg = {}
@ceedling[:configurator].project_config_hash.each_pair do |k,v|
toolcfg[k] = v if (k.to_s[0..5] == 'tools_')
end
File.open(outfile,'w') {|f| f << toolcfg.to_yaml({:indentation => 2})}
end
end

View File

@ -45,26 +45,35 @@ task(:clobber => [:clean]) do
@ceedling[:streaminator].stdout_puts("\nClobbering all generated files...\n(For large projects, this task may take a long time to complete)\n\n")
begin
CLOBBER.each { |fn| REMOVE_FILE_PROC.call(fn) }
@ceedling[:rake_wrapper][:directories].invoke
@ceedling[:dependinator].touch_force_rebuild_files
rescue
end
end
# create a directory task for each of the paths, so we know how to build them
PROJECT_BUILD_PATHS.each { |path| directory(path) }
# create directories that hold build output and generated files & touching rebuild dependency sources
task(:directories => PROJECT_BUILD_PATHS) { @ceedling[:dependinator].touch_force_rebuild_files }
# create a single directory task which verifies all the others get built
task :directories => PROJECT_BUILD_PATHS
# when the force file doesn't exist, it probably means we clobbered or are on a fresh
# install. In either case, stuff was deleted, so assume we want to rebuild it all
file @ceedling[:configurator].project_test_force_rebuild_filepath do
unless File.exists?(@ceedling[:configurator].project_test_force_rebuild_filepath)
@ceedling[:dependinator].touch_force_rebuild_files
end
end
# list paths discovered at load time
namespace :paths do
paths = @ceedling[:setupinator].config_hash[:paths]
paths.each_key do |section|
name = section.to_s.downcase
standard_paths = ['test','source','include']
paths = @ceedling[:setupinator].config_hash[:paths].keys.map{|n| n.to_s.downcase}
paths = (paths + standard_paths).uniq
paths.each do |name|
path_list = Object.const_get("COLLECTION_PATHS_#{name.upcase}")
if (path_list.size != 0)
if (path_list.size != 0) || (standard_paths.include?(name))
desc "List all collected #{name} paths."
task(name.to_sym) { puts "#{name} paths:"; path_list.sort.each {|path| puts " - #{path}" } }
end
@ -77,10 +86,11 @@ end
namespace :files do
categories = [
['test', COLLECTION_ALL_TESTS],
['source', COLLECTION_ALL_SOURCE],
['header', COLLECTION_ALL_HEADERS]
]
['test', COLLECTION_ALL_TESTS],
['source', COLLECTION_ALL_SOURCE],
['include', COLLECTION_ALL_HEADERS],
['support', COLLECTION_ALL_SUPPORT]
]
using_assembly = (defined?(TEST_BUILD_USE_ASSEMBLY) && TEST_BUILD_USE_ASSEMBLY) ||
(defined?(RELEASE_BUILD_USE_ASSEMBLY) && RELEASE_BUILD_USE_ASSEMBLY)

View File

@ -1,13 +1,15 @@
require 'ceedling/constants'
task :test => [:directories] do
task :test_deps => [:directories]
task :test => [:test_deps] do
Rake.application['test:all'].invoke
end
namespace TEST_SYM do
desc "Run all unit tests (also just 'test' works)."
task :all => [:directories] do
task :all => [:test_deps] do
@ceedling[:test_invoker].setup_and_invoke(COLLECTION_ALL_TESTS)
end
@ -21,17 +23,17 @@ namespace TEST_SYM do
end
desc "Run tests for changed files."
task :delta => [:directories] do
task :delta => [:test_deps] do
@ceedling[:test_invoker].setup_and_invoke(COLLECTION_ALL_TESTS, TEST_SYM, {:force_run => false})
end
desc "Just build tests without running."
task :build_only => [:directories] do
task :build_only => [:test_deps] do
@ceedling[:test_invoker].setup_and_invoke(COLLECTION_ALL_TESTS, TEST_SYM, {:build_only => true})
end
desc "Run tests by matching regular expression pattern."
task :pattern, [:regex] => [:directories] do |t, args|
task :pattern, [:regex] => [:test_deps] do |t, args|
matches = []
COLLECTION_ALL_TESTS.each { |test| matches << test if (test =~ /#{args.regex}/) }
@ -44,7 +46,7 @@ namespace TEST_SYM do
end
desc "Run tests whose test path contains [dir] or [dir] substring."
task :path, [:dir] => [:directories] do |t, args|
task :path, [:dir] => [:test_deps] do |t, args|
matches = []
COLLECTION_ALL_TESTS.each { |test| matches << test if File.dirname(test).include?(args.dir.gsub(/\\/, '/')) }

View File

@ -19,6 +19,11 @@ class TestIncludesExtractor
gather_and_store_includes( test, extract_from_file(test) )
end
# open, scan for, and sort & store includes of test file
def parse_test_file_source_include(test)
return extract_source_include_from_file(test)
end
# mocks with no file extension
def lookup_raw_mock_list(test)
file_key = form_file_key(test)
@ -65,6 +70,27 @@ class TestIncludesExtractor
return includes.uniq
end
def extract_source_include_from_file(file)
source_includes = []
source_extension = @configurator.extension_source
contents = @file_wrapper.read(file)
# remove line comments
contents = contents.gsub(/\/\/.*$/, '')
# remove block comments
contents = contents.gsub(/\/\*.*?\*\//m, '')
contents.split("\n").each do |line|
# look for include statement
scan_results = line.scan(/#include\s+\"\s*(.+#{'\\'+source_extension})\s*\"/)
source_includes << scan_results[0][0] if (scan_results.size > 0)
end
return source_includes.uniq
end
def gather_and_store_includes(file, includes)
mock_prefix = @configurator.cmock_mock_prefix
header_extension = @configurator.extension_header

View File

@ -23,49 +23,24 @@ class TestInvoker
@mocks = []
end
def get_test_definition_str(test)
return "-D" + File.basename(test, File.extname(test)).upcase.sub(/@.*$/, "")
end
def get_tools_compilers
tools_compilers = Hash.new
tools_compilers["for unit test"] = TOOLS_TEST_COMPILER if defined? TOOLS_TEST_COMPILER
tools_compilers["for gcov"] = TOOLS_GCOV_COMPILER if defined? TOOLS_GCOV_COMPILER
return tools_compilers
end
def add_test_definition(test)
test_definition_str = get_test_definition_str(test)
get_tools_compilers.each do |tools_compiler_key, tools_compiler_value|
tools_compiler_value[:arguments].push("-D#{File.basename(test, ".*").strip.upcase.sub(/@.*$/, "")}")
@streaminator.stdout_puts("Add the definition value in the build option #{tools_compiler_value[:arguments][-1]} #{tools_compiler_key}", Verbosity::OBNOXIOUS)
end
end
def delete_test_definition(test)
test_definition_str = get_test_definition_str(test)
get_tools_compilers.each do |tools_compiler_key, tools_compiler_value|
num_options = tools_compiler_value[:arguments].size
@streaminator.stdout_puts("Delete the definition value in the build option #{tools_compiler_value[:arguments][-1]} #{tools_compiler_key}", Verbosity::OBNOXIOUS)
tools_compiler_value[:arguments].delete_if{|i| i == test_definition_str}
if num_options > tools_compiler_value[:arguments].size + 1
@streaminator.stderr_puts("WARNING: duplicated test definition.")
end
end
end
# Convert libraries configuration from YAML configuration
# into a string that can be given to the compiler.
def convert_libraries_to_arguments()
if @configurator.project_config_hash.has_key?(:libraries_test)
lib_args = @configurator.project_config_hash[:libraries_test]
lib_args.flatten!
lib_flag = @configurator.project_config_hash[:libraries_flag]
lib_args.map! {|v| lib_flag.gsub(/\$\{1\}/, v) } if (defined? lib_flag)
return lib_args
args = ((@configurator.project_config_hash[:libraries_test] || []) + ((defined? LIBRARIES_SYSTEM) ? LIBRARIES_SYSTEM : [])).flatten
if (defined? LIBRARIES_FLAG)
args.map! {|v| LIBRARIES_FLAG.gsub(/\$\{1\}/, v) }
end
return args
end
def get_library_paths_to_arguments()
paths = (defined? PATHS_LIBRARIES) ? (PATHS_LIBRARIES || []).clone : []
if (defined? LIBRARIES_PATH_FLAG)
paths.map! {|v| LIBRARIES_PATH_FLAG.gsub(/\$\{1\}/, v) }
end
return paths
end
def setup_and_invoke(tests, context=TEST_SYM, options={:force_run => true, :build_only => false})
@ -83,18 +58,25 @@ class TestInvoker
test_name ="#{File.basename(test)}".chomp('.c')
def_test_key="defines_#{test_name.downcase}"
# Re-define the project out path and pre-processor defines.
if @configurator.project_config_hash.has_key?(def_test_key.to_sym)
@project_config_manager.test_config_changed
if @configurator.project_config_hash.has_key?(def_test_key.to_sym) || @configurator.defines_use_test_definition
defs_bkp = Array.new(COLLECTION_DEFINES_TEST_AND_VENDOR)
printf " ************** Specific test definitions for #{test_name} !!! \n"
tst_defs_cfg = @configurator.project_config_hash[def_test_key.to_sym]
tst_defs_cfg = Array.new(defs_bkp)
if @configurator.project_config_hash.has_key?(def_test_key.to_sym)
tst_defs_cfg.replace(@configurator.project_config_hash[def_test_key.to_sym])
tst_defs_cfg .concat(COLLECTION_DEFINES_VENDOR) if COLLECTION_DEFINES_VENDOR
end
if @configurator.defines_use_test_definition
tst_defs_cfg << File.basename(test, ".*").strip.upcase.sub(/@.*$/, "")
end
COLLECTION_DEFINES_TEST_AND_VENDOR.replace(tst_defs_cfg)
end
# redefine the project out path and preprocessor defines
if @configurator.project_config_hash.has_key?(def_test_key.to_sym)
@streaminator.stdout_puts("Updating test definitions for #{test_name}", Verbosity::NORMAL)
orig_path = @configurator.project_test_build_output_path
@configurator.project_config_hash[:project_test_build_output_path] = File.join(@configurator.project_test_build_output_path, test_name)
@file_wrapper.mkdir(@configurator.project_test_build_output_path)
COLLECTION_DEFINES_TEST_AND_VENDOR.replace(tst_defs_cfg)
# printf " * new defines = #{COLLECTION_DEFINES_TEST_AND_VENDOR}\n"
end
# collect up test fixture pieces & parts
@ -103,16 +85,15 @@ class TestInvoker
sources = @test_invoker_helper.extract_sources( test )
extras = @configurator.collection_test_fixture_extra_link_objects
core = [test] + mock_list + sources
objects = @file_path_utils.form_test_build_objects_filelist( [runner] + core + extras )
objects = @file_path_utils.form_test_build_objects_filelist( [runner] + core + extras ).uniq
results_pass = @file_path_utils.form_pass_results_filepath( test )
results_fail = @file_path_utils.form_fail_results_filepath( test )
@project_config_manager.process_test_defines_change(sources)
# identify all the objects that shall not be linked and then remove them from the objects list.
no_link_objects = @file_path_utils.form_test_build_objects_filelist(@preprocessinator.preprocess_shallow_source_includes( test ))
objects = objects.uniq - no_link_objects
# add the definition value in the build option for the unit test
if @configurator.defines_use_test_definition
add_test_definition(test)
end
@project_config_manager.process_test_defines_change(@project_config_manager.filter_internal_sources(sources))
# clean results files so we have a missing file with which to kick off rake's dependency rules
@test_invoker_helper.clean_results( {:pass => results_pass, :fail => results_fail}, options )
@ -129,7 +110,7 @@ class TestInvoker
@dependinator.enhance_test_build_object_dependencies( objects )
# associate object files with executable
@dependinator.setup_test_executable_dependencies( test, objects )
@dependinator.enhance_test_executable_dependencies( test, objects )
# build test objects
@task_invoker.invoke_test_objects( objects )
@ -146,18 +127,14 @@ class TestInvoker
rescue => e
@build_invoker_utils.process_exception( e, context )
ensure
# delete the definition value in the build option for the unit test
if @configurator.defines_use_test_definition
delete_test_definition(test)
end
@plugin_manager.post_test( test )
# restore the project test defines
if @configurator.project_config_hash.has_key?(def_test_key.to_sym)
# @configurator.project_config_hash[:defines_test] =
if @configurator.project_config_hash.has_key?(def_test_key.to_sym) || @configurator.defines_use_test_definition
COLLECTION_DEFINES_TEST_AND_VENDOR.replace(defs_bkp)
# printf " ---- Restored defines at #{defs_bkp}"
@configurator.project_config_hash[:project_test_build_output_path] = orig_path
printf " ************** Restored defines and build path\n"
if @configurator.project_config_hash.has_key?(def_test_key.to_sym)
@configurator.project_config_hash[:project_test_build_output_path] = orig_path
@streaminator.stdout_puts("Restored defines and build path to standard", Verbosity::NORMAL)
end
end
end

View File

@ -11,7 +11,7 @@ class TestInvokerHelper
def process_deep_dependencies(files)
return if (not @configurator.project_use_deep_dependencies)
dependencies_list = @file_path_utils.form_test_dependencies_filelist( files )
dependencies_list = @file_path_utils.form_test_dependencies_filelist( files ).uniq
if @configurator.project_generate_deep_dependencies
@task_invoker.invoke_test_dependencies_files( dependencies_list )

View File

@ -0,0 +1,54 @@
# @private
module Ceedling
module Version
{ "UNITY" => File.join("unity","src","unity.h"),
"CMOCK" => File.join("cmock","src","cmock.h"),
"CEXCEPTION" => File.join("c_exception","lib","CException.h")
}.each_pair do |name, path|
# Check for local or global version of vendor directory in order to look up versions
path1 = File.expand_path( File.join("..","..","vendor",path) )
path2 = File.expand_path( File.join(File.dirname(__FILE__),"..","..","vendor",path) )
filename = if (File.exists?(path1))
path1
elsif (File.exists?(path2))
path2
elsif File.exists?(CEEDLING_VENDOR)
path3 = File.expand_path( File.join(CEEDLING_VENDOR,path) )
if (File.exists?(path3))
path3
else
basepath = File.join( CEEDLING_VENDOR, path.split(/\\\//)[0], 'release')
begin
[ @ceedling[:file_wrapper].read( File.join(base_path, 'release', 'version.info') ).strip,
@ceedling[:file_wrapper].read( File.join(base_path, 'release', 'build.info') ).strip ].join('.')
rescue
"#{name}"
end
end
else
module_eval("#{name} = 'unknown'")
continue
end
# Actually look up the versions
a = [0,0,0]
begin
File.readlines(filename).each do |line|
["VERSION_MAJOR", "VERSION_MINOR", "VERSION_BUILD"].each_with_index do |field, i|
m = line.match(/#{name}_#{field}\s+(\d+)/)
a[i] = m[1] unless (m.nil?)
end
end
rescue
abort("Can't collect data for vendor component: \"#{filename}\" . \nPlease check your setup.")
end
# splat it to return the final value
eval("#{name} = '#{a.join(".")}'")
end
GEM = "0.31.1"
CEEDLING = GEM
end
end

View File

@ -0,0 +1,76 @@
ceedling-bullseye
=================
# Plugin Overview
Plugin for integrating the Bullseye code coverage tool into Ceedling projects.
This plugin requires a working license for the Bullseye code coverage tools. The tools
must be on the system PATH, or their path should be added to the environment in the
`project.yml` file.
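
As a reference, here is a minimal sketch of such an environment block in `project.yml` (the install path and covfile location are assumptions; adjust them to your own setup). The `:covfile:` entry is what the `ENVIRONMENT_COVFILE` references in the tool definitions below resolve to:

```
:environment:
  # prepend the Bullseye install directory to PATH (example path; adjust to your install)
  - :path:
    - C:/BullseyeCoverage/bin
    - "#{ENV['PATH']}"
  # coverage database consumed by covc/covsrc/covfn via ENVIRONMENT_COVFILE
  - :covfile: test/build/artifacts/test.cov
```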
## Configuration
The bullseye plugin supports configuration options via your `project.yml` provided
by Ceedling. The following is a typical configuration example:
```
:bullseye:
:auto_license: TRUE
:plugins:
:bullseye_lib_path: []
:paths:
:bullseye_toolchain_include: []
:tools:
:bullseye_instrumentation:
:executable: covc
:arguments:
- '--file $': ENVIRONMENT_COVFILE
- -q
- ${1}
:bullseye_compiler:
:executable: gcc
:arguments:
- -g
- -I"$": COLLECTION_PATHS_TEST_SUPPORT_SOURCE_INCLUDE_VENDOR
- -I"$": COLLECTION_PATHS_BULLSEYE_TOOLCHAIN_INCLUDE
- -D$: COLLECTION_DEFINES_TEST_AND_VENDOR
- -DBULLSEYE_COMPILER
- -c "${1}"
- -o "${2}"
:bullseye_linker:
:executable: gcc
:arguments:
- ${1}
- -o ${2}
- -L$: PLUGINS_BULLSEYE_LIB_PATH
- -lcov
:bullseye_fixture:
:executable: ${1}
:bullseye_report_covsrc:
:executable: covsrc
:arguments:
- '--file $': ENVIRONMENT_COVFILE
- -q
- -w140
:bullseye_report_covfn:
:executable: covfn
:stderr_redirect: :auto
:arguments:
- '--file $': ENVIRONMENT_COVFILE
- --width 120
- --no-source
- '"${1}"'
:bullseye_browser:
:executable: CoverageBrowser
:background_exec: :auto
:optional: TRUE
:arguments:
- '"$"': ENVIRONMENT_COVFILE
```
## Example Usage
```sh
ceedling bullseye:all utils:bullseye
```
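
The plugin's rakefile (see `bullseye.rake` further down in this diff) also defines `pattern` and `path` task variants; a hedged sketch of invoking them, with placeholder test and directory names:

```sh
# run coverage only for tests whose file name matches a regex (placeholder pattern)
ceedling bullseye:pattern[uart]

# run coverage only for tests whose path contains a substring (placeholder directory)
ceedling bullseye:path[drivers]
```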

View File

@ -32,12 +32,16 @@ rule(/#{BULLSEYE_BUILD_OUTPUT_PATH}\/#{'.+\\'+EXTENSION_OBJECT}$/ => [
end
rule(/#{BULLSEYE_BUILD_OUTPUT_PATH}\/#{'.+\\'+EXTENSION_EXECUTABLE}$/) do |bin_file|
lib_args = @ceedling[:test_invoker].convert_libraries_to_arguments()
lib_paths = @ceedling[:test_invoker].get_library_paths_to_arguments()
@ceedling[:generator].generate_executable_file(
TOOLS_BULLSEYE_LINKER,
BULLSEYE_SYM,
bin_file.prerequisites,
bin_file.name,
@ceedling[:file_path_utils].form_test_build_map_filepath(bin_file.name)
@ceedling[:file_path_utils].form_test_build_map_filepath(bin_file.name),
lib_args,
lib_paths
)
end
@ -69,7 +73,7 @@ namespace BULLSEYE_SYM do
task source_coverage: COLLECTION_ALL_SOURCE.pathmap("#{BULLSEYE_BUILD_OUTPUT_PATH}/%n#{@ceedling[:configurator].extension_object}")
desc 'Run code coverage for all tests'
task all: [:directories] do
task all: [:test_deps] do
@ceedling[:configurator].replace_flattened_config(@ceedling[BULLSEYE_SYM].config)
@ceedling[BULLSEYE_SYM].enableBullseye(true)
@ceedling[:test_invoker].setup_and_invoke(COLLECTION_ALL_TESTS, BULLSEYE_SYM)
@ -81,18 +85,18 @@ namespace BULLSEYE_SYM do
message = "\nOops! '#{BULLSEYE_ROOT_NAME}:*' isn't a real task. " +
"Use a real test or source file name (no path) in place of the wildcard.\n" +
"Example: rake #{BULLSEYE_ROOT_NAME}:foo.c\n\n"
@ceedling[:streaminator].stdout_puts( message )
end
desc 'Run tests by matching regular expression pattern.'
task :pattern, [:regex] => [:directories] do |_t, args|
task :pattern, [:regex] => [:test_deps] do |_t, args|
matches = []
COLLECTION_ALL_TESTS.each do |test|
matches << test if test =~ /#{args.regex}/
end
if !matches.empty?
@ceedling[:configurator].replace_flattened_config(@ceedling[BULLSEYE_SYM].config)
@ceedling[BULLSEYE_SYM].enableBullseye(true)
@ -104,13 +108,13 @@ namespace BULLSEYE_SYM do
end
desc 'Run tests whose test path contains [dir] or [dir] substring.'
task :path, [:dir] => [:directories] do |_t, args|
task :path, [:dir] => [:test_deps] do |_t, args|
matches = []
COLLECTION_ALL_TESTS.each do |test|
matches << test if File.dirname(test).include?(args.dir.tr('\\', '/'))
end
if !matches.empty?
@ceedling[:configurator].replace_flattened_config(@ceedling[BULLSEYE_SYM].config)
@ceedling[BULLSEYE_SYM].enableBullseye(true)
@ -122,13 +126,13 @@ namespace BULLSEYE_SYM do
end
desc 'Run code coverage for changed files'
task delta: [:directories] do
task delta: [:test_deps] do
@ceedling[:configurator].replace_flattened_config(@ceedling[BULLSEYE_SYM].config)
@ceedling[BULLSEYE_SYM].enableBullseye(true)
@ceedling[:test_invoker].setup_and_invoke(COLLECTION_ALL_TESTS, BULLSEYE_SYM, {:force_run => false})
@ceedling[:configurator].restore_config
end
# use a rule to increase efficiency for large projects
# bullseye test tasks by regex
rule(/^#{BULLSEYE_TASK_ROOT}\S+$/ => [
@ -138,7 +142,7 @@ namespace BULLSEYE_SYM do
@ceedling[:file_finder].find_test_from_file_path(test)
end
]) do |test|
@ceedling[:rake_wrapper][:directories].invoke
@ceedling[:rake_wrapper][:test_deps].invoke
@ceedling[:configurator].replace_flattened_config(@ceedling[BULLSEYE_SYM].config)
@ceedling[BULLSEYE_SYM].enableBullseye(true)
@ceedling[:test_invoker].setup_and_invoke([test.source], BULLSEYE_SYM)
@ -159,11 +163,11 @@ end
end
namespace UTILS_SYM do
desc "Open Bullseye code coverage browser"
task BULLSEYE_SYM do
command = @ceedling[:tool_executor].build_command_line(TOOLS_BULLSEYE_BROWSER, [])
@ceedling[:tool_executor].exec(command[:line], command[:options])
end
end

Some files were not shown because too many files have changed in this diff.