Mirror of https://github.com/hathach/tinyusb.git (synced 2025-04-17 17:42:43 +00:00)

Merge pull request #1781 from hathach/update-unit-test: Update unit test

Commit ab8cfb3d5b
@@ -7,4 +7,4 @@ exclude-file = .codespell/exclude-file.txt
 check-filenames =
 check-hidden =
 count =
-skip = .cproject,./.git,./hw/mcu,./lib,./examples/*/*/_build,./examples/*/*/ses,./examples/*/*/ozone,./hw/mcu,./test/vendor,./tests_obsolete,./tools/uf2
+skip = .cproject,./.git,./hw/mcu,./lib,./examples/*/*/_build,./examples/*/*/ses,./examples/*/*/ozone,./hw/mcu,./test/unit-test/vendor,./tests_obsolete,./tools/uf2
.github/workflows/build_aarch64.yml (16 changes, vendored)

@@ -1,11 +1,19 @@
 name: Build AArch64

 on:
-  pull_request:
   push:
-  release:
-    types:
-      - created
+    paths:
+      - 'src'
+      - 'examples'
+      - 'lib'
+      - 'hw'
+  pull_request:
+    branches: [ master ]
+    paths:
+      - 'src'
+      - 'examples'
+      - 'lib'
+      - 'hw'

 concurrency:
   group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
.github/workflows/build_arm.yml (16 changes, vendored)

@@ -1,11 +1,19 @@
 name: Build ARM

 on:
-  pull_request:
   push:
-  release:
-    types:
-      - created
+    paths:
+      - 'src'
+      - 'examples'
+      - 'lib'
+      - 'hw'
+  pull_request:
+    branches: [ master ]
+    paths:
+      - 'src'
+      - 'examples'
+      - 'lib'
+      - 'hw'

 concurrency:
   group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
.github/workflows/build_esp.yml (16 changes, vendored)

@@ -1,11 +1,19 @@
 name: Build ESP

 on:
-  pull_request:
   push:
-  release:
-    types:
-      - created
+    paths:
+      - 'src'
+      - 'examples'
+      - 'lib'
+      - 'hw'
+  pull_request:
+    branches: [ master ]
+    paths:
+      - 'src'
+      - 'examples'
+      - 'lib'
+      - 'hw'

 concurrency:
   group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
.github/workflows/build_msp430.yml (16 changes, vendored)

@@ -1,11 +1,19 @@
 name: Build MSP430

 on:
-  pull_request:
   push:
-  release:
-    types:
-      - created
+    paths:
+      - 'src'
+      - 'examples'
+      - 'lib'
+      - 'hw'
+  pull_request:
+    branches: [ master ]
+    paths:
+      - 'src'
+      - 'examples'
+      - 'lib'
+      - 'hw'

 concurrency:
   group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
.github/workflows/build_renesas.yml (16 changes, vendored)

@@ -1,11 +1,19 @@
 name: Build Renesas

 on:
-  pull_request:
   push:
-  release:
-    types:
-      - created
+    paths:
+      - 'src'
+      - 'examples'
+      - 'lib'
+      - 'hw'
+  pull_request:
+    branches: [ master ]
+    paths:
+      - 'src'
+      - 'examples'
+      - 'lib'
+      - 'hw'

 concurrency:
   group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
.github/workflows/build_riscv.yml (16 changes, vendored)

@@ -1,11 +1,19 @@
 name: Build RISC-V

 on:
-  pull_request:
   push:
-  release:
-    types:
-      - created
+    paths:
+      - 'src'
+      - 'examples'
+      - 'lib'
+      - 'hw'
+  pull_request:
+    branches: [ master ]
+    paths:
+      - 'src'
+      - 'examples'
+      - 'lib'
+      - 'hw'

 concurrency:
   group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
.github/workflows/pre-commit.yml (7 changes, vendored)

@@ -1,8 +1,9 @@
 name: pre-commit

 on:
-  pull_request:
   push:
+  pull_request:
+    branches: [ master ]

 concurrency:
   group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
@@ -20,7 +21,7 @@ jobs:
     - name: Setup Ruby
       uses: ruby/setup-ruby@v1
       with:
-        ruby-version: '2.7'
+        ruby-version: '3.0'

     - name: Checkout TinyUSB
       uses: actions/checkout@v3
@@ -32,5 +33,5 @@ jobs:
       run: |
         # Install Ceedling
         gem install ceedling
-        cd test
+        cd test/unit-test
         ceedling test:all
.github/workflows/test_hardware.yml (17 changes, vendored)

@@ -1,10 +1,19 @@
 name: Hardware Test

 on:
-  pull_request:
   push:
-  release:
-    types:
-      - created
+    paths:
+      - 'src'
+      - 'examples'
+      - 'lib'
+      - 'hw'
+  pull_request:
+    branches: [ master ]
+    paths:
+      - 'src'
+      - 'examples'
+      - 'lib'
+      - 'hw'

 # Hardware in the loop (HIL)
 # Current self-hosted instance is running on an EPYC 7232 server hosted by HiFiPhile user
@@ -15,6 +15,7 @@
 # :release_build: TRUE
 :test_file_prefix: test_
 :which_ceedling: vendor/ceedling
+:ceedling_version: 0.31.1
 :default_tasks:
   - test:all

@@ -35,7 +36,7 @@
   - +:test/**
   - -:test/support
 :source:
-  - ../src/**
+  - ../../src/**
 :support:
   - test/support
@@ -51,7 +51,7 @@

 // CFG_TUSB_DEBUG is defined by compiler in DEBUG build
 #ifndef CFG_TUSB_DEBUG
-#define CFG_TUSB_DEBUG 0
+#define CFG_TUSB_DEBUG 1
 #endif

 /* USB DMA on some MCUs can only access a specific SRAM region with restriction on alignment.
@@ -49,16 +49,16 @@ unless (project_found)
   end

   desc "upgrade PROJECT_NAME", "upgrade ceedling for a project (not req'd if gem used)"
-  method_option :docs, :type => :boolean, :default => false, :desc => "Add docs in project vendor directory"
-  method_option :local, :type => :boolean, :default => false, :desc => "Create a copy of Ceedling in the project vendor directory"
-  method_option :no_configs, :type => :boolean, :default => false, :desc => "Don't install starter configuration files"
-  method_option :noconfigs, :type => :boolean, :default => false
-
-  #deprecated:
-  method_option :no_docs, :type => :boolean, :default => false
-  method_option :nodocs, :type => :boolean, :default => false
   def upgrade(name, silent = false)
-    copy_assets_and_create_structure(name, silent, true, options || {:upgrade => true})
+    as_local = true
+    begin
+      require "yaml"
+      as_local = (YAML.load_file(File.join(name, "project.yml"))[:project][:which_ceedling] != 'gem')
+    rescue
+      raise "ERROR: Could not find valid project file '#{yaml_path}'"
+    end
+    found_docs = File.exists?( File.join(name, "docs", "CeedlingPacket.md") )
+    copy_assets_and_create_structure(name, silent, true, {:upgrade => true, :no_configs => true, :local => as_local, :docs => found_docs})
   end

   no_commands do
@@ -90,26 +90,30 @@ unless (project_found)
      FileUtils.touch(File.join(test_support_path, '.gitkeep'))

      # If documentation requested, create a place to dump them and do so
+      doc_path = ""
      if use_docs
-        doc_path = File.join(ceedling_path, 'docs')
+        doc_path = use_gem ? File.join(name, 'docs') : File.join(ceedling_path, 'docs')
        FileUtils.mkdir_p doc_path

        in_doc_path = lambda {|f| File.join(doc_path, f)}

-        doc_files = [
-          'docs/CeedlingPacket.md',
-          'vendor/c_exception/docs/CException.md',
-          'vendor/cmock/docs/CMock_Summary.md',
-          'vendor/unity/docs/UnityAssertionsCheatSheetSuitableforPrintingandPossiblyFraming.pdf',
-          'vendor/unity/docs/UnityAssertionsReference.md',
-          'vendor/unity/docs/UnityConfigurationGuide.md',
-          'vendor/unity/docs/UnityGettingStartedGuide.md',
-          'vendor/unity/docs/UnityHelperScriptsGuide.md',
-          'vendor/unity/docs/ThrowTheSwitchCodingStandard.md',
-        ]
-
-        doc_files.each do |f|
-          copy_file(f, in_doc_path.call(File.basename(f)), :force => force)
+        # Add documentation from main projects to list
+        doc_files = {}
+        ['docs','vendor/unity/docs','vendor/cmock/docs','vendor/cexception/docs'].each do |p|
+          Dir[ File.expand_path(File.join(here, p, '*.md')) ].each do |f|
+            doc_files[ File.basename(f) ] = f unless(doc_files.include? f)
+          end
+        end
+
+        # Add documentation from plugins to list
+        Dir[ File.join(here, 'plugins/**/README.md') ].each do |plugin_path|
+          k = "plugin_" + plugin_path.split(/\\|\//)[-2] + ".md"
+          doc_files[ k ] = File.expand_path(plugin_path)
+        end
+
+        # Copy all documentation
+        doc_files.each_pair do |k, v|
+          copy_file(v, in_doc_path.call(k), :force => force)
        end
      end

@@ -133,7 +137,6 @@ unless (project_found)
        {:src => 'vendor/cmock/config/', :dst => 'vendor/cmock/config'},
        {:src => 'vendor/cmock/lib/', :dst => 'vendor/cmock/lib'},
        {:src => 'vendor/cmock/src/', :dst => 'vendor/cmock/src'},
-        {:src => 'vendor/deep_merge/lib/', :dst => 'vendor/deep_merge/lib'},
        {:src => 'vendor/diy/lib', :dst => 'vendor/diy/lib'},
        {:src => 'vendor/unity/auto/', :dst => 'vendor/unity/auto'},
        {:src => 'vendor/unity/src/', :dst => 'vendor/unity/src'},
@@ -146,16 +149,24 @@ unless (project_found)

      # We're copying in a configuration file if we haven't said not to
      if (use_configs)
-        if use_gem
-          copy_file(File.join('assets', 'project_as_gem.yml'), File.join(name, 'project.yml'), :force => force)
+        dst_yaml = File.join(name, 'project.yml')
+        src_yaml = if use_gem
+          File.join(here, 'assets', 'project_as_gem.yml')
        else
-          copy_file(File.join('assets', 'project_with_guts.yml'), File.join(name, 'project.yml'), :force => force)
          if is_windows?
            copy_file(File.join('assets', 'ceedling.cmd'), File.join(name, 'ceedling.cmd'), :force => force)
          else
            copy_file(File.join('assets', 'ceedling'), File.join(name, 'ceedling'), :force => force)
            File.chmod(0755, File.join(name, 'ceedling'))
          end
+          File.join(here, 'assets', 'project_with_guts.yml')
+        end
+
+        # Perform the actual clone of the config file, while updating the version
+        File.open(dst_yaml,'w') do |dst|
+          require File.expand_path(File.join(File.dirname(__FILE__),"..","lib","ceedling","version.rb"))
+          dst << File.read(src_yaml).gsub(":ceedling_version: '?'",":ceedling_version: #{Ceedling::Version::CEEDLING}")
+          puts " create #{dst_yaml}"
+        end
      end

@@ -167,8 +178,8 @@ unless (project_found)
      unless silent
        puts "\n"
        puts "Project '#{name}' #{force ? "upgraded" : "created"}!"
-        puts " - Tool documentation is located in vendor/ceedling/docs" if use_docs
-        puts " - Execute 'ceedling help' to view available test & build tasks"
+        puts " - Tool documentation is located in #{doc_path}" if use_docs
+        puts " - Execute 'ceedling help' from #{name} to view available test & build tasks"
        puts ''
      end
    end
@@ -206,10 +217,10 @@ unless (project_found)

    desc "version", "return the version of the tools installed"
    def version()
-      require 'ceedling/version.rb'
+      require File.expand_path(File.join(File.dirname(__FILE__),"..","lib","ceedling","version.rb"))
      puts " Ceedling:: #{Ceedling::Version::CEEDLING}"
      puts " CMock:: #{Ceedling::Version::CMOCK}"
      puts " Unity:: #{Ceedling::Version::UNITY}"
      puts " CException:: #{Ceedling::Version::CEXCEPTION}"
    end
  end
@@ -287,6 +298,8 @@ else
      options[:list_tasks] = true
    when /^-T$/
      options[:list_tasks] = true
+    when /^--tasks$/
+      options[:list_tasks] = true
    when /^project:(\w+)/
      ENV['CEEDLING_USER_PROJECT_FILE'] = "#{$1}.yml"
    else
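The config-copy rewrite above stamps the running Ceedling version into the generated project.yml. A minimal sketch of that gsub in isolation, using made-up values instead of the real Ceedling::Version constant:

  # Hypothetical illustration of the ":ceedling_version:" stamping shown in the hunk above.
  template = ":ceedling_version: '?'\n:use_test_preprocessor: FALSE"
  stamped  = template.gsub(":ceedling_version: '?'", ":ceedling_version: 0.31.1")
  # stamped => ":ceedling_version: 0.31.1\n:use_test_preprocessor: FALSE"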
@@ -9,7 +9,7 @@ class BuildInvokerUtils
  ##
  # Processes exceptions and tries to display a useful message for the user.
  #
-  # ==== Attriboops...utes
+  # ==== Attributes
  #
  # * _exception_: The exception given by a rescue statement.
  # * _context_: A symbol representing where in the build the exception
test/unit-test/vendor/ceedling/lib/ceedling/cacheinator_helper.rb (new file, 35 lines, vendored)

@@ -0,0 +1,35 @@
+
+class CacheinatorHelper
+
+  constructor :file_wrapper, :yaml_wrapper
+
+  def diff_cached_config?(cached_filepath, hash)
+    return false if ( not @file_wrapper.exist?(cached_filepath) )
+    return true if (@yaml_wrapper.load(cached_filepath) != hash)
+    return false
+  end
+
+  def diff_cached_defines?(cached_filepath, files)
+    changed_defines = false
+    current_defines = COLLECTION_DEFINES_TEST_AND_VENDOR.reject(&:empty?)
+
+    current_dependencies = Hash[files.collect { |source| [source, current_defines.dup] }]
+    if not @file_wrapper.exist?(cached_filepath)
+      @yaml_wrapper.dump(cached_filepath, current_dependencies)
+      return changed_defines
+    end
+
+    dependencies = @yaml_wrapper.load(cached_filepath)
+    common_dependencies = current_dependencies.select { |file, defines| dependencies.has_key?(file) }
+
+    if dependencies.values_at(*common_dependencies.keys) != common_dependencies.values
+      changed_defines = true
+    end
+
+    dependencies.merge!(current_dependencies)
+    @yaml_wrapper.dump(cached_filepath, dependencies)
+
+    return changed_defines
+  end
+
+end
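A rough usage sketch for the new helper (not part of the diff); it assumes the constructor gem's hash-style initializer, and the wrapper objects are stand-ins:

  # diff_cached_config? reports true only when a cached YAML file exists and differs from the current hash.
  helper = CacheinatorHelper.new(:file_wrapper => file_wrapper, :yaml_wrapper => yaml_wrapper)
  rebuild_needed = helper.diff_cached_config?('build/.cache/project.yml', current_config_hash)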
@@ -54,6 +54,7 @@ class Configurator
      :test_fixture,
      :test_includes_preprocessor,
      :test_file_preprocessor,
+      :test_file_preprocessor_directives,
      :test_dependencies_generator,
      :release_compiler,
      :release_assembler,
@@ -183,17 +184,22 @@ class Configurator
    @rake_plugins = @configurator_plugins.find_rake_plugins(config, paths_hash)
    @script_plugins = @configurator_plugins.find_script_plugins(config, paths_hash)
    config_plugins = @configurator_plugins.find_config_plugins(config, paths_hash)
-    plugin_defaults = @configurator_plugins.find_plugin_defaults(config, paths_hash)
+    plugin_yml_defaults = @configurator_plugins.find_plugin_yml_defaults(config, paths_hash)
+    plugin_hash_defaults = @configurator_plugins.find_plugin_hash_defaults(config, paths_hash)

    config_plugins.each do |plugin|
      plugin_config = @yaml_wrapper.load(plugin)
      config.deep_merge(plugin_config)
    end

-    plugin_defaults.each do |defaults|
+    plugin_yml_defaults.each do |defaults|
      @configurator_builder.populate_defaults( config, @yaml_wrapper.load(defaults) )
    end

+    plugin_hash_defaults.each do |defaults|
+      @configurator_builder.populate_defaults( config, defaults )
+    end
+
    # special plugin setting for results printing
    config[:plugins][:display_raw_test_results] = true if (config[:plugins][:display_raw_test_results].nil?)

@@ -203,10 +209,19 @@ class Configurator

  def merge_imports(config)
    if config[:import]
-      until config[:import].empty?
-        path = config[:import].shift
-        path = @system_wrapper.module_eval(path) if (path =~ RUBY_STRING_REPLACEMENT_PATTERN)
-        config.deep_merge!(@yaml_wrapper.load(path))
+      if config[:import].is_a? Array
+        until config[:import].empty?
+          path = config[:import].shift
+          path = @system_wrapper.module_eval(path) if (path =~ RUBY_STRING_REPLACEMENT_PATTERN)
+          config.deep_merge!(@yaml_wrapper.load(path))
+        end
+      else
+        config[:import].each_value do |path|
+          if !path.nil?
+            path = @system_wrapper.module_eval(path) if (path =~ RUBY_STRING_REPLACEMENT_PATTERN)
+            config.deep_merge!(@yaml_wrapper.load(path))
+          end
+        end
      end
    end
    config.delete(:import)
@@ -222,7 +237,11 @@ class Configurator
      interstitial = ((key == :path) ? File::PATH_SEPARATOR : '')
      items = ((value.class == Array) ? hash[key] : [value])

-      items.each { |item| item.replace( @system_wrapper.module_eval( item ) ) if (item =~ RUBY_STRING_REPLACEMENT_PATTERN) }
+      items.each do |item|
+        if item.is_a? String and item =~ RUBY_STRING_REPLACEMENT_PATTERN
+          item.replace( @system_wrapper.module_eval( item ) )
+        end
+      end
      hash[key] = items.join( interstitial )

      @system_wrapper.env_set( key.to_s.upcase, hash[key] )
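With the merge_imports change above, the :import section of a project file may be either a list of paths or a keyed hash of paths. A sketch of the two accepted shapes (file names are examples only):

  # Both of these now deep-merge the referenced YAML files into the project config.
  config_a = { :import => ['cfg/extra.yml', 'cfg/ci.yml'] }
  config_b = { :import => { :extra => 'cfg/extra.yml', :ci => 'cfg/ci.yml' } }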
@@ -250,8 +250,8 @@ class ConfiguratorBuilder
  def collect_test_support_source_include_vendor_paths(in_hash)
    return {
      :collection_paths_test_support_source_include_vendor =>
-        in_hash[:collection_paths_test_support_source_include] +
-        get_vendor_paths(in_hash)
+        get_vendor_paths(in_hash) +
+        in_hash[:collection_paths_test_support_source_include]
      }
  end

@@ -384,14 +384,26 @@ class ConfiguratorBuilder
  end


+  def get_vendor_defines(in_hash)
+    defines = in_hash[:unity_defines].clone
+    defines.concat(in_hash[:cmock_defines]) if (in_hash[:project_use_mocks])
+    defines.concat(in_hash[:cexception_defines]) if (in_hash[:project_use_exceptions])
+
+    return defines
+  end
+
+
+  def collect_vendor_defines(in_hash)
+    return {:collection_defines_vendor => get_vendor_defines(in_hash)}
+  end
+
+
  def collect_test_and_vendor_defines(in_hash)
-    test_defines = in_hash[:defines_test].clone
-
-    test_defines.concat(in_hash[:unity_defines])
-    test_defines.concat(in_hash[:cmock_defines]) if (in_hash[:project_use_mocks])
-    test_defines.concat(in_hash[:cexception_defines]) if (in_hash[:project_use_exceptions])
-
-    return {:collection_defines_test_and_vendor => test_defines}
+    defines = in_hash[:defines_test].clone
+    vendor_defines = get_vendor_defines(in_hash)
+    defines.concat(vendor_defines) if vendor_defines
+
+    return {:collection_defines_test_and_vendor => defines}
  end


@@ -418,28 +430,33 @@ class ConfiguratorBuilder
    # Note: Symbols passed to compiler at command line can change Unity and CException behavior / configuration;
    # we also handle those dependencies elsewhere in compilation dependencies

-    objects = [UNITY_C_FILE]
+    sources = [UNITY_C_FILE]

-    in_hash[:files_support].each { |file| objects << File.basename(file) }
+    in_hash[:files_support].each { |file| sources << file }

    # we don't include paths here because use of plugins or mixing different compilers may require different build paths
-    objects << CEXCEPTION_C_FILE if (in_hash[:project_use_exceptions])
-    objects << CMOCK_C_FILE if (in_hash[:project_use_mocks])
+    sources << CEXCEPTION_C_FILE if (in_hash[:project_use_exceptions])
+    sources << CMOCK_C_FILE if (in_hash[:project_use_mocks])

    # if we're using mocks & a unity helper is defined & that unity helper includes a source file component (not only a header of macros),
    # then link in the unity_helper object file too
    if ( in_hash[:project_use_mocks] and in_hash[:cmock_unity_helper] )
      in_hash[:cmock_unity_helper].each do |helper|
        if @file_wrapper.exist?(helper.ext(in_hash[:extension_source]))
-          objects << File.basename(helper)
+          sources << helper
        end
      end
    end

+    # create object files from all the sources
+    objects = sources.map { |file| File.basename(file) }
+
    # no build paths here so plugins can remap if necessary (i.e. path mapping happens at runtime)
    objects.map! { |object| object.ext(in_hash[:extension_object]) }

-    return { :collection_test_fixture_extra_link_objects => objects }
+    return { :collection_all_support => sources,
+             :collection_test_fixture_extra_link_objects => objects
+           }
  end
@@ -26,6 +26,7 @@ class ConfiguratorPlugins

      if is_script_plugin
        @system_wrapper.add_load_path( File.join( path, 'lib') )
+        @system_wrapper.add_load_path( File.join( path, 'config') )
      end
      break
    end
@@ -92,7 +93,7 @@ class ConfiguratorPlugins


  # gather up and return default .yml filepaths that exist on-disk
-  def find_plugin_defaults(config, plugin_paths)
+  def find_plugin_yml_defaults(config, plugin_paths)
    defaults_with_path = []

    config[:plugins][:enabled].each do |plugin|
@@ -108,4 +109,23 @@ class ConfiguratorPlugins
    return defaults_with_path
  end

+
+  # gather up and return
+  def find_plugin_hash_defaults(config, plugin_paths)
+    defaults_hash= []
+
+    config[:plugins][:enabled].each do |plugin|
+      if path = plugin_paths[(plugin + '_path').to_sym]
+        default_path = File.join(path, "config", "defaults_#{plugin}.rb")
+        if @file_wrapper.exist?(default_path)
+          @system_wrapper.require_file( "defaults_#{plugin}.rb")
+
+          object = eval("get_default_config()")
+          defaults_hash << object
+        end
+      end
+    end
+
+    return defaults_hash
+  end
+
 end
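find_plugin_hash_defaults expects an enabled plugin to ship a config/defaults_<plugin>.rb that defines get_default_config(). A hypothetical example of such a file, for illustration only (the plugin name and paths are invented):

  # config/defaults_fancy_report.rb (hypothetical plugin)
  def get_default_config()
    return { :fancy_report => { :output => 'build/artifacts/report.txt' } }
  end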
@@ -39,6 +39,7 @@ class ConfiguratorSetup
    flattened_config.merge!(@configurator_builder.collect_headers(flattened_config))
    flattened_config.merge!(@configurator_builder.collect_release_existing_compilation_input(flattened_config))
    flattened_config.merge!(@configurator_builder.collect_all_existing_compilation_input(flattened_config))
+    flattened_config.merge!(@configurator_builder.collect_vendor_defines(flattened_config))
    flattened_config.merge!(@configurator_builder.collect_test_and_vendor_defines(flattened_config))
    flattened_config.merge!(@configurator_builder.collect_release_and_vendor_defines(flattened_config))
    flattened_config.merge!(@configurator_builder.collect_release_artifact_extra_link_objects(flattened_config))
@@ -95,3 +95,5 @@ NULL_FILE_PATH = '/dev/null'

 TESTS_BASE_PATH = TEST_ROOT_NAME
 RELEASE_BASE_PATH = RELEASE_ROOT_NAME
+
+VENDORS_FILES = %w(unity UnityHelper cmock CException).freeze
@@ -7,17 +7,20 @@ CEEDLING_VENDOR = File.expand_path(File.dirname(__FILE__) + '/../../vendor') unl
 CEEDLING_PLUGINS = [] unless defined? CEEDLING_PLUGINS

 DEFAULT_TEST_COMPILER_TOOL = {
-  :executable => FilePathUtils.os_executable_ext('gcc').freeze,
+  :executable => ENV['CC'].nil? ? FilePathUtils.os_executable_ext('gcc').freeze : ENV['CC'].split[0],
   :name => 'default_test_compiler'.freeze,
   :stderr_redirect => StdErrRedirect::NONE.freeze,
   :background_exec => BackgroundExec::NONE.freeze,
   :optional => false.freeze,
   :arguments => [
+    ENV['CC'].nil? ? "" : ENV['CC'].split[1..-1],
+    ENV['CPPFLAGS'].nil? ? "" : ENV['CPPFLAGS'].split,
     {"-I\"$\"" => 'COLLECTION_PATHS_TEST_SUPPORT_SOURCE_INCLUDE_VENDOR'}.freeze,
     {"-I\"$\"" => 'COLLECTION_PATHS_TEST_TOOLCHAIN_INCLUDE'}.freeze,
     {"-D$" => 'COLLECTION_DEFINES_TEST_AND_VENDOR'}.freeze,
     "-DGNU_COMPILER".freeze,
     "-g".freeze,
+    ENV['CFLAGS'].nil? ? "" : ENV['CFLAGS'].split,
     "-c \"${1}\"".freeze,
     "-o \"${2}\"".freeze,
     # gcc's list file output options are complex; no use of ${3} parameter in default config
@@ -27,16 +30,21 @@ DEFAULT_TEST_COMPILER_TOOL = {
 }

 DEFAULT_TEST_LINKER_TOOL = {
-  :executable => FilePathUtils.os_executable_ext('gcc').freeze,
+  :executable => ENV['CCLD'].nil? ? FilePathUtils.os_executable_ext('gcc').freeze : ENV['CCLD'].split[0],
   :name => 'default_test_linker'.freeze,
   :stderr_redirect => StdErrRedirect::NONE.freeze,
   :background_exec => BackgroundExec::NONE.freeze,
   :optional => false.freeze,
   :arguments => [
+    ENV['CCLD'].nil? ? "" : ENV['CCLD'].split[1..-1],
+    ENV['CFLAGS'].nil? ? "" : ENV['CFLAGS'].split,
+    ENV['LDFLAGS'].nil? ? "" : ENV['LDFLAGS'].split,
     "\"${1}\"".freeze,
+    "${5}".freeze,
     "-o \"${2}\"".freeze,
     "".freeze,
-    "${4}".freeze
+    "${4}".freeze,
+    ENV['LDLIBS'].nil? ? "" : ENV['LDLIBS'].split
   ].freeze
 }

@@ -50,12 +58,14 @@ DEFAULT_TEST_FIXTURE_TOOL = {
 }

 DEFAULT_TEST_INCLUDES_PREPROCESSOR_TOOL = {
-  :executable => FilePathUtils.os_executable_ext('gcc').freeze,
+  :executable => ENV['CC'].nil? ? FilePathUtils.os_executable_ext('gcc').freeze : ENV['CC'].split[0],
   :name => 'default_test_includes_preprocessor'.freeze,
   :stderr_redirect => StdErrRedirect::NONE.freeze,
   :background_exec => BackgroundExec::NONE.freeze,
   :optional => false.freeze,
   :arguments => [
+    ENV['CC'].nil? ? "" : ENV['CC'].split[1..-1],
+    ENV['CPPFLAGS'].nil? ? "" : ENV['CPPFLAGS'].split,
     '-E'.freeze, # OSX clang
     '-MM'.freeze,
     '-MG'.freeze,
@@ -67,18 +77,38 @@ DEFAULT_TEST_INCLUDES_PREPROCESSOR_TOOL = {
     {"-D$" => 'COLLECTION_DEFINES_TEST_AND_VENDOR'}.freeze,
     {"-D$" => 'DEFINES_TEST_PREPROCESS'}.freeze,
     "-DGNU_COMPILER".freeze, # OSX clang
-    '-w'.freeze,
     # '-nostdinc'.freeze, # disabled temporarily due to stdio access violations on OSX
     "\"${1}\"".freeze
   ].freeze
 }

 DEFAULT_TEST_FILE_PREPROCESSOR_TOOL = {
-  :executable => FilePathUtils.os_executable_ext('gcc').freeze,
+  :executable => ENV['CC'].nil? ? FilePathUtils.os_executable_ext('gcc').freeze : ENV['CC'].split[0],
   :name => 'default_test_file_preprocessor'.freeze,
   :stderr_redirect => StdErrRedirect::NONE.freeze,
   :background_exec => BackgroundExec::NONE.freeze,
   :optional => false.freeze,
+  :arguments => [
+    ENV['CC'].nil? ? "" : ENV['CC'].split[1..-1],
+    ENV['CPPFLAGS'].nil? ? "" : ENV['CPPFLAGS'].split,
+    '-E'.freeze,
+    {"-I\"$\"" => 'COLLECTION_PATHS_TEST_SUPPORT_SOURCE_INCLUDE_VENDOR'}.freeze,
+    {"-I\"$\"" => 'COLLECTION_PATHS_TEST_TOOLCHAIN_INCLUDE'}.freeze,
+    {"-D$" => 'COLLECTION_DEFINES_TEST_AND_VENDOR'}.freeze,
+    {"-D$" => 'DEFINES_TEST_PREPROCESS'}.freeze,
+    "-DGNU_COMPILER".freeze,
+    # '-nostdinc'.freeze, # disabled temporarily due to stdio access violations on OSX
+    "\"${1}\"".freeze,
+    "-o \"${2}\"".freeze
+  ].freeze
+}
+
+DEFAULT_TEST_FILE_PREPROCESSOR_DIRECTIVES_TOOL = {
+  :executable => FilePathUtils.os_executable_ext('gcc').freeze,
+  :name => 'default_test_file_preprocessor_directives'.freeze,
+  :stderr_redirect => StdErrRedirect::NONE.freeze,
+  :background_exec => BackgroundExec::NONE.freeze,
+  :optional => false.freeze,
   :arguments => [
     '-E'.freeze,
     {"-I\"$\"" => 'COLLECTION_PATHS_TEST_SUPPORT_SOURCE_INCLUDE_VENDOR'}.freeze,
@@ -86,6 +116,7 @@ DEFAULT_TEST_FILE_PREPROCESSOR_TOOL = {
     {"-D$" => 'COLLECTION_DEFINES_TEST_AND_VENDOR'}.freeze,
     {"-D$" => 'DEFINES_TEST_PREPROCESS'}.freeze,
     "-DGNU_COMPILER".freeze,
+    '-fdirectives-only'.freeze,
     # '-nostdinc'.freeze, # disabled temporarily due to stdio access violations on OSX
     "\"${1}\"".freeze,
     "-o \"${2}\"".freeze
@@ -100,12 +131,14 @@ else
 end

 DEFAULT_TEST_DEPENDENCIES_GENERATOR_TOOL = {
-  :executable => FilePathUtils.os_executable_ext('gcc').freeze,
+  :executable => ENV['CC'].nil? ? FilePathUtils.os_executable_ext('gcc').freeze : ENV['CC'].split[0],
   :name => 'default_test_dependencies_generator'.freeze,
   :stderr_redirect => StdErrRedirect::NONE.freeze,
   :background_exec => BackgroundExec::NONE.freeze,
   :optional => false.freeze,
   :arguments => [
+    ENV['CC'].nil? ? "" : ENV['CC'].split[1..-1],
+    ENV['CPPFLAGS'].nil? ? "" : ENV['CPPFLAGS'].split,
     '-E'.freeze,
     {"-I\"$\"" => 'COLLECTION_PATHS_TEST_SUPPORT_SOURCE_INCLUDE_VENDOR'}.freeze,
     {"-I\"$\"" => 'COLLECTION_PATHS_TEST_TOOLCHAIN_INCLUDE'}.freeze,
@@ -123,12 +156,14 @@ DEFAULT_TEST_DEPENDENCIES_GENERATOR_TOOL = {
 }

 DEFAULT_RELEASE_DEPENDENCIES_GENERATOR_TOOL = {
-  :executable => FilePathUtils.os_executable_ext('gcc').freeze,
+  :executable => ENV['CC'].nil? ? FilePathUtils.os_executable_ext('gcc').freeze : ENV['CC'].split[0],
   :name => 'default_release_dependencies_generator'.freeze,
   :stderr_redirect => StdErrRedirect::NONE.freeze,
   :background_exec => BackgroundExec::NONE.freeze,
   :optional => false.freeze,
   :arguments => [
+    ENV['CC'].nil? ? "" : ENV['CC'].split[1..-1],
+    ENV['CPPFLAGS'].nil? ? "" : ENV['CPPFLAGS'].split,
     '-E'.freeze,
     {"-I\"$\"" => 'COLLECTION_PATHS_SOURCE_INCLUDE_VENDOR'}.freeze,
     {"-I\"$\"" => 'COLLECTION_PATHS_RELEASE_TOOLCHAIN_INCLUDE'}.freeze,
@@ -147,16 +182,19 @@ DEFAULT_RELEASE_DEPENDENCIES_GENERATOR_TOOL = {


 DEFAULT_RELEASE_COMPILER_TOOL = {
-  :executable => FilePathUtils.os_executable_ext('gcc').freeze,
+  :executable => ENV['CC'].nil? ? FilePathUtils.os_executable_ext('gcc').freeze : ENV['CC'].split[0],
   :name => 'default_release_compiler'.freeze,
   :stderr_redirect => StdErrRedirect::NONE.freeze,
   :background_exec => BackgroundExec::NONE.freeze,
   :optional => false.freeze,
   :arguments => [
+    ENV['CC'].nil? ? "" : ENV['CC'].split[1..-1],
+    ENV['CPPFLAGS'].nil? ? "" : ENV['CPPFLAGS'].split,
     {"-I\"$\"" => 'COLLECTION_PATHS_SOURCE_INCLUDE_VENDOR'}.freeze,
     {"-I\"$\"" => 'COLLECTION_PATHS_RELEASE_TOOLCHAIN_INCLUDE'}.freeze,
     {"-D$" => 'COLLECTION_DEFINES_RELEASE_AND_VENDOR'}.freeze,
     "-DGNU_COMPILER".freeze,
+    ENV['CFLAGS'].nil? ? "" : ENV['CFLAGS'].split,
     "-c \"${1}\"".freeze,
     "-o \"${2}\"".freeze,
     # gcc's list file output options are complex; no use of ${3} parameter in default config
@@ -166,12 +204,14 @@ DEFAULT_RELEASE_COMPILER_TOOL = {
 }

 DEFAULT_RELEASE_ASSEMBLER_TOOL = {
-  :executable => FilePathUtils.os_executable_ext('as').freeze,
+  :executable => ENV['AS'].nil? ? FilePathUtils.os_executable_ext('as').freeze : ENV['AS'].split[0],
   :name => 'default_release_assembler'.freeze,
   :stderr_redirect => StdErrRedirect::NONE.freeze,
   :background_exec => BackgroundExec::NONE.freeze,
   :optional => false.freeze,
   :arguments => [
+    ENV['AS'].nil? ? "" : ENV['AS'].split[1..-1],
+    ENV['ASFLAGS'].nil? ? "" : ENV['ASFLAGS'].split,
     {"-I\"$\"" => 'COLLECTION_PATHS_SOURCE_AND_INCLUDE'}.freeze,
     "\"${1}\"".freeze,
     "-o \"${2}\"".freeze,
@@ -179,16 +219,21 @@ DEFAULT_RELEASE_ASSEMBLER_TOOL = {
 }

 DEFAULT_RELEASE_LINKER_TOOL = {
-  :executable => FilePathUtils.os_executable_ext('gcc').freeze,
+  :executable => ENV['CCLD'].nil? ? FilePathUtils.os_executable_ext('gcc').freeze : ENV['CCLD'].split[0],
   :name => 'default_release_linker'.freeze,
   :stderr_redirect => StdErrRedirect::NONE.freeze,
   :background_exec => BackgroundExec::NONE.freeze,
   :optional => false.freeze,
   :arguments => [
+    ENV['CCLD'].nil? ? "" : ENV['CCLD'].split[1..-1],
+    ENV['CFLAGS'].nil? ? "" : ENV['CFLAGS'].split,
+    ENV['LDFLAGS'].nil? ? "" : ENV['LDFLAGS'].split,
     "\"${1}\"".freeze,
+    "${5}".freeze,
     "-o \"${2}\"".freeze,
     "".freeze,
-    "${4}".freeze
+    "${4}".freeze,
+    ENV['LDLIBS'].nil? ? "" : ENV['LDLIBS'].split
   ].freeze
 }

@@ -205,6 +250,7 @@ DEFAULT_TOOLS_TEST_PREPROCESSORS = {
   :tools => {
     :test_includes_preprocessor => DEFAULT_TEST_INCLUDES_PREPROCESSOR_TOOL,
     :test_file_preprocessor => DEFAULT_TEST_FILE_PREPROCESSOR_TOOL,
+    :test_file_preprocessor_directives => DEFAULT_TEST_FILE_PREPROCESSOR_DIRECTIVES_TOOL,
   }
 }

@@ -245,8 +291,10 @@ DEFAULT_CEEDLING_CONFIG = {
     :compile_threads => 1,
     :test_threads => 1,
     :use_test_preprocessor => false,
+    :use_preprocessor_directives => false,
     :use_deep_dependencies => false,
     :generate_deep_dependencies => true, # only applicable if use_deep_dependencies is true
+    :auto_link_deep_dependencies => false,
     :test_file_prefix => 'test_',
     :options_paths => [],
     :release_build => false,
@@ -263,6 +311,7 @@ DEFAULT_CEEDLING_CONFIG = {
     :source => [], # must be populated by user
     :support => [],
     :include => [],
+    :libraries => [],
     :test_toolchain_include => [],
     :release_toolchain_include => [],
   },
@@ -290,6 +339,8 @@ DEFAULT_CEEDLING_CONFIG = {
   },

   :libraries => {
+    :flag => '-l${1}',
+    :path_flag => '-L ${1}',
     :test => [],
     :test_preprocess => [],
     :release => [],
@@ -303,6 +354,7 @@ DEFAULT_CEEDLING_CONFIG = {
     :source => '.c',
     :assembly => '.s',
     :object => '.o',
+    :libraries => ['.a','.so'],
     :executable => ( SystemWrapper.windows? ? EXTENSION_WIN_EXE : EXTENSION_NONWIN_EXE ),
     :map => '.map',
     :list => '.lst',
@@ -345,6 +397,7 @@ DEFAULT_CEEDLING_CONFIG = {
     },
     :test_includes_preprocessor => { :arguments => [] },
     :test_file_preprocessor => { :arguments => [] },
+    :test_file_preprocessor_directives => { :arguments => [] },
     :test_dependencies_generator => { :arguments => [] },
     :release_compiler => { :arguments => [] },
     :release_linker => { :arguments => [] },
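The ENV-based defaults above all follow the same pattern: the first word of the environment variable becomes the tool executable and the remaining words become leading arguments, so a compound value such as CC="ccache gcc" keeps working. A small sketch of that split (values are illustrative, not from the diff):

  ENV['CC'] = 'ccache gcc'
  executable = ENV['CC'].nil? ? 'gcc' : ENV['CC'].split[0]      # => "ccache"
  extra_args = ENV['CC'].nil? ? ""    : ENV['CC'].split[1..-1]  # => ["gcc"]
  # CPPFLAGS, CFLAGS, LDFLAGS, LDLIBS and ASFLAGS are split the same way and appended to :arguments.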
@@ -86,13 +86,12 @@ class Dependinator


  def enhance_results_dependencies(result_filepath)
-    @rake_wrapper[result_filepath].enhance( [@configurator.project_test_force_rebuild_filepath] ) if (@project_config_manager.test_config_changed ||
-                                                                                                      @project_config_manager.test_defines_changed)
+    @rake_wrapper[result_filepath].enhance( [@configurator.project_test_force_rebuild_filepath] ) if @project_config_manager.test_config_changed
  end


-  def setup_test_executable_dependencies(test, objects)
-    @rake_wrapper.create_file_task( @file_path_utils.form_test_executable_filepath(test), objects )
+  def enhance_test_executable_dependencies(test, objects)
+    @rake_wrapper[ @file_path_utils.form_test_executable_filepath(test) ].enhance( objects )
  end

 end
@@ -25,10 +25,12 @@ class FileFinderHelper

    end

-    case (complain)
-      when :error then blow_up(file_name, extra_message) if (file_to_find.nil?)
-      when :warn then gripe(file_name, extra_message) if (file_to_find.nil?)
-      #when :ignore then
+    if file_to_find.nil?
+      case (complain)
+        when :error then blow_up(file_name, extra_message)
+        when :warn then gripe(file_name, extra_message)
+        #when :ignore then
+      end
    end

    return file_to_find
@@ -21,9 +21,11 @@ class FilePathUtils

  # standardize path to use '/' path separator & have no trailing path separator
  def self.standardize(path)
-    path.strip!
-    path.gsub!(/\\/, '/')
-    path.chomp!('/')
+    if path.is_a? String
+      path.strip!
+      path.gsub!(/\\/, '/')
+      path.chomp!('/')
+    end
    return path
  end
@@ -33,15 +33,15 @@ class FileWrapper
  end

  def rm_f(filepath, options={})
-    FileUtils.rm_f(filepath, options)
+    FileUtils.rm_f(filepath, **options)
  end

  def rm_r(filepath, options={})
-    FileUtils.rm_r(filepath, options={})
+    FileUtils.rm_r(filepath, **options={})
  end

  def cp(source, destination, options={})
-    FileUtils.cp(source, destination, options)
+    FileUtils.cp(source, destination, **options)
  end

  def compare(from, to)
@@ -59,7 +59,7 @@ class FileWrapper
  end

  def touch(filepath, options={})
-    FileUtils.touch(filepath, options)
+    FileUtils.touch(filepath, **options)
  end

  def write(filepath, contents, flags='w')
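The double splat matters on Ruby 3, where FileUtils treats its options as keyword arguments; passing the hash positionally raises ArgumentError there. A short illustration (the file name is made up):

  require 'fileutils'
  opts = { :verbose => true }
  FileUtils.touch('build/.stamp', **opts)   # works on Ruby 2.x and 3.x
  # FileUtils.touch('build/.stamp', opts)   # Ruby 3.x: ArgumentError (wrong number of arguments)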
@@ -101,19 +101,21 @@ class Generator
      shell_result = ex.shell_result
      raise ex
    ensure
+      arg_hash[:shell_command] = command[:line]
      arg_hash[:shell_result] = shell_result
      @plugin_manager.post_compile_execute(arg_hash)
    end
  end

-  def generate_executable_file(tool, context, objects, executable, map='', libraries=[])
+  def generate_executable_file(tool, context, objects, executable, map='', libraries=[], libpaths=[])
    shell_result = {}
    arg_hash = { :tool => tool,
                 :context => context,
                 :objects => objects,
                 :executable => executable,
                 :map => map,
-                 :libraries => libraries
+                 :libraries => libraries,
+                 :libpaths => libpaths
               }

    @plugin_manager.pre_link_execute(arg_hash)
@@ -125,7 +127,8 @@ class Generator
      arg_hash[:objects],
      arg_hash[:executable],
      arg_hash[:map],
-      arg_hash[:libraries]
+      arg_hash[:libraries],
+      arg_hash[:libpaths]
      )
    @streaminator.stdout_puts("Command: #{command}", Verbosity::DEBUG)
@@ -37,6 +37,10 @@ class GeneratorTestResults
        elements = extract_line_elements(line, results[:source][:file])
        results[:successes] << elements[0]
        results[:stdout] << elements[1] if (!elements[1].nil?)
+      when /(:PASS \(.* ms\)$)/
+        elements = extract_line_elements(line, results[:source][:file])
+        results[:successes] << elements[0]
+        results[:stdout] << elements[1] if (!elements[1].nil?)
      when /(:FAIL)/
        elements = extract_line_elements(line, results[:source][:file])
        results[:failures] << elements[0]
@@ -73,6 +77,7 @@ class GeneratorTestResults
    # handle anything preceding filename in line as extra output to be collected
    stdout = nil
    stdout_regex = /(.+)#{Regexp.escape(filename)}.+/i
+    unity_test_time = 0

    if (line =~ stdout_regex)
      stdout = $1.clone
@@ -82,8 +87,14 @@ class GeneratorTestResults
    # collect up test results minus and extra output
    elements = (line.strip.split(':'))[1..-1]

-    return {:test => elements[1], :line => elements[0].to_i, :message => (elements[3..-1].join(':')).strip}, stdout if elements.size >= 3
-    return {:test => '???', :line => -1, :message => nil} #fallback safe option. TODO better handling
+    # find timestamp if available
+    if (elements[-1] =~ / \((\d*(?:\.\d*)?) ms\)/)
+      unity_test_time = $1.to_f / 1000
+      elements[-1].sub!(/ \((\d*(?:\.\d*)?) ms\)/, '')
+    end
+
+    return {:test => elements[1], :line => elements[0].to_i, :message => (elements[3..-1].join(':')).strip, :unity_test_time => unity_test_time}, stdout if elements.size >= 3
+    return {:test => '???', :line => -1, :message => nil, :unity_test_time => unity_test_time} #fallback safe option. TODO better handling
  end

 end
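The added regex pulls Unity's optional per-test duration out of a result line and reports it in seconds. A small sketch using a made-up result line:

  line = 'test_fifo.c:42:test_fifo_peek:PASS (1.25 ms)'
  element = line.strip.split(':')[-1]              # "PASS (1.25 ms)"
  if element =~ / \((\d*(?:\.\d*)?) ms\)/
    unity_test_time = $1.to_f / 1000               # => 0.00125 seconds
    element.sub!(/ \((\d*(?:\.\d*)?) ms\)/, '')    # element => "PASS"
  end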
@@ -44,13 +44,15 @@ class GeneratorTestRunner
   def generate(module_name, runner_filepath, test_cases, mock_list, test_file_includes=[])
     require 'generate_test_runner.rb'

+    header_extension = @configurator.extension_header
+
     #actually build the test runner using Unity's test runner generator
     #(there is no need to use preprocessor here because we've already looked up test cases and are passing them in here)
     @test_runner_generator ||= UnityTestRunnerGenerator.new( @configurator.get_runner_config )
     @test_runner_generator.generate( module_name,
                                      runner_filepath,
                                      test_cases,
-                                     mock_list,
-                                     test_file_includes)
+                                     mock_list.map{|f| File.basename(f,'.*')+header_extension},
+                                     test_file_includes.map{|f| File.basename(f,'.*')+header_extension})
   end
 end
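The runner generator is now handed header names rather than full mock source paths; only the basename plus the configured header extension is kept. A small sketch, assuming `.h` as the header extension and hypothetical mock paths:

```ruby
header_extension = '.h'   # assumed value of @configurator.extension_header
mock_list = ['build/test/mocks/mock_adc.c', 'build/test/mocks/mock_uart.c']   # hypothetical paths
mock_list.map { |f| File.basename(f, '.*') + header_extension }
# => ["mock_adc.h", "mock_uart.h"]
```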
@@ -235,6 +235,8 @@ preprocessinator:
     - task_invoker
     - file_path_utils
     - yaml_wrapper
+    - project_config_manager
+    - configurator

 preprocessinator_helper:
   compose:
@@ -252,6 +254,7 @@ preprocessinator_includes_handler:
     - file_path_utils
     - yaml_wrapper
     - file_wrapper
+    - file_finder

 preprocessinator_file_handler:
   compose:
56
test/unit-test/vendor/ceedling/lib/ceedling/preprocessinator.rb
vendored
Normal file
@@ -0,0 +1,56 @@
+
+class Preprocessinator
+
+  constructor :preprocessinator_helper, :preprocessinator_includes_handler, :preprocessinator_file_handler, :task_invoker, :file_path_utils, :yaml_wrapper, :project_config_manager, :configurator
+
+
+  def setup
+    # fashion ourselves callbacks @preprocessinator_helper can use
+    @preprocess_includes_proc = Proc.new { |filepath| self.preprocess_shallow_includes(filepath) }
+    @preprocess_mock_file_proc = Proc.new { |filepath| self.preprocess_file(filepath) }
+    @preprocess_test_file_directives_proc = Proc.new { |filepath| self.preprocess_file_directives(filepath) }
+    @preprocess_test_file_proc = Proc.new { |filepath| self.preprocess_file(filepath) }
+  end
+
+  def preprocess_shallow_source_includes(test)
+    @preprocessinator_helper.preprocess_source_includes(test)
+  end
+
+  def preprocess_test_and_invoke_test_mocks(test)
+    @preprocessinator_helper.preprocess_includes(test, @preprocess_includes_proc)
+
+    mocks_list = @preprocessinator_helper.assemble_mocks_list(test)
+
+    @project_config_manager.process_test_defines_change(mocks_list)
+
+    @preprocessinator_helper.preprocess_mockable_headers(mocks_list, @preprocess_mock_file_proc)
+
+    @task_invoker.invoke_test_mocks(mocks_list)
+
+    if (@configurator.project_use_preprocessor_directives)
+      @preprocessinator_helper.preprocess_test_file(test, @preprocess_test_file_directives_proc)
+    else
+      @preprocessinator_helper.preprocess_test_file(test, @preprocess_test_file_proc)
+    end
+
+    return mocks_list
+  end
+
+  def preprocess_shallow_includes(filepath)
+    includes = @preprocessinator_includes_handler.extract_includes(filepath)
+
+    @preprocessinator_includes_handler.write_shallow_includes_list(
+      @file_path_utils.form_preprocessed_includes_list_filepath(filepath), includes)
+  end
+
+  def preprocess_file(filepath)
+    @preprocessinator_includes_handler.invoke_shallow_includes_list(filepath)
+    @preprocessinator_file_handler.preprocess_file( filepath, @yaml_wrapper.load(@file_path_utils.form_preprocessed_includes_list_filepath(filepath)) )
+  end
+
+  def preprocess_file_directives(filepath)
+    @preprocessinator_includes_handler.invoke_shallow_includes_list( filepath )
+    @preprocessinator_file_handler.preprocess_file_directives( filepath,
+      @yaml_wrapper.load( @file_path_utils.form_preprocessed_includes_list_filepath( filepath ) ) )
+  end
+end
@@ -16,6 +16,7 @@ class PreprocessinatorExtractor

     lines = []
     File.readlines(filepath).each do |line|
+      line.encode!('UTF-8', 'binary', invalid: :replace, undef: :replace, replace: '')
       if found_file and not line =~ not_pragma
         lines << line
       else
@@ -27,4 +28,28 @@ class PreprocessinatorExtractor

     return lines
   end
+
+  def extract_base_file_from_preprocessed_directives(filepath)
+    # preprocessing by way of toolchain preprocessor eliminates directives only
+    # like #ifdef's and leave other code
+
+    # iterate through all lines and only get last chunk of file after a last
+    # '#'line containing file name of our filepath
+    base_name = File.basename(filepath)
+    pattern = /^#.*(\s|\/|\\|\")#{Regexp.escape(base_name)}/
+    found_file = false # have we found the file we care about?
+
+    lines = []
+    File.readlines(filepath).each do |line|
+      line.encode!('UTF-8', 'binary', invalid: :replace, undef: :replace, replace: '')
+      lines << line
+
+      if line =~ pattern
+        lines = []
+      end
+    end
+
+    return lines
+  end
 end
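The new `extract_base_file_from_preprocessed_directives` leans on the `#`-prefixed linemarkers a C preprocessor writes into its output: it keeps only the chunk that follows the last marker naming the original file. A small illustration with GCC-style linemarkers (the marker format shown is an assumption about the toolchain output, not something Ceedling mandates):

```ruby
preprocessed = [
  '# 1 "build/test/preprocess/files/test_foo.c"',
  '# 1 "some_header.h" 1',
  'int helper(void);',
  '# 5 "build/test/preprocess/files/test_foo.c" 2',
  'void test_bar(void) {}'
]

pattern = /^#.*(\s|\/|\\|\")#{Regexp.escape('test_foo.c')}/
lines = []
preprocessed.each do |line|
  lines << line
  lines = [] if line =~ pattern   # reset on every marker for our file; keep only the last chunk
end
lines   # => ["void test_bar(void) {}"]
```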
@@ -18,4 +18,17 @@ class PreprocessinatorFileHandler
     @file_wrapper.write(preprocessed_filepath, contents.join("\n"))
   end
+
+  def preprocess_file_directives(filepath, includes)
+    preprocessed_filepath = @file_path_utils.form_preprocessed_file_filepath(filepath)
+
+    command = @tool_executor.build_command_line(@configurator.tools_test_file_preprocessor_directives, [], filepath, preprocessed_filepath)
+    @tool_executor.exec(command[:line], command[:options])
+
+    contents = @preprocessinator_extractor.extract_base_file_from_preprocessed_directives(preprocessed_filepath)
+
+    includes.each{|include| contents.unshift("#include \"#{include}\"")}
+
+    @file_wrapper.write(preprocessed_filepath, contents.join("\n"))
+  end
+
 end
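`preprocess_file_directives` runs a dedicated `tools_test_file_preprocessor_directives` tool so that conditional directives are resolved while the rest of the test stays untouched, then re-inserts the original includes at the top of the extracted body. With GCC such a pass is commonly spelled `gcc -E -fdirectives-only`, but the actual tool definition lives in the project defaults and may differ. A sketch of the include re-insertion only, with hypothetical values:

```ruby
# Illustrative only: how the extracted body is stitched back together.
contents = ['void test_bar(void) {}']     # hypothetical result of the extractor
includes = ['unity.h', 'mock_adc.h']      # hypothetical include list for this test
includes.each { |inc| contents.unshift("#include \"#{inc}\"") }
puts contents.join("\n")
# #include "mock_adc.h"
# #include "unity.h"
# void test_bar(void) {}
```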
@@ -15,6 +15,10 @@ class PreprocessinatorHelper
     end
   end

+  def preprocess_source_includes(test)
+    @test_includes_extractor.parse_test_file_source_include(test)
+  end
+
   def assemble_mocks_list(test)
     return @file_path_utils.form_mocks_source_filelist( @test_includes_extractor.lookup_raw_mock_list(test) )
   end
@@ -2,7 +2,7 @@

 class PreprocessinatorIncludesHandler

-  constructor :configurator, :tool_executor, :task_invoker, :file_path_utils, :yaml_wrapper, :file_wrapper
+  constructor :configurator, :tool_executor, :task_invoker, :file_path_utils, :yaml_wrapper, :file_wrapper, :file_finder
   @@makefile_cache = {}

   # shallow includes: only those headers a source file explicitly includes
@@ -65,6 +65,7 @@ class PreprocessinatorIncludesHandler
     to_process = [filepath]
     ignore_list = []
     list = []
+    all_mocks = []

     include_paths = @configurator.project_config_hash[:collection_paths_include]
     include_paths = [] if include_paths.nil?
@@ -73,12 +74,10 @@ class PreprocessinatorIncludesHandler
     while to_process.length > 0
       target = to_process.shift()
       ignore_list << target
-      # puts "[HELL] Processing: \t\t#{target}"
-      new_deps, new_to_process = extract_includes_helper(target, include_paths, ignore_list)
+      new_deps, new_to_process, all_mocks = extract_includes_helper(target, include_paths, ignore_list, all_mocks)
       list += new_deps
       to_process += new_to_process
-      if (!@configurator.project_config_hash.has_key?(:project_auto_link_deep_dependencies) or
-          !@configurator.project_config_hash[:project_auto_link_deep_dependencies])
+      if !@configurator.project_config_hash[:project_auto_link_deep_dependencies]
         break
       else
         list = list.uniq()
@@ -89,93 +88,102 @@ class PreprocessinatorIncludesHandler
     return list
   end

-  def extract_includes_helper(filepath, include_paths, ignore_list)
+  def extract_includes_helper(filepath, include_paths, ignore_list, mocks)
     # Extract the dependencies from the make rule
-    hdr_ext = @configurator.extension_header
     make_rule = self.form_shallow_dependencies_rule(filepath)
-    dependencies = make_rule.split.find_all {|path| path.end_with?(hdr_ext) }.uniq
-    dependencies.map! {|hdr| hdr.gsub('\\','/') }
+    target_file = make_rule.split[0].gsub(':', '').gsub('\\','/')
+    base = File.basename(target_file, File.extname(target_file))
+    make_rule_dependencies = make_rule.gsub(/.*\b#{Regexp.escape(base)}\S*/, '').gsub(/\\$/, '')

-    # Separate the real files form the annotated ones and remove the '@@@@'
-    annotated_headers, real_headers = dependencies.partition {|hdr| hdr =~ /^@@@@/ }
-    annotated_headers.map! {|hdr| hdr.gsub('@@@@','') }
-    # Matching annotated_headers values against real_headers to ensure that
-    # annotated_headers contain full path entries (as returned by make rule)
-    annotated_headers.map! {|hdr| real_headers.find {|real_hdr| !real_hdr.match(/(.*\/)?#{Regexp.escape(hdr)}/).nil? } }
-    annotated_headers = annotated_headers.compact
-
-    # Find which of our annotated headers are "real" dependencies. This is
-    # intended to weed out dependencies that have been removed due to build
-    # options defined in the project yaml and/or in the headers themselves.
-    list = annotated_headers.find_all do |annotated_header|
-      # find the index of the "real" include that matches the annotated one.
-      idx = real_headers.find_index do |real_header|
-        real_header =~ /^(.*\/)?#{Regexp.escape(annotated_header)}$/
-      end
-      # If we found a real include, delete it from the array and return it,
-      # otherwise return nil. Since nil is falsy this has the effect of making
-      # find_all return only the annotated headers for which a real include was
-      # found/deleted
-      idx ? real_headers.delete_at(idx) : nil
-    end.compact
-
-    # Extract direct dependencies that were also added
-    src_ext = @configurator.extension_source
-    sdependencies = make_rule.split.find_all {|path| path.end_with?(src_ext) }.uniq
-    sdependencies.map! {|hdr| hdr.gsub('\\','/') }
-    list += sdependencies
+    # Extract the headers dependencies from the make rule
+    hdr_ext = @configurator.extension_header
+    headers_dependencies = make_rule_dependencies.split.find_all {|path| path.end_with?(hdr_ext) }.uniq
+    headers_dependencies.map! {|hdr| hdr.gsub('\\','/') }
+    full_path_headers_dependencies = extract_full_path_dependencies(headers_dependencies)
+
+    # Extract the sources dependencies from the make rule
+    src_ext = @configurator.extension_source
+    sources_dependencies = make_rule_dependencies.split.find_all {|path| path.end_with?(src_ext) }.uniq
+    sources_dependencies.map! {|src| src.gsub('\\','/') }
+    full_path_sources_dependencies = extract_full_path_dependencies(sources_dependencies)
+
+    list = full_path_headers_dependencies + full_path_sources_dependencies
+
+    mock_prefix = @configurator.project_config_hash[:cmock_mock_prefix]
+    # Creating list of mocks
+    mocks += full_path_headers_dependencies.find_all do |header|
+      File.basename(header) =~ /^#{mock_prefix}.*$/
+    end.compact
+
+    # ignore real file when both mock and real file exist
+    mocks.each do |mock|
+      list.each do |filename|
+        if File.basename(filename) == File.basename(mock).sub(mock_prefix, '')
+          ignore_list << filename
+        end
+      end
+    end.compact
+
+    # Filtering list of final includes to only include mocks and anything that is NOT in the ignore_list
+    list = list.select do |item|
+      mocks.include? item or !(ignore_list.any? { |ignore_item| !item.match(/^(.*\/)?#{Regexp.escape(ignore_item)}$/).nil? })
+    end

     to_process = []

-    if @configurator.project_config_hash.has_key?(:project_auto_link_deep_dependencies) && @configurator.project_config_hash[:project_auto_link_deep_dependencies]
-      # Creating list of mocks
-      mocks = annotated_headers.find_all do |annotated_header|
-        File.basename(annotated_header) =~ /^#{@configurator.project_config_hash[:cmock_mock_prefix]}.*$/
-      end.compact
-
+    if @configurator.project_config_hash[:project_auto_link_deep_dependencies]
       # Creating list of headers that should be recursively pre-processed
-      # Skipping mocks and unity.h
-      headers_to_deep_link = annotated_headers.select do |annotated_header|
-        !(mocks.include? annotated_header) and (annotated_header.match(/^(.*\/)?unity\.h$/).nil?)
-      end
-      headers_to_deep_link.map! {|hdr| File.expand_path(hdr)}
-
-      mocks.each do |mock|
-        dirname = File.dirname(mock)
-        #basename = File.basename(mock).delete_prefix(@configurator.project_config_hash[:cmock_mock_prefix])
-        basename = File.basename(mock).sub(@configurator.project_config_hash[:cmock_mock_prefix], '')
-        if dirname != "."
-          ignore_list << File.join(dirname, basename)
-        else
-          ignore_list << basename
-        end
-      end.compact
-
-      # Filtering list of final includes to only include mocks and anything that is NOT in the ignore_list
-      list = list.select do |item|
-        mocks.include? item or !(ignore_list.any? { |ignore_item| !item.match(/^(.*\/)?#{Regexp.escape(ignore_item)}$/).nil? })
-      end
+      # Skipping mocks and vendor headers
+      headers_to_deep_link = full_path_headers_dependencies.select do |hdr|
+        !(mocks.include? hdr) and (hdr.match(/^(.*\/)(#{VENDORS_FILES.join('|')}) + #{Regexp.escape(hdr_ext)}$/).nil?)
+      end
+      headers_to_deep_link.map! {|hdr| File.expand_path(hdr) }
+      headers_to_deep_link.compact!

       headers_to_deep_link.each do |hdr|
         if (ignore_list.none? {|ignore_header| hdr.match(/^(.*\/)?#{Regexp.escape(ignore_header)}$/)} and
             include_paths.none? {|include_path| hdr =~ /^#{include_path}\.*/})
           if File.exist?(hdr)
             to_process << hdr
-            #source_file = hdr.delete_suffix(hdr_ext) + src_ext
-            source_file = hdr.chomp(hdr_ext) + src_ext
-            if source_file != hdr and File.exist?(source_file)
-              to_process << source_file
-            end
+            src = @file_finder.find_compilation_input_file(hdr, :ignore)
+            to_process << src if src
           end
         end
       end
     end

-    return list, to_process
+    return list, to_process, mocks

   end

   def write_shallow_includes_list(filepath, list)
     @yaml_wrapper.dump(filepath, list)
   end
+
+  private
+
+  def extract_full_path_dependencies(dependencies)
+    # Separate the real files form the annotated ones and remove the '@@@@'
+    annotated_files, real_files = dependencies.partition {|file| file =~ /^@@@@/}
+    annotated_files.map! {|file| file.gsub('@@@@','') }
+    # Matching annotated_files values against real_files to ensure that
+    # annotated_files contain full path entries (as returned by make rule)
+    annotated_files.map! {|file| real_files.find {|real| !real.match(/^(.*\/)?#{Regexp.escape(file)}$/).nil?}}
+    annotated_files = annotated_files.compact
+
+    # Find which of our annotated files are "real" dependencies. This is
+    # intended to weed out dependencies that have been removed due to build
+    # options defined in the project yaml and/or in the files themselves.
+    return annotated_files.find_all do |annotated_file|
+      # find the index of the "real" file that matches the annotated one.
+      idx = real_files.find_index do |real_file|
+        real_file =~ /^(.*\/)?#{Regexp.escape(annotated_file)}$/
+      end
+      # If we found a real file, delete it from the array and return it,
+      # otherwise return nil. Since nil is falsy this has the effect of making
+      # find_all return only the annotated filess for which a real file was
+      # found/deleted
+      idx ? real_files.delete_at(idx) : nil
+    end.compact
+  end
 end
@@ -23,6 +23,12 @@ class ProjectConfigManager
   end


+  def filter_internal_sources(sources)
+    filtered_sources = sources.clone
+    filtered_sources.delete_if { |item| item =~ /#{CMOCK_MOCK_PREFIX}.+#{Regexp.escape(EXTENSION_SOURCE)}$/ }
+    filtered_sources.delete_if { |item| item =~ /#{VENDORS_FILES.map{|source| '\b' + Regexp.escape(source.ext(EXTENSION_SOURCE)) + '\b'}.join('|')}$/ }
+    return filtered_sources
+  end

   def process_release_config_change
     # has project configuration changed since last release build
@@ -40,7 +46,7 @@ class ProjectConfigManager
     @test_defines_changed = @cacheinator.diff_cached_test_defines?( files )
     if @test_defines_changed
       # update timestamp for rake task prerequisites
-      @file_wrapper.touch( @configurator.project_test_force_rebuild_filepath )
+      @file_wrapper.touch( @configurator.project_test_force_rebuild_filepath, :mtime => Time.now + 10 )
     end
   end
 end
@@ -10,7 +10,6 @@ $LOAD_PATH.unshift( CEEDLING_LIB )
 $LOAD_PATH.unshift( File.join(CEEDLING_VENDOR, 'unity/auto') )
 $LOAD_PATH.unshift( File.join(CEEDLING_VENDOR, 'diy/lib') )
 $LOAD_PATH.unshift( File.join(CEEDLING_VENDOR, 'cmock/lib') )
-$LOAD_PATH.unshift( File.join(CEEDLING_VENDOR, 'deep_merge/lib') )

 require 'rake'

@@ -56,15 +56,40 @@ class ReleaseInvoker
   end

   def convert_libraries_to_arguments(libraries)
-    args = (libraries || []) + ((defined? LIBRARIES_SYSTEM) ? LIBRARIES_SYSTEM : [])
+    args = ((libraries || []) + ((defined? LIBRARIES_SYSTEM) ? LIBRARIES_SYSTEM : [])).flatten
     if (defined? LIBRARIES_FLAG)
       args.map! {|v| LIBRARIES_FLAG.gsub(/\$\{1\}/, v) }
     end
     return args
   end
+
+  def get_library_paths_to_arguments()
+    paths = (defined? PATHS_LIBRARIES) ? (PATHS_LIBRARIES || []).clone : []
+    if (defined? LIBRARIES_PATH_FLAG)
+      paths.map! {|v| LIBRARIES_PATH_FLAG.gsub(/\$\{1\}/, v) }
+    end
+    return paths
+  end

   def sort_objects_and_libraries(both)
-    extension = "\\" + ((defined? EXTENSION_SUBPROJECTS) ? EXTENSION_SUBPROJECTS : ".LIBRARY")
+    extension = if ((defined? EXTENSION_SUBPROJECTS) && (defined? EXTENSION_LIBRARIES))
+                  extension_libraries = if (EXTENSION_LIBRARIES.class == Array)
+                                          EXTENSION_LIBRARIES.join(")|(?:\\")
+                                        else
+                                          EXTENSION_LIBRARIES
+                                        end
+                  "(?:\\#{EXTENSION_SUBPROJECTS})|(?:\\#{extension_libraries})"
+                elsif (defined? EXTENSION_SUBPROJECTS)
+                  "\\#{EXTENSION_SUBPROJECTS}"
+                elsif (defined? EXTENSION_LIBRARIES)
+                  if (EXTENSION_LIBRARIES.class == Array)
+                    "(?:\\#{EXTENSION_LIBRARIES.join(")|(?:\\")})"
+                  else
+                    "\\#{EXTENSION_LIBRARIES}"
+                  end
+                else
+                  "\\.LIBRARY"
+                end
     sorted_objects = both.group_by {|v| v.match(/.+#{extension}$/) ? :libraries : :objects }
     libraries = sorted_objects[:libraries] || []
     objects = sorted_objects[:objects] || []
@@ -2,6 +2,17 @@
 RELEASE_COMPILE_TASK_ROOT = RELEASE_TASK_ROOT + 'compile:' unless defined?(RELEASE_COMPILE_TASK_ROOT)
 RELEASE_ASSEMBLE_TASK_ROOT = RELEASE_TASK_ROOT + 'assemble:' unless defined?(RELEASE_ASSEMBLE_TASK_ROOT)

+# If GCC and Releasing a Library, Update Tools to Automatically Have Necessary Tags
+if (TOOLS_RELEASE_COMPILER[:executable] == DEFAULT_RELEASE_COMPILER_TOOL[:executable])
+  if (File.extname(PROJECT_RELEASE_BUILD_TARGET) == '.so')
+    TOOLS_RELEASE_COMPILER[:arguments] << "-fPIC" unless TOOLS_RELEASE_COMPILER[:arguments].include?("-fPIC")
+    TOOLS_RELEASE_LINKER[:arguments] << "-shared" unless TOOLS_RELEASE_LINKER[:arguments].include?("-shared")
+  elsif (File.extname(PROJECT_RELEASE_BUILD_TARGET) == '.a')
+    TOOLS_RELEASE_COMPILER[:arguments] << "-fPIC" unless TOOLS_RELEASE_COMPILER[:arguments].include?("-fPIC")
+    TOOLS_RELEASE_LINKER[:executable] = 'ar'
+    TOOLS_RELEASE_LINKER[:arguments] = ['rcs', '${2}', '${1}'].compact
+  end
+end

 if (RELEASE_BUILD_USE_ASSEMBLY)
 rule(/#{PROJECT_RELEASE_BUILD_OUTPUT_ASM_PATH}\/#{'.+\\'+EXTENSION_OBJECT}$/ => [
@@ -37,16 +48,18 @@ end

 rule(/#{PROJECT_RELEASE_BUILD_TARGET}/) do |bin_file|
   objects, libraries = @ceedling[:release_invoker].sort_objects_and_libraries(bin_file.prerequisites)
   tool = TOOLS_RELEASE_LINKER.clone
   lib_args = @ceedling[:release_invoker].convert_libraries_to_arguments(libraries)
-  map_file = @ceedling[:configurator].project_release_build_map
+  lib_paths = @ceedling[:release_invoker].get_library_paths_to_arguments()
+  map_file = @ceedling[:configurator].project_release_build_map
   @ceedling[:generator].generate_executable_file(
     tool,
     RELEASE_SYM,
     objects,
     bin_file.name,
     map_file,
-    lib_args )
+    lib_args,
+    lib_paths )
   @ceedling[:release_invoker].artifactinate( bin_file.name, map_file, @ceedling[:configurator].release_build_artifacts )
 end

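When the release target's extension is `.a`, the linker tool is swapped for `ar` so the final step produces a static archive rather than calling the compiler driver. A sketch of the substitution under an assumed default tool shape (`${1}` and `${2}` are Ceedling's placeholders for the input objects and the output file; the argument list shown is hypothetical):

```ruby
# Assumed default shape of the release linker tool; the real definition comes from the project defaults.
tools_release_linker = { :executable => 'gcc', :arguments => ['${1}', '-o', '${2}'] }

if File.extname('build/release/libproject.a') == '.a'
  tools_release_linker[:executable] = 'ar'
  tools_release_linker[:arguments]  = ['rcs', '${2}', '${1}'].compact
end
# Expands at link time to something like: ar rcs build/release/libproject.a foo.o bar.o
```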
@@ -34,16 +34,16 @@ end


 rule(/#{PROJECT_TEST_BUILD_OUTPUT_PATH}\/#{'.+\\'+EXTENSION_EXECUTABLE}$/) do |bin_file|

   lib_args = @ceedling[:test_invoker].convert_libraries_to_arguments()
+  lib_paths = @ceedling[:test_invoker].get_library_paths_to_arguments()
   @ceedling[:generator].generate_executable_file(
     TOOLS_TEST_LINKER,
     TEST_SYM,
     bin_file.prerequisites,
     bin_file.name,
     @ceedling[:file_path_utils].form_test_build_map_filepath( bin_file.name ),
-    lib_args )
+    lib_args,
+    lib_paths )
 end


@@ -66,8 +66,7 @@ namespace TEST_SYM do
       @ceedling[:file_finder].find_test_from_file_path(test)
     end
   ]) do |test|
-    @ceedling[:rake_wrapper][:directories].reenable if @ceedling[:task_invoker].first_run == false && @ceedling[:project_config_manager].test_defines_changed
-    @ceedling[:rake_wrapper][:directories].invoke
+    @ceedling[:rake_wrapper][:test_deps].invoke
     @ceedling[:test_invoker].setup_and_invoke([test.source])
   end
 end
@@ -25,8 +25,8 @@ class Setupinator
     @ceedling[:configurator].populate_cmock_defaults( config_hash )
     @ceedling[:configurator].find_and_merge_plugins( config_hash )
     @ceedling[:configurator].merge_imports( config_hash )
-    @ceedling[:configurator].tools_setup( config_hash )
     @ceedling[:configurator].eval_environment_variables( config_hash )
+    @ceedling[:configurator].tools_setup( config_hash )
     @ceedling[:configurator].eval_paths( config_hash )
     @ceedling[:configurator].standardize_paths( config_hash )
     @ceedling[:configurator].validate( config_hash )
@@ -46,25 +46,31 @@ class TaskInvoker
     return @rake_utils.task_invoked?(regex)
   end

+  def reset_rake_task_for_changed_defines(file)
+    if !(file =~ /#{VENDORS_FILES.map{|ignore| '\b' + ignore.ext(File.extname(file)) + '\b'}.join('|')}$/)
+      @rake_wrapper[file].clear_actions if @first_run == false && @project_config_manager.test_defines_changed
+      @rake_wrapper[file].reenable if @first_run == false && @project_config_manager.test_defines_changed
+    end
+  end
+
   def invoke_test_mocks(mocks)
     @dependinator.enhance_mock_dependencies( mocks )
     mocks.each { |mock|
-      @rake_wrapper[mock].reenable if @first_run == false && @project_config_manager.test_defines_changed
+      reset_rake_task_for_changed_defines( mock )
       @rake_wrapper[mock].invoke
     }
   end

   def invoke_test_runner(runner)
     @dependinator.enhance_runner_dependencies( runner )
-    @rake_wrapper[runner].reenable if @first_run == false && @project_config_manager.test_defines_changed
+    reset_rake_task_for_changed_defines( runner )
     @rake_wrapper[runner].invoke
   end

   def invoke_test_shallow_include_lists(files)
     @dependinator.enhance_shallow_include_lists_dependencies( files )
     par_map(PROJECT_COMPILE_THREADS, files) do |file|
-      @rake_wrapper[file].reenable if @first_run == false && @project_config_manager.test_defines_changed
+      reset_rake_task_for_changed_defines( file )
       @rake_wrapper[file].invoke
     end
   end
@@ -72,7 +78,7 @@ class TaskInvoker
   def invoke_test_preprocessed_files(files)
     @dependinator.enhance_preprocesed_file_dependencies( files )
     par_map(PROJECT_COMPILE_THREADS, files) do |file|
-      @rake_wrapper[file].reenable if @first_run == false && @project_config_manager.test_defines_changed
+      reset_rake_task_for_changed_defines( file )
       @rake_wrapper[file].invoke
     end
   end
@@ -80,14 +86,14 @@ class TaskInvoker
   def invoke_test_dependencies_files(files)
     @dependinator.enhance_dependencies_dependencies( files )
     par_map(PROJECT_COMPILE_THREADS, files) do |file|
-      @rake_wrapper[file].reenable if @first_run == false && @project_config_manager.test_defines_changed
+      reset_rake_task_for_changed_defines( file )
       @rake_wrapper[file].invoke
     end
   end

   def invoke_test_objects(objects)
     par_map(PROJECT_COMPILE_THREADS, objects) do |object|
-      @rake_wrapper[object].reenable if @first_run == false && @project_config_manager.test_defines_changed
+      reset_rake_task_for_changed_defines( object )
       @rake_wrapper[object].invoke
     end
   end
@@ -98,7 +104,6 @@ class TaskInvoker

   def invoke_test_results(result)
     @dependinator.enhance_results_dependencies( result )
-    @rake_wrapper[result].reenable if @first_run == false && @project_config_manager.test_defines_changed
     @rake_wrapper[result].invoke
   end

@@ -4,28 +4,10 @@ require 'ceedling/version'

 desc "Display build environment version info."
 task :version do
   puts " Ceedling:: #{Ceedling::Version::CEEDLING}"
-
-  [
-    ['CException', File.join( CEEDLING_VENDOR, CEXCEPTION_ROOT_PATH)],
-    [' CMock', File.join( CEEDLING_VENDOR, CMOCK_ROOT_PATH)],
-    [' Unity', File.join( CEEDLING_VENDOR, UNITY_ROOT_PATH)],
-  ].each do |tool|
-    name = tool[0]
-    base_path = tool[1]
-
-    version_string = begin
-      @ceedling[:file_wrapper].read( File.join(base_path, 'release', 'version.info') ).strip
-    rescue
-      "UNKNOWN"
-    end
-    build_string = begin
-      @ceedling[:file_wrapper].read( File.join(base_path, 'release', 'build.info') ).strip
-    rescue
-      "UNKNOWN"
-    end
-    puts "#{name}:: #{version_string.empty? ? '#.#.' : (version_string + '.')}#{build_string.empty? ? '?' : build_string}"
-  end
+  puts " Unity:: #{Ceedling::Version::UNITY}"
+  puts " CMock:: #{Ceedling::Version::CMOCK}"
+  puts " CException:: #{Ceedling::Version::CEXCEPTION}"
 end

 desc "Set verbose output (silent:[#{Verbosity::SILENT}] - obnoxious:[#{Verbosity::OBNOXIOUS}])."
@@ -65,6 +47,12 @@ task :sanity_checks, :level do |t, args|
   @ceedling[:configurator].sanity_checks = check_level
 end

+# non advertised catch for calling upgrade in the wrong place
+task :upgrade do
+  puts "WARNING: You're currently IN your project directory. Take a step out and try"
+  puts "again if you'd like to perform an upgrade."
+end
+
 # list expanded environment variables
 if (not ENVIRONMENT.empty?)
   desc "List all configured environment variables."
@@ -88,7 +76,7 @@ namespace :options do
     option = File.basename(option_path, '.yml')

     desc "Merge #{option} project options."
-    task option.downcase.to_sym do
+    task option.to_sym do
       hash = @ceedling[:project_config_manager].merge_options( @ceedling[:setupinator].config_hash, option_path )
       @ceedling[:setupinator].do_setup( hash )
       if @ceedling[:configurator].project_release_build
@@ -97,6 +85,23 @@ namespace :options do
     end
   end

+  # This is to give nice errors when typing options
+  rule /^options:.*/ do |t, args|
+    filename = t.to_s.split(':')[-1] + '.yml'
+    filelist = COLLECTION_PROJECT_OPTIONS.map{|s| File.basename(s) }
+    @ceedling[:file_finder].find_file_from_list(filename, filelist, :error)
+  end
+
+  # This will output the fully-merged tools options to their own project.yml file
+  desc "Export tools options to a new project file"
+  task :export, :filename do |t, args|
+    outfile = args.filename || 'tools.yml'
+    toolcfg = {}
+    @ceedling[:configurator].project_config_hash.each_pair do |k,v|
+      toolcfg[k] = v if (k.to_s[0..5] == 'tools_')
+    end
+    File.open(outfile,'w') {|f| f << toolcfg.to_yaml({:indentation => 2})}
+  end
 end

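The new `options:export` task keeps only the `tools_*` entries of the fully merged configuration and dumps them to a standalone YAML file. A sketch of the filter and of a possible invocation (the config keys shown are hypothetical; task-argument quoting depends on your shell):

```ruby
project_config_hash = {
  :tools_test_compiler    => { :executable => 'gcc' },   # hypothetical merged entries
  :project_use_exceptions => false
}

toolcfg = {}
project_config_hash.each_pair do |k, v|
  toolcfg[k] = v if (k.to_s[0..5] == 'tools_')           # keep only tools_* keys
end
toolcfg   # => {:tools_test_compiler=>{:executable=>"gcc"}}

# invoked as, for example:
#   ceedling "options:export[my_tools.yml]"
```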
@@ -45,26 +45,35 @@ task(:clobber => [:clean]) do
   @ceedling[:streaminator].stdout_puts("\nClobbering all generated files...\n(For large projects, this task may take a long time to complete)\n\n")
   begin
     CLOBBER.each { |fn| REMOVE_FILE_PROC.call(fn) }
+    @ceedling[:rake_wrapper][:directories].invoke
+    @ceedling[:dependinator].touch_force_rebuild_files
   rescue
   end
 end

+# create a directory task for each of the paths, so we know how to build them
 PROJECT_BUILD_PATHS.each { |path| directory(path) }

-# create directories that hold build output and generated files & touching rebuild dependency sources
-task(:directories => PROJECT_BUILD_PATHS) { @ceedling[:dependinator].touch_force_rebuild_files }
+# create a single directory task which verifies all the others get built
+task :directories => PROJECT_BUILD_PATHS
+
+# when the force file doesn't exist, it probably means we clobbered or are on a fresh
+# install. In either case, stuff was deleted, so assume we want to rebuild it all
+file @ceedling[:configurator].project_test_force_rebuild_filepath do
+  unless File.exists?(@ceedling[:configurator].project_test_force_rebuild_filepath)
+    @ceedling[:dependinator].touch_force_rebuild_files
+  end
+end

 # list paths discovered at load time
 namespace :paths do
-  paths = @ceedling[:setupinator].config_hash[:paths]
-  paths.each_key do |section|
-    name = section.to_s.downcase
+  standard_paths = ['test','source','include']
+  paths = @ceedling[:setupinator].config_hash[:paths].keys.map{|n| n.to_s.downcase}
+  paths = (paths + standard_paths).uniq
+  paths.each do |name|
     path_list = Object.const_get("COLLECTION_PATHS_#{name.upcase}")

-    if (path_list.size != 0)
+    if (path_list.size != 0) || (standard_paths.include?(name))
       desc "List all collected #{name} paths."
       task(name.to_sym) { puts "#{name} paths:"; path_list.sort.each {|path| puts " - #{path}" } }
     end
@@ -77,10 +86,11 @@ end
 namespace :files do

   categories = [
     ['test', COLLECTION_ALL_TESTS],
     ['source', COLLECTION_ALL_SOURCE],
-    ['header', COLLECTION_ALL_HEADERS]
-  ]
+    ['include', COLLECTION_ALL_HEADERS],
+    ['support', COLLECTION_ALL_SUPPORT]
+  ]

   using_assembly = (defined?(TEST_BUILD_USE_ASSEMBLY) && TEST_BUILD_USE_ASSEMBLY) ||
                    (defined?(RELEASE_BUILD_USE_ASSEMBLY) && RELEASE_BUILD_USE_ASSEMBLY)
@@ -1,13 +1,15 @@
 require 'ceedling/constants'

-task :test => [:directories] do
+task :test_deps => [:directories]
+
+task :test => [:test_deps] do
   Rake.application['test:all'].invoke
 end

 namespace TEST_SYM do

   desc "Run all unit tests (also just 'test' works)."
-  task :all => [:directories] do
+  task :all => [:test_deps] do
     @ceedling[:test_invoker].setup_and_invoke(COLLECTION_ALL_TESTS)
   end

@@ -21,17 +23,17 @@ namespace TEST_SYM do
   end

   desc "Run tests for changed files."
-  task :delta => [:directories] do
+  task :delta => [:test_deps] do
     @ceedling[:test_invoker].setup_and_invoke(COLLECTION_ALL_TESTS, TEST_SYM, {:force_run => false})
   end

   desc "Just build tests without running."
-  task :build_only => [:directories] do
+  task :build_only => [:test_deps] do
     @ceedling[:test_invoker].setup_and_invoke(COLLECTION_ALL_TESTS, TEST_SYM, {:build_only => true})
   end

   desc "Run tests by matching regular expression pattern."
-  task :pattern, [:regex] => [:directories] do |t, args|
+  task :pattern, [:regex] => [:test_deps] do |t, args|
     matches = []

     COLLECTION_ALL_TESTS.each { |test| matches << test if (test =~ /#{args.regex}/) }
@@ -44,7 +46,7 @@ namespace TEST_SYM do
   end

   desc "Run tests whose test path contains [dir] or [dir] substring."
-  task :path, [:dir] => [:directories] do |t, args|
+  task :path, [:dir] => [:test_deps] do |t, args|
     matches = []

     COLLECTION_ALL_TESTS.each { |test| matches << test if File.dirname(test).include?(args.dir.gsub(/\\/, '/')) }
@@ -19,6 +19,11 @@ class TestIncludesExtractor
     gather_and_store_includes( test, extract_from_file(test) )
   end

+  # open, scan for, and sort & store includes of test file
+  def parse_test_file_source_include(test)
+    return extract_source_include_from_file(test)
+  end
+
   # mocks with no file extension
   def lookup_raw_mock_list(test)
     file_key = form_file_key(test)
@@ -65,6 +70,27 @@ class TestIncludesExtractor
     return includes.uniq
   end

+  def extract_source_include_from_file(file)
+    source_includes = []
+    source_extension = @configurator.extension_source
+
+    contents = @file_wrapper.read(file)
+
+    # remove line comments
+    contents = contents.gsub(/\/\/.*$/, '')
+    # remove block comments
+    contents = contents.gsub(/\/\*.*?\*\//m, '')
+
+    contents.split("\n").each do |line|
+      # look for include statement
+      scan_results = line.scan(/#include\s+\"\s*(.+#{'\\'+source_extension})\s*\"/)
+
+      source_includes << scan_results[0][0] if (scan_results.size > 0)
+    end
+
+    return source_includes.uniq
+  end
+
   def gather_and_store_includes(file, includes)
     mock_prefix = @configurator.cmock_mock_prefix
     header_extension = @configurator.extension_header
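`extract_source_include_from_file` only picks up `#include` lines that name a source file (the configured source extension); those are translation units the test already pulls in directly, so their objects are later dropped from the link. A minimal sketch assuming `.c` as the source extension and a hypothetical include line:

```ruby
source_extension = '.c'   # assumed value of @configurator.extension_source
line = '#include "helpers/fake_driver.c"  // pulled in directly by the test'
line = line.gsub(/\/\/.*$/, '')                 # strip line comments, as the method does
scan_results = line.scan(/#include\s+\"\s*(.+#{'\\' + source_extension})\s*\"/)
scan_results[0][0] if scan_results.size > 0
# => "helpers/fake_driver.c"
```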
@@ -23,49 +23,24 @@ class TestInvoker
     @mocks = []
   end

-  def get_test_definition_str(test)
-    return "-D" + File.basename(test, File.extname(test)).upcase.sub(/@.*$/, "")
-  end
-
-  def get_tools_compilers
-    tools_compilers = Hash.new
-    tools_compilers["for unit test"] = TOOLS_TEST_COMPILER if defined? TOOLS_TEST_COMPILER
-    tools_compilers["for gcov"] = TOOLS_GCOV_COMPILER if defined? TOOLS_GCOV_COMPILER
-    return tools_compilers
-  end
-
-  def add_test_definition(test)
-    test_definition_str = get_test_definition_str(test)
-    get_tools_compilers.each do |tools_compiler_key, tools_compiler_value|
-      tools_compiler_value[:arguments].push("-D#{File.basename(test, ".*").strip.upcase.sub(/@.*$/, "")}")
-      @streaminator.stdout_puts("Add the definition value in the build option #{tools_compiler_value[:arguments][-1]} #{tools_compiler_key}", Verbosity::OBNOXIOUS)
-    end
-  end
-
-  def delete_test_definition(test)
-    test_definition_str = get_test_definition_str(test)
-    get_tools_compilers.each do |tools_compiler_key, tools_compiler_value|
-      num_options = tools_compiler_value[:arguments].size
-      @streaminator.stdout_puts("Delete the definition value in the build option #{tools_compiler_value[:arguments][-1]} #{tools_compiler_key}", Verbosity::OBNOXIOUS)
-      tools_compiler_value[:arguments].delete_if{|i| i == test_definition_str}
-      if num_options > tools_compiler_value[:arguments].size + 1
-        @streaminator.stderr_puts("WARNING: duplicated test definition.")
-      end
-    end
-  end
-
   # Convert libraries configuration form YAML configuration
   # into a string that can be given to the compiler.
   def convert_libraries_to_arguments()
-    if @configurator.project_config_hash.has_key?(:libraries_test)
-      lib_args = @configurator.project_config_hash[:libraries_test]
-      lib_args.flatten!
-      lib_flag = @configurator.project_config_hash[:libraries_flag]
-      lib_args.map! {|v| lib_flag.gsub(/\$\{1\}/, v) } if (defined? lib_flag)
-      return lib_args
+    args = ((@configurator.project_config_hash[:libraries_test] || []) + ((defined? LIBRARIES_SYSTEM) ? LIBRARIES_SYSTEM : [])).flatten
+    if (defined? LIBRARIES_FLAG)
+      args.map! {|v| LIBRARIES_FLAG.gsub(/\$\{1\}/, v) }
     end
+    return args
   end

+  def get_library_paths_to_arguments()
+    paths = (defined? PATHS_LIBRARIES) ? (PATHS_LIBRARIES || []).clone : []
+    if (defined? LIBRARIES_PATH_FLAG)
+      paths.map! {|v| LIBRARIES_PATH_FLAG.gsub(/\$\{1\}/, v) }
+    end
+    return paths
+  end
+
   def setup_and_invoke(tests, context=TEST_SYM, options={:force_run => true, :build_only => false})

@@ -83,18 +58,25 @@ class TestInvoker
         test_name ="#{File.basename(test)}".chomp('.c')
         def_test_key="defines_#{test_name.downcase}"

-        # Re-define the project out path and pre-processor defines.
-        if @configurator.project_config_hash.has_key?(def_test_key.to_sym)
-          @project_config_manager.test_config_changed
+        if @configurator.project_config_hash.has_key?(def_test_key.to_sym) || @configurator.defines_use_test_definition
           defs_bkp = Array.new(COLLECTION_DEFINES_TEST_AND_VENDOR)
-          printf " ************** Specific test definitions for #{test_name} !!! \n"
-          tst_defs_cfg = @configurator.project_config_hash[def_test_key.to_sym]
+          tst_defs_cfg = Array.new(defs_bkp)
+          if @configurator.project_config_hash.has_key?(def_test_key.to_sym)
+            tst_defs_cfg.replace(@configurator.project_config_hash[def_test_key.to_sym])
+            tst_defs_cfg .concat(COLLECTION_DEFINES_VENDOR) if COLLECTION_DEFINES_VENDOR
+          end
+          if @configurator.defines_use_test_definition
+            tst_defs_cfg << File.basename(test, ".*").strip.upcase.sub(/@.*$/, "")
+          end
+          COLLECTION_DEFINES_TEST_AND_VENDOR.replace(tst_defs_cfg)
+        end
+
+        # redefine the project out path and preprocessor defines
+        if @configurator.project_config_hash.has_key?(def_test_key.to_sym)
+          @streaminator.stdout_puts("Updating test definitions for #{test_name}", Verbosity::NORMAL)
           orig_path = @configurator.project_test_build_output_path
           @configurator.project_config_hash[:project_test_build_output_path] = File.join(@configurator.project_test_build_output_path, test_name)
           @file_wrapper.mkdir(@configurator.project_test_build_output_path)
-          COLLECTION_DEFINES_TEST_AND_VENDOR.replace(tst_defs_cfg)
-          # printf " * new defines = #{COLLECTION_DEFINES_TEST_AND_VENDOR}\n"
         end

         # collect up test fixture pieces & parts
@@ -103,16 +85,15 @@ class TestInvoker
         sources = @test_invoker_helper.extract_sources( test )
         extras = @configurator.collection_test_fixture_extra_link_objects
         core = [test] + mock_list + sources
-        objects = @file_path_utils.form_test_build_objects_filelist( [runner] + core + extras )
+        objects = @file_path_utils.form_test_build_objects_filelist( [runner] + core + extras ).uniq
         results_pass = @file_path_utils.form_pass_results_filepath( test )
         results_fail = @file_path_utils.form_fail_results_filepath( test )

-        @project_config_manager.process_test_defines_change(sources)
-        # add the definition value in the build option for the unit test
-        if @configurator.defines_use_test_definition
-          add_test_definition(test)
-        end
+        # identify all the objects shall not be linked and then remove them from objects list.
+        no_link_objects = @file_path_utils.form_test_build_objects_filelist(@preprocessinator.preprocess_shallow_source_includes( test ))
+        objects = objects.uniq - no_link_objects
+
+        @project_config_manager.process_test_defines_change(@project_config_manager.filter_internal_sources(sources))

         # clean results files so we have a missing file with which to kick off rake's dependency rules
         @test_invoker_helper.clean_results( {:pass => results_pass, :fail => results_fail}, options )
@@ -129,7 +110,7 @@ class TestInvoker
         @dependinator.enhance_test_build_object_dependencies( objects )

         # associate object files with executable
-        @dependinator.setup_test_executable_dependencies( test, objects )
+        @dependinator.enhance_test_executable_dependencies( test, objects )

         # build test objects
         @task_invoker.invoke_test_objects( objects )
@@ -146,18 +127,14 @@ class TestInvoker
       rescue => e
         @build_invoker_utils.process_exception( e, context )
       ensure
-        # delete the definition value in the build option for the unit test
-        if @configurator.defines_use_test_definition
-          delete_test_definition(test)
-        end
         @plugin_manager.post_test( test )
         # restore the project test defines
-        if @configurator.project_config_hash.has_key?(def_test_key.to_sym)
-          # @configurator.project_config_hash[:defines_test] =
+        if @configurator.project_config_hash.has_key?(def_test_key.to_sym) || @configurator.defines_use_test_definition
           COLLECTION_DEFINES_TEST_AND_VENDOR.replace(defs_bkp)
-          # printf " ---- Restored defines at #{defs_bkp}"
-          @configurator.project_config_hash[:project_test_build_output_path] = orig_path
-          printf " ************** Restored defines and build path\n"
+          if @configurator.project_config_hash.has_key?(def_test_key.to_sym)
+            @configurator.project_config_hash[:project_test_build_output_path] = orig_path
+            @streaminator.stdout_puts("Restored defines and build path to standard", Verbosity::NORMAL)
+          end
         end
       end
     end

@@ -11,7 +11,7 @@ class TestInvokerHelper
   def process_deep_dependencies(files)
     return if (not @configurator.project_use_deep_dependencies)

-    dependencies_list = @file_path_utils.form_test_dependencies_filelist( files )
+    dependencies_list = @file_path_utils.form_test_dependencies_filelist( files ).uniq

     if @configurator.project_generate_deep_dependencies
       @task_invoker.invoke_test_dependencies_files( dependencies_list )
54
test/unit-test/vendor/ceedling/lib/ceedling/version.rb
vendored
Normal file
54
test/unit-test/vendor/ceedling/lib/ceedling/version.rb
vendored
Normal file
@@ -0,0 +1,54 @@
+
+# @private
+module Ceedling
+  module Version
+    { "UNITY"      => File.join("unity","src","unity.h"),
+      "CMOCK"      => File.join("cmock","src","cmock.h"),
+      "CEXCEPTION" => File.join("c_exception","lib","CException.h")
+    }.each_pair do |name, path|
+      # Check for local or global version of vendor directory in order to look up versions
+      path1 = File.expand_path( File.join("..","..","vendor",path) )
+      path2 = File.expand_path( File.join(File.dirname(__FILE__),"..","..","vendor",path) )
+      filename = if (File.exists?(path1))
+        path1
+      elsif (File.exists?(path2))
+        path2
+      elsif File.exists?(CEEDLING_VENDOR)
+        path3 = File.expand_path( File.join(CEEDLING_VENDOR,path) )
+        if (File.exists?(path3))
+          path3
+        else
+          basepath = File.join( CEEDLING_VENDOR, path.split(/\\\//)[0], 'release')
+          begin
+            [ @ceedling[:file_wrapper].read( File.join(base_path, 'release', 'version.info') ).strip,
+              @ceedling[:file_wrapper].read( File.join(base_path, 'release', 'build.info') ).strip ].join('.')
+          rescue
+            "#{name}"
+          end
+        end
+      else
+        module_eval("#{name} = 'unknown'")
+        continue
+      end
+
+      # Actually look up the versions
+      a = [0,0,0]
+      begin
+        File.readlines(filename).each do |line|
+          ["VERSION_MAJOR", "VERSION_MINOR", "VERSION_BUILD"].each_with_index do |field, i|
+            m = line.match(/#{name}_#{field}\s+(\d+)/)
+            a[i] = m[1] unless (m.nil?)
+          end
+        end
+      rescue
+        abort("Can't collect data for vendor component: \"#{filename}\" . \nPlease check your setup.")
+      end
+
+      # splat it to return the final value
+      eval("#{name} = '#{a.join(".")}'")
+    end
+
+    GEM = "0.31.1"
+    CEEDLING = GEM
+  end
+end
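This new vendored file scans the Unity, CMock and CException headers for their `*_VERSION_MAJOR/MINOR/BUILD` macros and pins the Ceedling gem version at 0.31.1. A sketch of how the resulting constants could be read once the module is loaded; the require path and the printed values are assumptions, not taken from this diff:

```ruby
# Sketch only: the constants are defined by the file above at load time.
require 'ceedling/version'

puts Ceedling::Version::GEM       # => "0.31.1"
puts Ceedling::Version::CEEDLING  # same value as GEM
puts Ceedling::Version::UNITY     # e.g. "2.5.2", or 'unknown' if the header was not found
```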
test/unit-test/vendor/ceedling/plugins/bullseye/README.md (vendored, new file, 76 lines)
@@ -0,0 +1,76 @@
+ceedling-bullseye
+=================
+
+# Plugin Overview
+
+Plugin for integrating Bullseye code coverage tool into Ceedling projects.
+This plugin requires a working license to Bullseye code coverage tools. The tools
+must be within the path or the path should be added to the environment in the
+`project.yml file`.
+
+## Configuration
+
+The bullseye plugin supports configuration options via your `project.yml` provided
+by Ceedling. The following is a typical configuration example:
+
+```
+:bullseye:
+  :auto_license: TRUE
+:plugins:
+  :bullseye_lib_path: []
+:paths:
+  :bullseye_toolchain_include: []
+
+:tools:
+  :bullseye_instrumentation:
+    :executable: covc
+    :arguments:
+      - '--file $': ENVIRONMENT_COVFILE
+      - -q
+      - ${1}
+  :bullseye_compiler:
+    :executable: gcc
+    :arguments:
+      - -g
+      - -I"$": COLLECTION_PATHS_TEST_SUPPORT_SOURCE_INCLUDE_VENDOR
+      - -I"$": COLLECTION_PATHS_BULLSEYE_TOOLCHAIN_INCLUDE
+      - -D$: COLLECTION_DEFINES_TEST_AND_VENDOR
+      - -DBULLSEYE_COMPILER
+      - -c "${1}"
+      - -o "${2}"
+  :bullseye_linker:
+    :executable: gcc
+    :arguments:
+      - ${1}
+      - -o ${2}
+      - -L$: PLUGINS_BULLSEYE_LIB_PATH
+      - -lcov
+  :bullseye_fixture:
+    :executable: ${1}
+  :bullseye_report_covsrc:
+    :executable: covsrc
+    :arguments:
+      - '--file $': ENVIRONMENT_COVFILE
+      - -q
+      - -w140
+  :bullseye_report_covfn:
+    :executable: covfn
+    :stderr_redirect: :auto
+    :arguments:
+      - '--file $': ENVIRONMENT_COVFILE
+      - --width 120
+      - --no-source
+      - '"${1}"'
+  :bullseye_browser:
+    :executable: CoverageBrowser
+    :background_exec: :auto
+    :optional: TRUE
+    :arguments:
+      - '"$"': ENVIRONMENT_COVFILE
+```
+
+## Example Usage
+
+```sh
+ceedling bullseye:all utils:bullseye
+```
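In the tool definitions above, an argument written as `'--file $': ENVIRONMENT_COVFILE` appears to pair a template with a referenced configuration value; roughly, the referenced value is expanded where the `$` sits. A simplified illustration of that substitution (not Ceedling's actual argument builder, and the fallback path is made up):

```ruby
# Illustrative only: expand a '$' placeholder with a referenced value.
covfile      = ENV.fetch('COVFILE', 'build/artifacts/bullseye/test.cov')
arg_template = '--file $'
puts arg_template.sub('$', covfile)  # => "--file build/artifacts/bullseye/test.cov"
```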
@@ -32,12 +32,16 @@ rule(/#{BULLSEYE_BUILD_OUTPUT_PATH}\/#{'.+\\'+EXTENSION_OBJECT}$/ => [
 end

 rule(/#{BULLSEYE_BUILD_OUTPUT_PATH}\/#{'.+\\'+EXTENSION_EXECUTABLE}$/) do |bin_file|
+  lib_args = @ceedling[:test_invoker].convert_libraries_to_arguments()
+  lib_paths = @ceedling[:test_invoker].get_library_paths_to_arguments()
   @ceedling[:generator].generate_executable_file(
     TOOLS_BULLSEYE_LINKER,
     BULLSEYE_SYM,
     bin_file.prerequisites,
     bin_file.name,
-    @ceedling[:file_path_utils].form_test_build_map_filepath(bin_file.name)
+    @ceedling[:file_path_utils].form_test_build_map_filepath(bin_file.name),
+    lib_args,
+    lib_paths
   )
 end

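The executable rule now collects library arguments and library search paths from the test invoker and forwards them to `generate_executable_file`. A rough sketch of how such collections typically end up as linker flags; the helper logic and values below are assumptions, not the vendored code:

```ruby
# Hypothetical values; shows the usual mapping from names/paths to linker flags.
libs     = ['m', 'cov']
lib_dirs = ['build/vendor', '/opt/bullseye/lib']

lib_args  = libs.map     { |name| "-l#{name}" }   # => ["-lm", "-lcov"]
lib_paths = lib_dirs.map { |dir|  "-L#{dir}"  }   # => ["-Lbuild/vendor", "-L/opt/bullseye/lib"]
```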
@@ -69,7 +73,7 @@ namespace BULLSEYE_SYM do
   task source_coverage: COLLECTION_ALL_SOURCE.pathmap("#{BULLSEYE_BUILD_OUTPUT_PATH}/%n#{@ceedling[:configurator].extension_object}")

   desc 'Run code coverage for all tests'
-  task all: [:directories] do
+  task all: [:test_deps] do
     @ceedling[:configurator].replace_flattened_config(@ceedling[BULLSEYE_SYM].config)
     @ceedling[BULLSEYE_SYM].enableBullseye(true)
     @ceedling[:test_invoker].setup_and_invoke(COLLECTION_ALL_TESTS, BULLSEYE_SYM)
@@ -86,7 +90,7 @@ namespace BULLSEYE_SYM do
   end

   desc 'Run tests by matching regular expression pattern.'
-  task :pattern, [:regex] => [:directories] do |_t, args|
+  task :pattern, [:regex] => [:test_deps] do |_t, args|
     matches = []

     COLLECTION_ALL_TESTS.each do |test|
@@ -104,7 +108,7 @@ namespace BULLSEYE_SYM do
   end

   desc 'Run tests whose test path contains [dir] or [dir] substring.'
-  task :path, [:dir] => [:directories] do |_t, args|
+  task :path, [:dir] => [:test_deps] do |_t, args|
     matches = []

     COLLECTION_ALL_TESTS.each do |test|
@@ -122,7 +126,7 @@ namespace BULLSEYE_SYM do
   end

   desc 'Run code coverage for changed files'
-  task delta: [:directories] do
+  task delta: [:test_deps] do
     @ceedling[:configurator].replace_flattened_config(@ceedling[BULLSEYE_SYM].config)
     @ceedling[BULLSEYE_SYM].enableBullseye(true)
     @ceedling[:test_invoker].setup_and_invoke(COLLECTION_ALL_TESTS, BULLSEYE_SYM, {:force_run => false})
@@ -138,7 +142,7 @@ namespace BULLSEYE_SYM do
     @ceedling[:file_finder].find_test_from_file_path(test)
   end
 ]) do |test|
-  @ceedling[:rake_wrapper][:directories].invoke
+  @ceedling[:rake_wrapper][:test_deps].invoke
   @ceedling[:configurator].replace_flattened_config(@ceedling[BULLSEYE_SYM].config)
   @ceedling[BULLSEYE_SYM].enableBullseye(true)
   @ceedling[:test_invoker].setup_and_invoke([test.source], BULLSEYE_SYM)
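Each bullseye task in these hunks swaps its `:directories` prerequisite for `:test_deps`, so the coverage tasks pick up whatever extra test prerequisites that task provides. A Rakefile-style sketch of the pattern; the task bodies, and the assumption that `:test_deps` itself depends on `:directories`, are illustrative only:

```ruby
# Fragment for a Rakefile; bodies are placeholders, not Ceedling's.
task :directories do
  puts 'create build output directories'
end

# Assumed to bundle :directories with any additional test prerequisites.
task test_deps: [:directories] do
  puts 'prepare test dependencies'
end

# Coverage tasks now hang off :test_deps instead of :directories.
task all: [:test_deps] do
  puts 'run code coverage for all tests'
end
```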
Some files were not shown because too many files have changed in this diff.