...
 
Commits (31)
......@@ -8,7 +8,9 @@ install:
- SET "PATH=%PYTHON%;%PYTHON%\\Scripts;%PATH%"
- python --version
- python -m ensurepip
- python -m pip install --egg scons==2.4.1
- pip install -U setuptools
- pip install -U wheel
- pip install scons==2.4.1
- cinst nsis.portable
before_build:
......
......@@ -4,3 +4,6 @@
[submodule "external/Catch"]
path = external/Catch
url = https://github.com/philsquared/Catch.git
[submodule "external/pybind11"]
path = external/pybind11
url = https://github.com/pybind/pybind11.git
cmake_minimum_required (VERSION 3.2)
project (alta CXX)

#############################
# Preconfiguration          #
#############################

# Python interface version selector (cache variable, default Python 3).
set(Python_Version "3" CACHE STRING "Python interface version (default Python 3)")
# FIX: was $(Python_Version) -- shell/Makefile syntax, not CMake. It expanded
# to the literal text "$(Python_Version)", so the cache setting above was
# silently ignored by FindPythonLibs. CMake variable references use ${...}.
set(Python_ADDITIONAL_VERSIONS ${Python_Version})

# Require C++11 (no compiler extensions implied by these two alone).
set(CMAKE_CXX_STANDARD 11)
set(CMAKE_CXX_STANDARD_REQUIRED ON)

#############################
# Dependencies              #
#############################

# Project-local Find*.cmake modules (FindCeres, FindNLOPT, ...).
set(CMAKE_MODULE_PATH ${PROJECT_SOURCE_DIR}/configs/cmake/)

# Find optional/required packages. Eigen is verified as a hard requirement
# below via the header probe; the others degrade gracefully when absent.
find_package(Eigen3)
find_package(OpenMP)
find_package(PythonLibs)
find_package(Doxygen)
find_package(Ceres)
find_package(NLOPT)

# Default plugin installation directory baked into the core library.
add_definitions(-DALTA_PLUGIN_DIRECTORY="/usr/lib/alta_plugins")

# Add core and library include files
include_directories("external/build" "sources" ${EIGEN3_INCLUDE_DIR})
include_directories("external" "external/Catch/include" "external/quadprog++")
include_directories("external/pybind11/include")

# Look for header-only dependencies (variables hold the found header path,
# or *-NOTFOUND, which is falsy in if()).
find_file(EIGEN_FOUND "Eigen/Core" HINTS ${EIGEN3_INCLUDE_DIR})
find_file(CATCH_FOUND "catch.hpp" HINTS external/Catch/include)
find_file(FLANN_FOUND "flann/flann.hpp")
find_file(PYBIND_FOUND "pybind11/pybind11.h" HINTS "external/pybind11/include")

# Eigen is mandatory: abort configuration if the header is missing.
if(NOT EIGEN_FOUND)
  message(FATAL_ERROR "Unable to find Eigen header")
endif()

# Enable OpenMP if present.
# FIX: append to CMAKE_CXX_FLAGS instead of overwriting it, so flags supplied
# by the user (-DCMAKE_CXX_FLAGS=...) are preserved.
if(OPENMP_FOUND)
  set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${OpenMP_CXX_FLAGS}")
endif()

# Everything (static core included) is built -fPIC so it can be linked
# into the shared plugin/python libraries below.
set(CMAKE_POSITION_INDEPENDENT_CODE ON)
# alta_add_plugin(name sources)
#
# Declare an ALTA plugin: a shared library named <name>, built from the
# single source file sources/plugins/<sources>, linked against the core
# static library.
function(alta_add_plugin name sources)
  add_library(${name} SHARED "sources/plugins/${sources}")
  target_link_libraries(${name} core)
endfunction()
# alta_add_soft(name sources)
#
# Declare an ALTA command-line tool: an executable named <name>, built
# from sources/softs/<sources> and linked against the core library.
function(alta_add_soft name sources)
  add_executable(${name} "sources/softs/${sources}")
  target_link_libraries(${name} core)
endfunction()
# alta_test_unit(name sources)
#
# Build the unit-test executable <name> from sources/tests/<sources>,
# link it against core, and register it with CTest. The test runs from
# the source tree root and receives the test-data and plugin directories
# through environment variables.
function(alta_test_unit name sources)
  add_executable(${name} "sources/tests/${sources}")
  target_link_libraries(${name} core)
  add_test(NAME ${name}
           COMMAND ${CMAKE_BINARY_DIR}/tests/${name}
           WORKING_DIRECTORY "${CMAKE_SOURCE_DIR}")
  set_tests_properties(${name} PROPERTIES ENVIRONMENT "TEST_DATA_DIRECTORY=${CMAKE_SOURCE_DIR}/sources/tests;ALTA_PLUGIN_PATH=${CMAKE_BINARY_DIR}/plugins")
endfunction()
# alta_test_soft(name)
#
# Register a smoke test for the soft <name>: simply run it with --help
# and require a zero exit status.
function(alta_test_soft name)
  add_test(NAME ${name}
           COMMAND ${CMAKE_BINARY_DIR}/softs/${name} --help)
endfunction()
# alta_download_merl(name)
#
# Ensure the MERL BRDF measurement "<name>.binary" is present in
# <build>/tests, downloading it from the MERL database on first
# configure. Sets <name>_FOUND to TRUE in the caller's scope after a
# download attempt; otherwise find_file caches the located path.
function(alta_download_merl name)
  find_file(${name}_FOUND "${name}.binary" PATHS "${CMAKE_BINARY_DIR}/tests/")
  if(NOT ${name}_FOUND)
    # FIX: corrected message typo ("Dowloading" -> "Downloading").
    message("Downloading ${name} from MERL database:")
    file(DOWNLOAD
      "http://people.csail.mit.edu/wojciech/BRDFDatabase/brdfs/${name}.binary"
      "${CMAKE_BINARY_DIR}/tests/${name}.binary" SHOW_PROGRESS)
    # NOTE(review): the download status is not checked, so a failed
    # transfer still reports _FOUND -- kept as-is to preserve behavior.
    set(${name}_FOUND TRUE PARENT_SCOPE)
  endif()
endfunction()
#############################
# ALTA core #
#############################
# Static archives (core, quadprog) are collected under <build>/lib.
set(CMAKE_ARCHIVE_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/lib)
# Core static library shared by every plugin and soft: argument parsing,
# parametrizations, data containers/storage, function abstractions and
# the plugin manager.
add_library(core STATIC
sources/core/args.h
sources/core/common.h
sources/core/common.cpp
sources/core/metrics.h
sources/core/metrics.cpp
sources/core/params.h
sources/core/params.cpp
sources/core/data.h
sources/core/data.cpp
sources/core/data_storage.h
sources/core/data_storage.cpp
sources/core/vertical_segment.h
sources/core/vertical_segment.cpp
sources/core/function.h
sources/core/function.cpp
sources/core/rational_function.h
sources/core/rational_function.cpp
sources/core/plugins_manager.h
sources/core/plugins_manager.cpp)
# Bundled QuadProg++ solver, consumed by the quadprog-based rational fitters.
add_library(quadprog STATIC
external/quadprog++/QuadProg++.hh
external/quadprog++/QuadProg++.cc)
# The plugin manager presumably loads plugins at runtime, hence the
# platform dynamic-loading library (libdl on Linux) -- TODO confirm.
target_link_libraries(core ${CMAKE_DL_LIBS})
#############################
# Plugins #
#############################
# set(CMAKE_SHARED_LIBRARY_PREFIX "")
# Shared plugin libraries are collected under <build>/plugins, the path
# exported to tests via ALTA_PLUGIN_PATH below.
set(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/plugins)
# Data
alta_add_plugin(data_astm data_io/astm.cpp)
alta_add_plugin(data_merl data_io/merl.cpp)
alta_add_plugin(data_utia data_io/utia.cpp)
alta_add_plugin(data_brdf_slice data_io/slice.cpp)
alta_add_plugin(data_grid data_interpolants/grid.cpp)
# RBF interpolant needs the optional FLANN headers (probed above).
if(FLANN_FOUND)
alta_add_plugin(data_rbf data_interpolants/rbf.cpp)
endif()
#alta_add_plugin(data_matlab data_interpolants/matlab.cpp)
# Functions
alta_add_plugin(rational_function_legendre rational_function_legendre/rational_function.cpp)
alta_add_plugin(rational_function_chebychev rational_function_chebychev/rational_function.cpp)
alta_add_plugin(rational_function_cosine rational_function_cosine/rational_function.cpp)
alta_add_plugin(nonlinear_function_abc nonlinear_function_abc/function.cpp)
alta_add_plugin(nonlinear_function_beckmann nonlinear_function_beckmann/function.cpp)
alta_add_plugin(nonlinear_function_blinn nonlinear_function_blinn/function.cpp)
alta_add_plugin(nonlinear_function_diffuse nonlinear_function_diffuse/function.cpp)
alta_add_plugin(nonlinear_function_lafortune nonlinear_function_lafortune/function.cpp)
alta_add_plugin(nonlinear_function_sgd nonlinear_function_sgd/function.cpp)
alta_add_plugin(nonlinear_function_spherical_gaussian nonlinear_function_spherical_gaussian/function.cpp)
alta_add_plugin(nonlinear_function_ward nonlinear_function_ward/function.cpp)
alta_add_plugin(nonlinear_function_retrobeckmann nonlinear_function_retrobeckmann/function.cpp)
alta_add_plugin(nonlinear_function_retroblinn nonlinear_function_retroblinn/function.cpp)
alta_add_plugin(nonlinear_function_retroyoo nonlinear_function_retroyoo/function.cpp)
alta_add_plugin(nonlinear_fresnel_retroschlick nonlinear_fresnel_retroschlick/function.cpp)
alta_add_plugin(nonlinear_fresnel_schlick nonlinear_fresnel_schlick/function.cpp)
alta_add_plugin(nonlinear_fresnel_normalized_schlick nonlinear_fresnel_normalized_schlick/function.cpp)
alta_add_plugin(nonlinear_shadowing_schlick nonlinear_shadowing_schlick/function.cpp)
alta_add_plugin(nonlinear_shadowing_smith nonlinear_shadowing_smith/function.cpp)
alta_add_plugin(nonlinear_shadowing_walter_smith nonlinear_shadowing_walter_smith/function.cpp)
# Fitters
alta_add_plugin(rational_fitter_eigen rational_fitter_eigen/rational_fitter.cpp)
alta_add_plugin(rational_fitter_leastsquare rational_fitter_leastsquare/rational_fitter.cpp)
alta_add_plugin(rational_fitter_quadprog rational_fitter_quadprog/rational_fitter.cpp)
alta_add_plugin(rational_fitter_parallel rational_fitter_parallel/rational_fitter.cpp)
alta_add_plugin(nonlinear_fitter_eigen nonlinear_fitter_eigen/fitter.cpp)
# These two fitters additionally need the bundled QuadProg++ solver.
target_link_libraries(rational_fitter_quadprog quadprog)
target_link_libraries(rational_fitter_parallel quadprog)
# TODO: Add check before compiling IPOPT
# Optional fitters, only built when their solver package was found above.
if (CERES_FOUND)
include_directories( ${CERES_INCLUDE_DIR} )
alta_add_plugin(nonlinear_fitter_ceres nonlinear_fitter_ceres/fitter.cpp)
target_link_libraries(nonlinear_fitter_ceres ${CERES_LIBRARIES})
endif()
if (NLOPT_FOUND)
include_directories( ${NLOPT_INCLUDE_DIRS} )
alta_add_plugin(nonlinear_fitter_nlopt nonlinear_fitter_nlopt/fitter.cpp)
target_link_libraries(nonlinear_fitter_nlopt ${NLOPT_LIBRARIES})
endif()
#alta_add_plugin(nonlinear_fitter_ipopt nonlinear_fitter_ipopt/fitter.cpp)
# Python bindings
# Requires both the Python development files and the vendored pybind11
# headers. The module must be named "alta" with no "lib" prefix so that
# Python can import it; on macOS the default ".dylib" suffix is replaced
# by ".so" for the same reason.
if(PYTHONLIBS_FOUND AND PYBIND_FOUND)
set(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/python)
include_directories(${PYTHON_INCLUDE_DIRS})
add_library(alta SHARED sources/python/alta.cpp)
target_link_libraries(alta core ${PYTHON_LIBRARIES})
set_target_properties(alta PROPERTIES PREFIX "")
if(APPLE)
set_target_properties(alta PROPERTIES SUFFIX ".so")
endif()
endif()
#############################
# Softs #
#############################
# Command-line tool executables are collected under <build>/softs
# (the path used by alta_test_soft above).
set(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/softs)
alta_add_soft(brdf2brdf brdf2brdf/main.cpp)
alta_add_soft(brdf2data brdf2data/main.cpp)
alta_add_soft(brdf2gnuplot brdf2gnuplot/main.cpp)
#alta_add_soft(brdf2moments brdf2moments/main.cpp)
# Note: target name differs from its source directory (fit2stat).
alta_add_soft(brdf2stats fit2stat/fit2stat.cpp)
alta_add_soft(data2data data2data/main.cpp)
alta_add_soft(data2brdf data2brdf/main.cpp)
alta_add_soft(data2stats data2stats/data2stats.cpp)
alta_add_soft(data2moments data2moments/main.cpp)
#############################
# Tests #
#############################
enable_testing()
# Test all softs with `--help` option
alta_test_soft(brdf2data)
alta_test_soft(data2data)
alta_test_soft(data2stats)
alta_test_soft(data2brdf)
# From here on, test executables and test artifacts live in <build>/tests.
set(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/tests)
include_directories("sources/tests")
# Unit test
alta_test_unit(test_data_io core/data-io.cpp)
alta_test_unit(half-test-1 core/half-test-1.cpp)
alta_test_unit(half-test-2 core/half-test-2.cpp)
alta_test_unit(half-test-3 core/half-test-3.cpp)
alta_test_unit(half-test-4 core/half-test-4.cpp)
alta_test_unit(nonlinear-fit core/nonlinear-fit.cpp)
alta_test_unit(params-test-1 core/params-test-1.cpp)
alta_test_unit(params-test-2 core/params-test-2.cpp)
# NOTE(review): CPPQUICKCHECK_FOUND is not set anywhere in this file --
# presumably a find module is expected to define it; verify, otherwise
# this test is never registered.
if(CPPQUICKCHECK_FOUND)
alta_test_unit(params-qc-1 core/params-qc-1.cpp)
endif()
# Integration test for rational function fitting
# Fit the Kirby2 dataset with each rational fitter; each test produces
# Kirby2.func in <build>/tests and needs the plugin search path.
foreach(fitter IN ITEMS eigen leastsquare quadprog parallel)
add_test(NAME "data2dbrdf_kirby_${fitter}"
COMMAND "data2brdf" "--input" "${CMAKE_SOURCE_DIR}/sources/tests/Kirby2.dat"
"--output" "Kirby2.func"
"--fitter" "rational_fitter_${fitter}"
WORKING_DIRECTORY "${CMAKE_BINARY_DIR}/tests")
set_tests_properties("data2dbrdf_kirby_${fitter}"
PROPERTIES ENVIRONMENT "ALTA_PLUGIN_PATH=${CMAKE_BINARY_DIR}/plugins")
endforeach()
# Round-trip: evaluate the fitted function back into a data file, then
# compare it against the reference dataset. These depend on the outputs
# of the fitting tests above (CTest runs them in declaration order by
# default -- no explicit DEPENDS is set).
add_test(NAME "brdf2data_kirby"
COMMAND "brdf2data" "--input" "Kirby2.func"
"--output" "Kirby2.dat"
"--data-file" "${CMAKE_SOURCE_DIR}/sources/tests/Kirby2.dat"
WORKING_DIRECTORY "${CMAKE_BINARY_DIR}/tests")
add_test(NAME "data2stats_kirby"
COMMAND "data2stats" "--input" "Kirby2.dat"
"--ref" "${CMAKE_SOURCE_DIR}/sources/tests/Kirby2.dat"
WORKING_DIRECTORY "${CMAKE_BINARY_DIR}/tests")
# Integration tests using MERL data
#
alta_download_merl("gold-metallic-paint")
# NOTE(review): this only prints an informational message; configuration
# continues and the dependent tests below are simply skipped when the
# download failed.
if(NOT gold-metallic-paint_FOUND)
message("Error checking for gold-metallic-paint.binary but not found")
endif()
if(gold-metallic-paint_FOUND)
# Converting back an forth between MERL format and BRDF Slice format
# At the end of this batch, there should be multiple file of the same
# view of the gold metallic paint.
#
add_test(NAME "data2data_gold_slice_stark"
COMMAND "data2data" "--input" "gold-metallic-paint.binary"
"--output" "gold-metallic-paint-stark.exr"
"--in-data" "data_merl"
"--out-data" "data_brdf_slice"
WORKING_DIRECTORY "${CMAKE_BINARY_DIR}/tests")
add_test(NAME "data2data_gold_slice_merl"
COMMAND "data2data" "--input" "gold-metallic-paint-stark.exr"
"--output" "gold-metallic-paint-2.binary"
"--in-data" "data_brdf_slice"
"--out-data" "data_merl"
WORKING_DIRECTORY "${CMAKE_BINARY_DIR}/tests")
add_test(NAME "data2data_gold_slice_rusin_1"
COMMAND "data2data" "--input" "gold-metallic-paint.binary"
"--output" "gold-metallic-paint-rusin-1.exr"
"--in-data" "data_merl"
"--param" "RUSIN_TH_TD"
"--out-data" "data_brdf_slice"
WORKING_DIRECTORY "${CMAKE_BINARY_DIR}/tests")
add_test(NAME "data2data_gold_alta_rusin_1"
COMMAND "data2data" "--input" "gold-metallic-paint-rusin-1.exr"
"--output" "gold-metallic-paint-rusin-1.alta"
"--in-data" "data_brdf_slice"
WORKING_DIRECTORY "${CMAKE_BINARY_DIR}/tests")
add_test(NAME "data2data_gold_slice_rusin_2"
COMMAND "data2data" "--input" "gold-metallic-paint.binary"
"--output" "gold-metallic-paint-rusin-2.exr"
"--in-data" "data_merl"
"--param" "RUSIN_TH_TD_PD"
"--angle" "90"
"--out-data" "data_brdf_slice"
WORKING_DIRECTORY "${CMAKE_BINARY_DIR}/tests")
# Remove zeros from the BRDF slice plugin
#
add_test(NAME "data2data_gold_remove_zeros"
COMMAND "data2data" "--input" "gold-metallic-paint-stark.exr"
"--output" "gold-metallic-paint-stark.alta"
"--in-data" "data_brdf_slice"
"--ymin" "[0.001, 0.001, 0.001]"
WORKING_DIRECTORY "${CMAKE_BINARY_DIR}/tests")
# All conversion tests above need the plugin search path at runtime.
set_tests_properties("data2data_gold_slice_stark"
"data2data_gold_remove_zeros"
"data2data_gold_slice_merl"
"data2data_gold_slice_rusin_1"
"data2data_gold_alta_rusin_1"
"data2data_gold_slice_rusin_2"
PROPERTIES ENVIRONMENT "ALTA_PLUGIN_PATH=${CMAKE_BINARY_DIR}/plugins")
# Test all the rational fitters on the gold metallic data
# At the end of this pass, there should be one ALTA function file per fitter
#
foreach(fitter IN ITEMS eigen leastsquare quadprog parallel)
add_test(NAME "data2dbrdf_gold_rf_${fitter}"
COMMAND "data2brdf" "--output" "gold-metallic-paint-rf-${fitter}.func"
"--input" "gold-metallic-paint-rusin-1.alta"
"--fitter" "rational_fitter_${fitter}"
"--min-np" "100"
"--np" "100"
"--nq" "50"
WORKING_DIRECTORY "${CMAKE_BINARY_DIR}/tests")
set_tests_properties("data2dbrdf_gold_rf_${fitter}"
PROPERTIES ENVIRONMENT "ALTA_PLUGIN_PATH=${CMAKE_BINARY_DIR}/plugins")
add_test(NAME "brdf2data_gold_rf_${fitter}"
COMMAND "brdf2data" "--input" "gold-metallic-paint-rf-${fitter}.func"
"--output" "gold-metallic-paint-${fitter}.exr"
"--data" "data_brdf_slice"
"--data-file" "gold-metallic-paint-rusin-1.exr"
WORKING_DIRECTORY "${CMAKE_BINARY_DIR}/tests")
set_tests_properties("brdf2data_gold_rf_${fitter}"
PROPERTIES ENVIRONMENT "ALTA_PLUGIN_PATH=${CMAKE_BINARY_DIR}/plugins")
endforeach()
# Test all the nonlinear fitters on the gold metallic data
# At the end of this pass, there should be one ALTA function file per fitter
#
# Note: ceres/nlopt fitter plugins are only built when the corresponding
# package was found; their tests will fail to load the plugin otherwise.
foreach(fitter IN ITEMS eigen ceres nlopt)
add_test(NAME "data2dbrdf_gold_${fitter}"
COMMAND "data2brdf" "--input" "gold-metallic-paint-stark.alta"
"--output" "gold-metallic-paint-${fitter}.func"
"--fitter" "nonlinear_fitter_${fitter}"
"--func" "[nonlinear_function_diffuse, nonlinear_function_blinn]"
WORKING_DIRECTORY "${CMAKE_BINARY_DIR}/tests")
set_tests_properties("data2dbrdf_gold_${fitter}"
PROPERTIES ENVIRONMENT "ALTA_PLUGIN_PATH=${CMAKE_BINARY_DIR}/plugins")
endforeach()
endif()
# Catch-based conversion test needs both the downloaded data and the
# vendored Catch headers (probed at the top of the file).
if(gold-metallic-paint_FOUND AND CATCH_FOUND)
alta_test_unit(conversion-catch-1 core/conversion-catch-1.cpp )
endif()
# add a target to generate API documentation with Doxygen
if(DOXYGEN_FOUND)
add_custom_target(doc
${DOXYGEN_EXECUTABLE} ${CMAKE_SOURCE_DIR}/documents/doxygen.conf
WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}/documents/
COMMENT "Generating API documentation with Doxygen" VERBATIM)
endif(DOXYGEN_FOUND)
# Ceres Solver - A fast non-linear least squares minimizer
# Copyright 2015 Google Inc. All rights reserved.
# http://ceres-solver.org/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of Google Inc. nor the names of its contributors may be
# used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# Authors: pablo.speciale@gmail.com (Pablo Speciale)
# alexs.mac@gmail.com (Alex Stewart)
#
# Config file for Ceres Solver - Find Ceres & dependencies.
#
# This file is used by CMake when find_package(Ceres) is invoked and either
# the directory containing this file either is present in CMAKE_MODULE_PATH
# (if Ceres was installed), or exists in the local CMake package registry if
# the Ceres build directory was exported.
#
# This module defines the following variables:
#
# Ceres_FOUND / CERES_FOUND: True if Ceres has been successfully
# found. Both variables are set as although
# FindPackage() only references Ceres_FOUND
# in Config mode, given the conventions for
# <package>_FOUND when FindPackage() is
# called in Module mode, users could
# reasonably expect to use CERES_FOUND
# instead.
#
# CERES_VERSION: Version of Ceres found.
#
# CERES_INCLUDE_DIRS: Include directories for Ceres and the
# dependencies which appear in the Ceres public
# API and are thus required to use Ceres.
#
# CERES_LIBRARIES: Libraries for Ceres and all
# dependencies against which Ceres was
# compiled. This will not include any optional
# dependencies that were disabled when Ceres was
# compiled.
#
# The following variables are also defined for legacy compatibility
# only. Any new code should not use them as they do not conform to
# the standard CMake FindPackage naming conventions.
#
# CERES_INCLUDES = ${CERES_INCLUDE_DIRS}.
# Called if we failed to find Ceres or any of its required dependencies,
# unsets all public (designed to be used externally) variables and reports
# error message at priority depending upon [REQUIRED/QUIET/<NONE>] argument.
#
# NOTE: this must remain a macro, not a function -- the final return()
# exits the scope of the *caller* (this config script), aborting the rest
# of the Ceres detection logic.
macro(CERES_REPORT_NOT_FOUND REASON_MSG)
# FindPackage() only references Ceres_FOUND, and requires it to be
# explicitly set FALSE to denote not found (not merely undefined).
set(Ceres_FOUND FALSE)
set(CERES_FOUND FALSE)
unset(CERES_INCLUDE_DIRS)
unset(CERES_LIBRARIES)
# Reset the CMake module path to its state when this script was called.
set(CMAKE_MODULE_PATH ${CALLERS_CMAKE_MODULE_PATH})
# Note <package>_FIND_[REQUIRED/QUIETLY] variables defined by
# FindPackage() use the camelcase library name, not uppercase.
if (Ceres_FIND_QUIETLY)
message(STATUS "Failed to find Ceres - " ${REASON_MSG} ${ARGN})
elseif (Ceres_FIND_REQUIRED)
message(FATAL_ERROR "Failed to find Ceres - " ${REASON_MSG} ${ARGN})
else()
# Neither QUIETLY nor REQUIRED, use SEND_ERROR which emits an error
# that prevents generation, but continues configuration.
message(SEND_ERROR "Failed to find Ceres - " ${REASON_MSG} ${ARGN})
endif ()
return()
endmacro(CERES_REPORT_NOT_FOUND)
# ceres_pretty_print_cmake_list( OUTPUT_VAR [item1 [item2 ... ]] )
#
# Formats the trailing arguments as the human-readable string
# "[item1, item2, ..., itemN]" and stores it in ${OUTPUT_VAR} in the
# caller's scope.
function(ceres_pretty_print_cmake_list OUTPUT_VAR)
  # ${ARGN} is a ;-separated list; swapping each ";" for ", " inside the
  # bracketed string yields the pretty form.
  set(pretty_string "[${ARGN}]")
  string(REPLACE ";" ", " pretty_string "${pretty_string}")
  set(${OUTPUT_VAR} "${pretty_string}" PARENT_SCOPE)
endfunction()
# The list of (optional) components this version of Ceres was compiled with.
set(CERES_COMPILED_COMPONENTS "EigenSparse;SparseLinearAlgebraLibrary;SchurSpecializations;C++11;OpenMP;Multithreading")
# If Ceres was not installed, then by definition it was exported
# from a build directory.
# (Constant baked in at Ceres configure time; selects which of the two
# layout branches below runs.)
set(CERES_WAS_INSTALLED TRUE)
# Record the state of the CMake module path when this script was
# called so that we can ensure that we leave it in the same state on
# exit as it was on entry, but modify it locally.
set(CALLERS_CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH})
# Get the (current, i.e. installed) directory containing this file.
get_filename_component(CERES_CURRENT_CONFIG_DIR
"${CMAKE_CURRENT_LIST_FILE}" PATH)
if (CERES_WAS_INSTALLED)
# Reset CMake module path to the installation directory of this
# script, thus we will use the FindPackage() scripts shipped with
# Ceres to find Ceres' dependencies, even if the user has equivalently
# named FindPackage() scripts in their project.
set(CMAKE_MODULE_PATH ${CERES_CURRENT_CONFIG_DIR})
# Build the absolute root install directory as a relative path
# (determined when Ceres was configured & built) from the current
# install directory for this this file. This allows for the install
# tree to be relocated, after Ceres was built, outside of CMake.
get_filename_component(CURRENT_ROOT_INSTALL_DIR
${CERES_CURRENT_CONFIG_DIR}/../
ABSOLUTE)
if (NOT EXISTS ${CURRENT_ROOT_INSTALL_DIR})
ceres_report_not_found(
"Ceres install root: ${CURRENT_ROOT_INSTALL_DIR}, "
"determined from relative path from CeresConfig.cmake install location: "
"${CERES_CURRENT_CONFIG_DIR}, does not exist. Either the install "
"directory was deleted, or the install tree was only partially relocated "
"outside of CMake after Ceres was built.")
endif (NOT EXISTS ${CURRENT_ROOT_INSTALL_DIR})
# Set the include directories for Ceres (itself).
set(CERES_INCLUDE_DIR "${CURRENT_ROOT_INSTALL_DIR}/include")
if (NOT EXISTS ${CERES_INCLUDE_DIR}/ceres/ceres.h)
ceres_report_not_found(
"Ceres install root: ${CURRENT_ROOT_INSTALL_DIR}, "
"determined from relative path from CeresConfig.cmake install location: "
"${CERES_CURRENT_CONFIG_DIR}, does not contain Ceres headers. "
"Either the install directory was deleted, or the install tree was only "
"partially relocated outside of CMake after Ceres was built.")
endif (NOT EXISTS ${CERES_INCLUDE_DIR}/ceres/ceres.h)
list(APPEND CERES_INCLUDE_DIRS ${CERES_INCLUDE_DIR})
else(CERES_WAS_INSTALLED)
# Ceres was exported from the build tree.
set(CERES_EXPORTED_BUILD_DIR ${CERES_CURRENT_CONFIG_DIR})
get_filename_component(CERES_EXPORTED_SOURCE_DIR
${CERES_EXPORTED_BUILD_DIR}/../
ABSOLUTE)
if (NOT EXISTS ${CERES_EXPORTED_SOURCE_DIR})
ceres_report_not_found(
"Ceres exported source directory: ${CERES_EXPORTED_SOURCE_DIR}, "
"determined from relative path from CeresConfig.cmake exported build "
"directory: ${CERES_EXPORTED_BUILD_DIR} does not exist.")
endif()
# Reset CMake module path to the cmake directory in the Ceres source
# tree which was exported, thus we will use the FindPackage() scripts shipped
# with Ceres to find Ceres' dependencies, even if the user has equivalently
# named FindPackage() scripts in their project.
set(CMAKE_MODULE_PATH ${CERES_EXPORTED_SOURCE_DIR}/cmake)
# Set the include directories for Ceres (itself).
set(CERES_INCLUDE_DIR "${CERES_EXPORTED_SOURCE_DIR}/include")
if (NOT EXISTS ${CERES_INCLUDE_DIR}/ceres/ceres.h)
ceres_report_not_found(
"Ceres exported source directory: ${CERES_EXPORTED_SOURCE_DIR}, "
"determined from relative path from CeresConfig.cmake exported build "
"directory: ${CERES_EXPORTED_BUILD_DIR}, does not contain Ceres headers.")
endif (NOT EXISTS ${CERES_INCLUDE_DIR}/ceres/ceres.h)
list(APPEND CERES_INCLUDE_DIRS ${CERES_INCLUDE_DIR})
# Append the path to the configured config.h in the exported build directory
# to the Ceres include directories.
set(CERES_CONFIG_FILE
${CERES_EXPORTED_BUILD_DIR}/config/ceres/internal/config.h)
if (NOT EXISTS ${CERES_CONFIG_FILE})
ceres_report_not_found(
"Ceres exported build directory: ${CERES_EXPORTED_BUILD_DIR}, "
"does not contain required configured Ceres config.h, it is not here: "
"${CERES_CONFIG_FILE}.")
endif (NOT EXISTS ${CERES_CONFIG_FILE})
list(APPEND CERES_INCLUDE_DIRS ${CERES_EXPORTED_BUILD_DIR}/config)
endif(CERES_WAS_INSTALLED)
# Set the version.
set(CERES_VERSION 1.14.0)
# Eigen.
# Flag set during configuration and build of Ceres.
set(CERES_EIGEN_VERSION 3.3.4)
set(EIGEN_WAS_BUILT_WITH_CMAKE TRUE)
# Append the locations of Eigen when Ceres was built to the search path hints.
if (EIGEN_WAS_BUILT_WITH_CMAKE)
  # FIX: quote the path -- "C:/Program Files/..." contains a space and,
  # unquoted, is split at the space into a two-element list, so
  # find_package(Eigen) would never locate the exported config.
  set(Eigen3_DIR "C:/Program Files/Eigen3/share/eigen3/cmake")
  set(EIGEN_PREFER_EXPORTED_EIGEN_CMAKE_CONFIGURATION TRUE)
else()
  # FIX: quoted for the same reason as above.
  list(APPEND EIGEN_INCLUDE_DIR_HINTS "C:/Program Files/Eigen3/include/eigen3")
endif()
# Search quietly to control the timing of the error message if not found. The
# search should be for an exact match, but for usability reasons do a soft
# match and reject with an explanation below.
find_package(Eigen ${CERES_EIGEN_VERSION} QUIET)
if (EIGEN_FOUND)
  if (NOT EIGEN_VERSION VERSION_EQUAL CERES_EIGEN_VERSION)
    # CMake's VERSION check in FIND_PACKAGE() will accept any version >= the
    # specified version. However, only version = is supported. Improve
    # usability by explaining why we don't accept non-exact version matching.
    ceres_report_not_found("Found Eigen dependency, but the version of Eigen "
      "found (${EIGEN_VERSION}) does not exactly match the version of Eigen "
      "Ceres was compiled with (${CERES_EIGEN_VERSION}). This can cause subtle "
      "bugs by triggering violations of the One Definition Rule. See the "
      "Wikipedia article http://en.wikipedia.org/wiki/One_Definition_Rule "
      "for more details")
  endif()
  message(STATUS "Found required Ceres dependency: "
    "Eigen version ${CERES_EIGEN_VERSION} in ${EIGEN_INCLUDE_DIRS}")
else()
  ceres_report_not_found("Missing required Ceres "
    "dependency: Eigen version ${CERES_EIGEN_VERSION}, please set "
    "EIGEN_INCLUDE_DIR.")
endif()
list(APPEND CERES_INCLUDE_DIRS ${EIGEN_INCLUDE_DIRS})
# Glog.
# Flag set during configuration and build of Ceres.
set(CERES_USES_MINIGLOG OFF)
set(CERES_USES_GFLAGS ON)
if (CERES_USES_MINIGLOG)
  set(MINIGLOG_INCLUDE_DIR ${CERES_INCLUDE_DIR}/ceres/internal/miniglog)
  if (NOT CERES_WAS_INSTALLED)
    # When Ceres was exported from the build tree, the miniglog headers
    # will be in Ceres internal source directory, not in the public headers
    # directory (they are copied with the public headers when installed).
    set(MINIGLOG_INCLUDE_DIR
      ${CERES_EXPORTED_SOURCE_DIR}/internal/ceres/miniglog)
  endif()
  if (NOT EXISTS ${MINIGLOG_INCLUDE_DIR})
    ceres_report_not_found(
      "Failed to find miniglog headers in expected include directory: "
      "${MINIGLOG_INCLUDE_DIR}, but Ceres was compiled with MINIGLOG enabled "
      "(in place of glog).")
  endif()
  list(APPEND CERES_INCLUDE_DIRS ${MINIGLOG_INCLUDE_DIR})
  # Output message at standard log level (not the lower STATUS) so that
  # the message is output in GUI during configuration to warn user.
  message("-- Found Ceres compiled with miniglog substitute "
    "for glog, beware this will likely cause problems if glog is later linked.")
else()
  # Append the locations of glog when Ceres was built to the search path hints.
  set(GLOG_WAS_BUILT_WITH_CMAKE 1)
  if (GLOG_WAS_BUILT_WITH_CMAKE)
    # FIX: quote the path -- "C:/Program Files/..." contains a space and,
    # unquoted, splits into two list elements, breaking find_package(Glog).
    set(glog_DIR "C:/Program Files/glog/lib/cmake/glog")
    set(GLOG_PREFER_EXPORTED_GLOG_CMAKE_CONFIGURATION TRUE)
  else()
    list(APPEND GLOG_INCLUDE_DIR_HINTS )
    get_filename_component(CERES_BUILD_GLOG_LIBRARY_DIR glog::glog PATH)
    list(APPEND GLOG_LIBRARY_DIR_HINTS ${CERES_BUILD_GLOG_LIBRARY_DIR})
  endif()
  # Search quietly s/t we control the timing of the error message if not found.
  find_package(Glog QUIET)
  if (GLOG_FOUND)
    message(STATUS "Found required Ceres dependency: glog")
  else()
    ceres_report_not_found("Missing required Ceres "
      "dependency: glog. Searched using GLOG_INCLUDE_DIR_HINTS: "
      "${GLOG_INCLUDE_DIR_HINTS} and glog_DIR: ${glog_DIR}.")
  endif()
  list(APPEND CERES_INCLUDE_DIRS ${GLOG_INCLUDE_DIRS})
  # gflags is only a public dependency of Ceres via glog, thus is not required
  # if Ceres was built with MINIGLOG.
  if (CERES_USES_GFLAGS)
    # If gflags was found as an imported CMake target, we need to call
    # find_packge(Gflags) again here, as imported CMake targets are not
    # re-exported. Without this, the 'gflags-shared' target name which is
    # present in CERES_LIBRARIES in this case would not be defined, and so
    # CMake will assume it is a library name (which it is not) and fail to link.
    #
    # Append the locations of gflags when Ceres was built to the search path
    # hints.
    set(GFLAGS_WAS_BUILT_WITH_CMAKE 1)
    if (GFLAGS_WAS_BUILT_WITH_CMAKE)
      # FIX: quoted (space in path) -- see glog_DIR above.
      set(gflags_DIR "C:/Program Files/gflags/lib/cmake/gflags")
      set(GFLAGS_PREFER_EXPORTED_GFLAGS_CMAKE_CONFIGURATION TRUE)
    else()
      # FIX: quoted (space in path) -- see glog_DIR above.
      list(APPEND GFLAGS_INCLUDE_DIR_HINTS "C:/Program Files/gflags/include")
      get_filename_component(CERES_BUILD_GFLAGS_LIBRARY_DIR gflags_shared PATH)
      list(APPEND GFLAGS_LIBRARY_DIR_HINTS ${CERES_BUILD_GFLAGS_LIBRARY_DIR})
    endif()
    # Search quietly s/t we control the timing of the error message if not found.
    find_package(Gflags QUIET)
    if (GFLAGS_FOUND)
      message(STATUS "Found required Ceres dependency: gflags")
    else()
      ceres_report_not_found("Missing required Ceres "
        "dependency: gflags. Searched using GFLAGS_INCLUDE_DIR_HINTS: "
        "${GFLAGS_INCLUDE_DIR_HINTS} and gflags_DIR: ${gflags_DIR}.")
    endif()
    list(APPEND CERES_INCLUDE_DIRS ${GFLAGS_INCLUDE_DIR_HINTS})
  endif()
endif()
# Import exported Ceres targets, if they have not already been imported.
if (NOT TARGET ceres AND NOT Ceres_BINARY_DIR)
include(${CERES_CURRENT_CONFIG_DIR}/CeresTargets.cmake)
endif (NOT TARGET ceres AND NOT Ceres_BINARY_DIR)
# Set the expected XX_LIBRARIES variable for FindPackage().
set(CERES_LIBRARIES ceres)
# Make user aware of any compile flags that will be added to their targets
# which use Ceres (i.e. flags exported in the Ceres target). Only CMake
# versions >= 2.8.12 support target_compile_options/features().
if (CERES_COMPILED_COMPONENTS MATCHES ".*C\\+\\+11.*") # Search for C++11.
set(CERES_WAS_COMPILED_WITH_CXX11 TRUE)
endif()
if (TARGET ${CERES_LIBRARIES} AND
CERES_WAS_COMPILED_WITH_CXX11 AND
NOT CMAKE_VERSION VERSION_LESS "2.8.12")
if (CERES_WAS_INSTALLED)
set(CERES_LOCATION "${CURRENT_ROOT_INSTALL_DIR}")
else()
set(CERES_LOCATION "${CERES_EXPORTED_BUILD_DIR}")
endif()
message(STATUS "Ceres version ${CERES_VERSION} detected here: "
"${CERES_LOCATION} was built with C++11. Ceres "
"target will add C++11 flags to compile options for "
"targets using it.")
endif()
# Set legacy include directories variable for backwards compatibility.
set(CERES_INCLUDES ${CERES_INCLUDE_DIRS})
# Reset CMake module path to its state when this script was called.
set(CMAKE_MODULE_PATH ${CALLERS_CMAKE_MODULE_PATH})
# Build the detected Ceres version string to correctly capture whether it
# was installed, or exported.
ceres_pretty_print_cmake_list(CERES_COMPILED_COMPONENTS_STRING
${CERES_COMPILED_COMPONENTS})
if (CERES_WAS_INSTALLED)
set(CERES_DETECTED_VERSION_STRING "Ceres version: ${CERES_VERSION} "
"installed in: ${CURRENT_ROOT_INSTALL_DIR} with components: "
"${CERES_COMPILED_COMPONENTS_STRING}")
else (CERES_WAS_INSTALLED)
set(CERES_DETECTED_VERSION_STRING "Ceres version: ${CERES_VERSION} "
"exported from build directory: ${CERES_EXPORTED_BUILD_DIR} with "
"components: ${CERES_COMPILED_COMPONENTS_STRING}")
endif()
# If the caller requested specific Ceres components via
# find_package(Ceres COMPONENTS XXX YYY), verify each requested component
# against the set of components Ceres was actually compiled with.  Ceres is
# only reported as found when every requested component is present.
if (Ceres_FIND_COMPONENTS)
  foreach (component ${Ceres_FIND_COMPONENTS})
    list(FIND CERES_COMPILED_COMPONENTS ${component} component_index)
    # list(FIND) yields -1 when the element is absent, but if(VAR) treats any
    # non-zero number -- including negative ones -- as true, so the index must
    # be compared against -1 explicitly.
    if (component_index EQUAL -1)
      # Accumulate every missing component before aborting, so that the error
      # message reports the full set rather than just the first one found.
      list(APPEND MISSING_CERES_COMPONENTS ${component})
    endif()
  endforeach()

  if (MISSING_CERES_COMPONENTS)
    ceres_pretty_print_cmake_list(REQUESTED_CERES_COMPONENTS_STRING
      ${Ceres_FIND_COMPONENTS})
    ceres_pretty_print_cmake_list(MISSING_CERES_COMPONENTS_STRING
      ${MISSING_CERES_COMPONENTS})
    ceres_report_not_found("Missing requested Ceres components: "
      "${MISSING_CERES_COMPONENTS_STRING} (components requested: "
      "${REQUESTED_CERES_COMPONENTS_STRING}). Detected "
      "${CERES_DETECTED_VERSION_STRING}.")
  endif()
endif()
# As we use CERES_REPORT_NOT_FOUND() to abort, if we reach this point we have
# found Ceres and all required dependencies.
# NOTE: CERES_DETECTED_VERSION_STRING is a CMake list (it was set from several
# quoted fragments); expanding it UNQUOTED here is deliberate, so message()
# concatenates the fragments without separators.  Quoting it would print the
# ';' list separators.
message(STATUS "Found " ${CERES_DETECTED_VERSION_STRING})
# Set CERES_FOUND to be equivalent to Ceres_FOUND, which is set to
# TRUE by FindPackage() if this file is found and run, and after which
# Ceres_FOUND is not (explicitly, i.e. undefined does not count) set
# to FALSE.
set(CERES_FOUND TRUE)
# Ceres Solver - A fast non-linear least squares minimizer
# Copyright 2015 Google Inc. All rights reserved.
# http://ceres-solver.org/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of Google Inc. nor the names of its contributors may be
# used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# Author: pablo.speciale@gmail.com (Pablo Speciale)
#
# FIND_PACKAGE() searches for a <package>Config.cmake file and an associated
# <package>Version.cmake file, which it loads to check the version number.
#
# This file can be used with CONFIGURE_FILE() to generate such a file for a
# project with very basic logic.
#
# It sets PACKAGE_VERSION_EXACT if the current version string and the requested
# version string are exactly the same and it sets PACKAGE_VERSION_COMPATIBLE
# if the current version is >= requested version.
# Version compatibility logic for find_package(Ceres <version>):
#  - PACKAGE_VERSION_COMPATIBLE is TRUE when our version >= requested version.
#  - PACKAGE_VERSION_EXACT is additionally TRUE on an exact string match.
# Legacy repeated-condition arguments on else()/endif() removed (noise, and a
# maintenance hazard when the condition changes).
set(PACKAGE_VERSION 1.14.0)
if ("${PACKAGE_VERSION}" VERSION_LESS "${PACKAGE_FIND_VERSION}")
  set(PACKAGE_VERSION_COMPATIBLE FALSE)
else ()
  set(PACKAGE_VERSION_COMPATIBLE TRUE)
  if ("${PACKAGE_FIND_VERSION}" STREQUAL "${PACKAGE_VERSION}")
    set(PACKAGE_VERSION_EXACT TRUE)
  endif ()
endif ()
# - Try to find Eigen3 lib
#
# This module supports requiring a minimum version, e.g. you can do
# find_package(Eigen3 3.1.2)
# to require version 3.1.2 or newer of Eigen3.
#
# Once done this will define
#
# EIGEN3_FOUND - system has eigen lib with correct version
# EIGEN3_INCLUDE_DIR - the eigen include directory
# EIGEN3_VERSION - eigen version
#
# This module reads hints about search locations from
# the following environment variables:
#
# EIGEN3_ROOT
# EIGEN3_ROOT_DIR
# Copyright (c) 2006, 2007 Montel Laurent, <montel@kde.org>
# Copyright (c) 2008, 2009 Gael Guennebaud, <g.gael@free.fr>
# Copyright (c) 2009 Benoit Jacob <jacob.benoit.1@gmail.com>
# Redistribution and use is allowed according to the terms of the 2-clause BSD license.
# If the caller did not request a minimum version via
# find_package(Eigen3 <version>), default to 2.91.0 (the first Eigen3 beta),
# which effectively accepts any Eigen3 release.
# Legacy repeated-condition arguments on endif() removed.
if(NOT Eigen3_FIND_VERSION)
  if(NOT Eigen3_FIND_VERSION_MAJOR)
    set(Eigen3_FIND_VERSION_MAJOR 2)
  endif()
  if(NOT Eigen3_FIND_VERSION_MINOR)
    set(Eigen3_FIND_VERSION_MINOR 91)
  endif()
  if(NOT Eigen3_FIND_VERSION_PATCH)
    set(Eigen3_FIND_VERSION_PATCH 0)
  endif()

  set(Eigen3_FIND_VERSION "${Eigen3_FIND_VERSION_MAJOR}.${Eigen3_FIND_VERSION_MINOR}.${Eigen3_FIND_VERSION_PATCH}")
endif()
# Parse the Eigen version numbers out of Eigen's Macros.h header and set:
#   EIGEN3_WORLD/MAJOR/MINOR_VERSION - individual version components
#   EIGEN3_VERSION                   - "world.major.minor" version string
#   EIGEN3_VERSION_OK                - TRUE iff EIGEN3_VERSION >= Eigen3_FIND_VERSION
# Requires EIGEN3_INCLUDE_DIR to point at a valid Eigen3 include directory.
macro(_eigen3_check_version)
  file(READ "${EIGEN3_INCLUDE_DIR}/Eigen/src/Core/util/Macros.h" _eigen3_version_header)

  string(REGEX MATCH "define[ \t]+EIGEN_WORLD_VERSION[ \t]+([0-9]+)" _eigen3_world_version_match "${_eigen3_version_header}")
  set(EIGEN3_WORLD_VERSION "${CMAKE_MATCH_1}")
  string(REGEX MATCH "define[ \t]+EIGEN_MAJOR_VERSION[ \t]+([0-9]+)" _eigen3_major_version_match "${_eigen3_version_header}")
  set(EIGEN3_MAJOR_VERSION "${CMAKE_MATCH_1}")
  string(REGEX MATCH "define[ \t]+EIGEN_MINOR_VERSION[ \t]+([0-9]+)" _eigen3_minor_version_match "${_eigen3_version_header}")
  set(EIGEN3_MINOR_VERSION "${CMAKE_MATCH_1}")

  set(EIGEN3_VERSION ${EIGEN3_WORLD_VERSION}.${EIGEN3_MAJOR_VERSION}.${EIGEN3_MINOR_VERSION})

  # Quote both operands so the comparison stays well-formed even if a regex
  # above failed to match and a variable expanded to nothing.
  if("${EIGEN3_VERSION}" VERSION_LESS "${Eigen3_FIND_VERSION}")
    set(EIGEN3_VERSION_OK FALSE)
  else()
    set(EIGEN3_VERSION_OK TRUE)
  endif()

  if(NOT EIGEN3_VERSION_OK)
    message(STATUS "Eigen3 version ${EIGEN3_VERSION} found in ${EIGEN3_INCLUDE_DIR}, "
                   "but at least version ${Eigen3_FIND_VERSION} is required")
  endif()
endmacro()
# Locate Eigen3 and validate its version.
# Legacy repeated-condition arguments on else()/endif() removed.
if (EIGEN3_INCLUDE_DIR)

  # Already in the cache: just (re-)validate the version.
  _eigen3_check_version()
  set(EIGEN3_FOUND ${EIGEN3_VERSION_OK})

else ()

  # Search first if an Eigen3Config.cmake is available in the system;
  # if successful this would set EIGEN3_INCLUDE_DIR and the rest of
  # the script will work as usual.
  find_package(Eigen3 ${Eigen3_FIND_VERSION} NO_MODULE QUIET)

  if(NOT EIGEN3_INCLUDE_DIR)
    # Fall back to locating the headers ourselves; the
    # signature_of_eigen3_matrix_library file uniquely identifies an Eigen3
    # (as opposed to Eigen2) include directory.
    find_path(EIGEN3_INCLUDE_DIR NAMES signature_of_eigen3_matrix_library
        HINTS
        ENV EIGEN3_ROOT
        ENV EIGEN3_ROOT_DIR
        PATHS
        ${CMAKE_INSTALL_PREFIX}/include
        ${KDE4_INCLUDE_DIR}
        PATH_SUFFIXES eigen3 eigen
      )
  endif()

  if(EIGEN3_INCLUDE_DIR)
    _eigen3_check_version()
  endif()

  include(FindPackageHandleStandardArgs)
  find_package_handle_standard_args(Eigen3 DEFAULT_MSG EIGEN3_INCLUDE_DIR EIGEN3_VERSION_OK)

  mark_as_advanced(EIGEN3_INCLUDE_DIR)

endif()
# Copyright (c) 2011-2018, The DART development contributors
# All rights reserved.
#
# The list of contributors can be found at:
# https://github.com/dartsim/dart/blob/master/LICENSE
#
# This file is provided under the "BSD-style" License
# Find NLOPT
#
# This sets the following variables:
# NLOPT_FOUND
# NLOPT_INCLUDE_DIRS
# NLOPT_LIBRARIES
# NLOPT_DEFINITIONS
# NLOPT_VERSION
# 2018 : SMALL MODIFICATIONS FROM R.P. romain dot pacanowski @ institutoptique DOT fr
find_package(PkgConfig QUIET)

# Query pkg-config (when installed) for NLopt metadata; the PC_NLOPT_*
# variables below are only hints and may be empty.
pkg_check_modules(PC_NLOPT nlopt QUIET)

# Compile definitions reported by pkg-config.
set(NLOPT_DEFINITIONS ${PC_NLOPT_CFLAGS_OTHER})

# Include directories.
find_path(NLOPT_INCLUDE_DIRS
  NAMES nlopt.h
  HINTS ${PC_NLOPT_INCLUDEDIR}
  PATHS "${CMAKE_INSTALL_PREFIX}/include" "C:/Program Files/NLOPT/include" )

# Libraries: 'nlopt_cxx' is the library name when NLopt is built with the
# C++ algorithms enabled.
find_library(NLOPT_LIBRARIES
  NAMES nlopt nlopt_cxx
  HINTS ${PC_NLOPT_LIBDIR} "C:/Program Files/NLOPT/lib")

# Version (only known when pkg-config found the module).
set(NLOPT_VERSION ${PC_NLOPT_VERSION})

# Set NLOPT_FOUND if all the required variables (and, when requested, the
# version) are satisfied.
# NOTE: the redundant 'FAIL_MESSAGE DEFAULT_MSG' option was dropped -- the
# default failure message is used anyway, and the CMake documentation
# discourages custom FAIL_MESSAGEs.
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(NLOPT
  REQUIRED_VARS NLOPT_INCLUDE_DIRS NLOPT_LIBRARIES
  VERSION_VAR NLOPT_VERSION)
\ No newline at end of file
/*!
\page contribute ALTA Developer Guide
You can contribute and expand ALTA by providing new plugins. The three kinds
of plugins (\ref function, \ref data and \ref fitter) can be provided.
Examples of such plugins can be found in the `$ALTA/sources/plugins`
directory.
You can contribute and expand ALTA by providing new plugins. The three kinds of
plugins (\ref functions "functions", \ref datas "datas" and \ref fitters
"fitters") can be provided. Examples of such plugins can be found in the
`$ALTA/sources/plugins` directory. We advise using source version control to
communicate with the other developers.
ALTA has two repositories: one for active development (hosted on Inria’s
gitlab) and another for continuous integration and pull-requests (hosted on
github). We advise internal developers to use the gitlab platform (for
development branches) and external contributors to make forks using github.
## Local Configuration
To work easily with both remote repositories in your local repository, you need
to setup them as different remotes. When doing your initial clone, we advise to
set the name corresponding to the remote you are using. For example, to clone
the gitlab instance, use the following command:
$ git clone -o gitlab https://gitlab.inria.fr/alta/alta.git
To add the github remote (once you have a running local repository), you will
need to do:
$ git remote add github https://github.com/belcour/alta.git
Once both remotes are set, we advise fetching all the tags and branch names to
see the various development branches. However, you only need access to both
master branches (gitlab and github) to work. If you are an external developer,
forking the github repository is sufficient.
## Create a New Branch (internal only)
Development branches should be created on the gitlab remote if possible. We
advise syncing with the master branch first:
$ git checkout gitlab/master
$ git checkout -b my_branch --track gitlab/my_branch
## Continuous Integration & Merge Request
Push finished branches on the github remote for continuous integration or
create a pull request there:
$ git push -f github my_branch
Then, one of the internal developers will merge on the master and close the
pull request.
## Sync (internal only)
Once your changes are merged into github’s master branch and all the continuous
integration tests are green, you can sync the two remotes together by
forwarding gitlab master on top of github master:
$ git checkout gitlab/master
$ git reset --hard github/master
$ git push -f gitlab master
<center style="color:red">
Work in progress
</center>
*/
......@@ -3,11 +3,12 @@
<tab type="mainpage" visible="yes" title="Overview"/>
<tab type="user" url="@ref features" title="Features"/>
<tab type="user" url="@ref install" title="Installation"/>
<tab type="user" url="@ref tutorials" title="Tutorials"/>
<tab type="user" url="@ref documentation" title="API &amp; References" />
<tab type="user" url="@ref tools" title="Tools"/>
<tab type="user" url="@ref data" title="Data"/>
<tab type="user" url="@ref license" title="License"/>
<tab type="user" url="@ref tutorials" title="Tutorials"/>
<tab type="user" url="@ref documentation" title="API &amp; References" />
<tab type="user" url="@ref tools" title="Tools"/>
<tab type="user" url="@ref data" title="Data"/>
<tab type="user" url="@ref license" title="License"/>
<tab type="user" url="@ref contribute" title="Contribute"/>
<tab type="user" url="@ref contacts" title="Contacts"/>
</navindex>
......
......@@ -22,6 +22,7 @@ And Python scripts using:
+ <a href="tutorial3.html">BRDF Data conversion</a>
+ <a href="tutorial2.html">BRDF Rational fitting</a>
+ <a href="tutorial1.html">BRDF Non-Linear fitting</a>
+ <a href="tutorial-export.html">Use BRDF data and functions</a>
### Tutorial using the XML interface
......@@ -306,6 +307,27 @@ compareRational.update();
\page tutorial-export Export BRDF data and functions
It is possible to use ALTA BRDF fit or BRDF data inside different rendering engines. In this tutorial, we will see different ways to use the outputs of ALTA in different softwares. You can find the list of tools and renderers that can be used using ALTA's data or function in the \ref tools "tools page".
## Using BRDF Explorer
Disney's [BRDF Explorer][brdf-explorer] is a very good tool to check fits and export a first render. The simplest way to use a data or function from ALTA in this software is to convert them into a MERL data object. For example, if we have a BRDF function file `data.brdf`, we can use the following command to convert it to a MERL data file:
$ brdf2data --input function.brdf --output data.binary --data data_merl
Once the file `data.binary` is created, it can be viewed inside BRDF Explorer and compared to another BRDF file or shader for example.
It is also possible to generate a BRDF Explorer shader from an ALTA function when the functionality is encoded in the function plugin using the following command:
$ brdf2brdf --input function.brdf --output function.shader --export explorer
This will generate `function.shader`, a BRDF Explorer compatible shader.
[brdf-explorer]: https://www.disneyanimation.com/technology/brdf.html
\page xml-tutorial1 Using the XML interface
ALTA \ref commands "commands" can be performed using an XML specification, to simplify sharing of fitting, conversion and analysis procedures. The following script performs the fitting of the <tt>blue-metallic-paint</tt> from the MERL database using a Beckmann lobe (note there is no shadowing term, nor Fresnel term):
......
Subproject commit f117a48ea2fd446d2865826a58d08027d4579cb3
......@@ -24,7 +24,7 @@
using namespace alta;
//#define DEBUG
//#define DEBUG_CORE
//! Add dynamic library extension (.so or .dll) to a dynamic object as well as
......
......@@ -22,7 +22,7 @@
#include <cmath>
#include <utility>
using namespace alta;
namespace alta {
/*! \ingroup datas
* \ingroup plugins
......@@ -56,7 +56,7 @@ public: //methods
: vertical_segment(params, size, input_data)
{ }
ASTM(const parameters& params, size_t size)
ASTM(const alta::parameters& params, size_t size)
: vertical_segment(params, size)
{ }
};
......@@ -159,10 +159,9 @@ ALTA_DLL_EXPORT data* load_data(std::istream& input, const arguments& args)
{
assert(input.good());
std::getline(input, line);
if(line.size() == 0 || line.rfind(',') == std::string::npos)
continue;
do {
std::getline(input, line);
} while(line.size() == 0 || line.rfind(',') == std::string::npos);
std::replace(line.begin(), line.end(), ',', ' ');
......@@ -172,7 +171,9 @@ ALTA_DLL_EXPORT data* load_data(std::istream& input, const arguments& args)
for(int j = 0; j < n; ++j) {
stream >> data[i + j];
}
}
}
return new ASTM(params, size, std::shared_ptr<double>(data));
}
return new ASTM(params, n, std::shared_ptr<double>(data));
}
......@@ -19,6 +19,7 @@
#include <core/data.h>
#include <core/args.h>
#include <core/common.h>
#include <core/params.h>
using namespace alta;
......
<
......@@ -19,15 +19,17 @@
#include <cmath>
#include <core/common.h>
#include <core/params.h>
using namespace alta;
ALTA_DLL_EXPORT function* provide_function(const parameters& params)
ALTA_DLL_EXPORT function* provide_function(const alta::parameters& params)
{
return new lafortune_function(params);
}
lafortune_function::lafortune_function(const parameters& params)
lafortune_function::lafortune_function(const alta::parameters& params) :
nonlinear_function(params)
{
auto nY = params.dimY();
......@@ -50,7 +52,8 @@ vec lafortune_function::operator()(const vec& x) const
}
vec lafortune_function::value(const vec& x) const
{
vec res(dimY());
const int nY = _parameters.dimY();
vec res(nY);
#ifdef ADAPT_TO_PARAM
vec y(6);
......@@ -69,7 +72,7 @@ vec lafortune_function::value(const vec& x) const
#endif
// For each color channel
for(int i=0; i<dimY(); ++i)
for(int i=0; i<nY; ++i)
{
// Start with the diffuse term
res[i] = _kd[i];
......@@ -97,7 +100,8 @@ vec lafortune_function::value(const vec& x, const vec& p)
assert(p.size() == nbParameters());
setParameters(p);
vec res(dimY());
const int nY = _parameters.dimY();
vec res(nY);
#ifdef ADAPT_TO_PARAM
vec y(6);
......@@ -116,7 +120,7 @@ vec lafortune_function::value(const vec& x, const vec& p)
#endif
// For each lobe and for each color channel
for(int i=0; i<dimY(); ++i)
for(int i=0; i<nY; ++i)
{
// Start with the diffuse
res[i] = _kd[i];
......@@ -140,6 +144,7 @@ vec lafortune_function::value(const vec& x, const vec& p)
void lafortune_function::setNbLobes(int N)
{
_n = N;
const int _nY = _parameters.dimY();
// Update the length of the vectors
if(_isotropic)
......@@ -152,47 +157,49 @@ void lafortune_function::setNbLobes(int N)
//! Number of parameters to this non-linear function
int lafortune_function::nbParameters() const
{
const int nY = _parameters.dimY();
#ifdef FIT_DIFFUSE
if(_isotropic)
return (3*_n+1)*dimY();
return (3*_n+1)*nY;
else
return (4*_n+1)*dimY();
return (4*_n+1)*nY;
#else
if(_isotropic)
return (3*_n)*dimY();
return (3*_n)*nY;
else
return (4*_n)*dimY();
return (4*_n)*nY;
#endif
}
//! Get the vector of parameters for the function
vec lafortune_function::parameters() const
{
const int nY = _parameters.dimY();
vec res(nbParameters());
for(int n=0; n<_n; ++n)
for(int i=0; i<dimY(); ++i)
for(int i=0; i<nY; ++i)
{
if(_isotropic)
{
res[(n*dimY() + i)*3 + 0] = _C[(n*dimY() + i)*2 + 0];
res[(n*dimY() + i)*3 + 1] = _C[(n*dimY() + i)*2 + 1];
res[(n*dimY() + i)*3 + 2] = _N[n*dimY() + i];
res[(n*nY + i)*3 + 0] = _C[(n*nY + i)*2 + 0];
res[(n*nY + i)*3 + 1] = _C[(n*nY + i)*2 + 1];
res[(n*nY + i)*3 + 2] = _N[n*nY + i];
}
else
{
res[(n*dimY() + i)*4 + 0] = _C[(n*dimY() + i)*3 + 0];
res[(n*dimY() + i)*4 + 1] = _C[(n*dimY() + i)*3 + 1];
res[(n*dimY() + i)*4 + 2] = _C[(n*dimY() + i)*3 + 2];
res[(n*dimY() + i)*4 + 3] = _N[n*dimY() + i];
res[(n*nY + i)*4 + 0] = _C[(n*nY + i)*3 + 0];
res[(n*nY + i)*4 + 1] = _C[(n*nY + i)*3 + 1];
res[(n*nY + i)*4 + 2] = _C[(n*nY + i)*3 + 2];
res[(n*nY + i)*4 + 3] = _N[n*nY + i];
}
}
#ifdef FIT_DIFFUSE
for(int i=0; i<dimY(); ++i)
for(int i=0; i<nY; ++i)
{
if(_isotropic)
{
res[3*_n*dimY() + i] = _kd[i];
res[3*_n*nY + i] = _kd[i];
}
}
#endif
......@@ -202,21 +209,22 @@ vec lafortune_function::parameters() const
//! Update the vector of parameters for the function
void lafortune_function::setParameters(const vec& p)
{
const int nY = _parameters.dimY();
// Safety check the number of parameters
assert(p.size() == nbParameters());
for(int n=0; n<_n; ++n)
for(int i=0; i<dimY(); ++i)
for(int i=0; i<nY; ++i)
{
_C[(n*dimY() + i)*3 + 0] = p[(n*dimY() + i)*4 + 0];
_C[(n*dimY() + i)*3 + 1] = p[(n*dimY() + i)*4 + 1];
_C[(n*dimY() + i)*3 + 2] = p[(n*dimY() + i)*4 + 2];
_N[n*dimY() + i] = p[(n*dimY() + i)*4 + 3];
_C[(n*nY + i)*3 + 0] = p[(n*nY + i)*4 + 0];
_C[(n*nY + i)*3 + 1] = p[(n*nY + i)*4 + 1];
_C[(n*nY + i)*3 + 2] = p[(n*nY + i)*4 + 2];
_N[n*nY + i] = p[(n*nY + i)*4 + 3];
}
#ifdef FIT_DIFFUSE
for(int i=0; i<dimY(); ++i)
for(int i=0; i<nY; ++i)
{
_kd[i] = p[4*_n*dimY() + i];
_kd[i] = p[4*_n*nY + i];
}
#endif
}
......@@ -225,6 +233,7 @@ void lafortune_function::setParameters(const vec& p)
//! parameters.
vec lafortune_function::parametersJacobian(const vec& x) const
{
const int nY = _parameters.dimY();
#ifdef ADAPT_TO_PARAM
vec y(6);
......@@ -242,14 +251,14 @@ vec lafortune_function::parametersJacobian(const vec& x) const
dz = x[2]*x[5];
#endif
vec jac(dimY()*nbParameters());
for(int i=0; i<dimY(); ++i)
vec jac(nY*nbParameters());
for(int i=0; i<nY; ++i)
{
for(int n=0; n<_n; ++n)
for(int j=0; j<dimY(); ++j)
for(int j=0; j<nY; ++j)
{
// index of the current monochromatic lobe
int index = i*nbParameters() + 4*(n*dimY() + j);
int index = i*nbParameters() + 4*(n*nY + j);
double Cx, Cy, Cz, N;
getCurrentLobe(n, j, Cx, Cy, Cz, N);
......@@ -283,10 +292,10 @@ vec lafortune_function::parametersJacobian(const vec& x) const
}
#ifdef FIT_DIFFUSE
for(int j=0; j<dimY(); ++j)
for(int j=0; j<nY; ++j)
{
// index of the current monochromatic lobe
int index = i*nbParameters() + 4*_n*dimY() + j;
int index = i*nbParameters() + 4*_n*nY + j;
jac[index] = 1.0;
}
......@@ -298,31 +307,33 @@ vec lafortune_function::parametersJacobian(const vec& x) const
void lafortune_function::bootstrap(const ptr<data> d, const arguments& args)
{
const int nY = _parameters.dimY();
// Check the arguments for the number of lobes
this->setNbLobes(args.get_int("lobes", 1));
// Set the diffuse component
vec x0 = d->get(0);
for(int i=0; i<d->dimY(); ++i)
_kd[i] = x0[d->dimX() + i];
for(int i=0; i<d->parametrization().dimY(); ++i)
_kd[i] = x0[d->parametrization().dimX() + i];
for(int i=1; i<d->size(); ++i)
{
vec xi = d->get(i);
for(int j=0; j<d->dimY(); ++j)
_kd[j] = std::min(xi[d->dimX() + j], _kd[j]);
for(int j=0; j<d->parametrization().dimY(); ++j)
_kd[j] = std::min(xi[d->parametrization().dimX() + j], _kd[j]);
}
// The remaining data will be equal to one
for(int n=0; n<_n; ++n)
for(int i=0; i<dimY(); ++i)
for(int i=0; i<_parameters.dimY(); ++i)
{
double theta = 0.5 * M_PI * n / (double)_n;
_C[(n*dimY() + i)*3 + 0] = -sin(theta);
_C[(n*dimY() + i)*3 + 1] = -sin(theta);
_C[(n*dimY() + i)*3 + 2] = cos(theta);
_N[n*dimY() + i] = (double)_n;
_C[(n*nY + i)*3 + 0] = -sin(theta);
_C[(n*nY + i)*3 + 1] = -sin(theta);
_C[(n*nY + i)*3 + 2] = cos(theta);
_N[n*nY + i] = (double)_n;
}
}
......@@ -396,6 +407,7 @@ bool lafortune_function::load(std::istream& in)
setNbLobes(nb_lobes);