Add step-wise optimization for the fixed-lag smoother (issue #300) #319

Status: Open. Wants to merge 5 commits into base: devel.
4 changes: 2 additions & 2 deletions fuse_core/include/fuse_core/variable.h
@@ -325,7 +325,7 @@ class Variable
* @param[in] index The variable dimension of interest
* @return The lower bound for the requested variable dimension
*/
virtual double lowerBound(size_t index) const
virtual double lowerBound(size_t /* index */) const
{
return std::numeric_limits<double>::lowest();
}
@@ -338,7 +338,7 @@
* @param[in] index The variable dimension of interest
* @return The upper bound for the requested variable dimension
*/
virtual double upperBound(size_t index) const
virtual double upperBound(size_t /* index */) const
{
return std::numeric_limits<double>::max();
}
24 changes: 24 additions & 0 deletions fuse_optimizers/CMakeLists.txt
@@ -34,6 +34,7 @@ add_compile_options(-Wall -Werror)
add_library(${PROJECT_NAME}
src/batch_optimizer.cpp
src/fixed_lag_smoother.cpp
src/step_wise_fixed_lag_smoother.cpp
src/optimizer.cpp
src/variable_stamp_index.cpp
)
@@ -98,6 +99,28 @@ set_target_properties(fixed_lag_smoother_node
CXX_STANDARD_REQUIRED YES
)

## step_wise_fixed_lag_smoother node
add_executable(step_wise_fixed_lag_smoother_node
src/step_wise_fixed_lag_smoother_node.cpp
)
add_dependencies(step_wise_fixed_lag_smoother_node
${catkin_EXPORTED_TARGETS}
)
target_include_directories(step_wise_fixed_lag_smoother_node
PRIVATE
include
${catkin_INCLUDE_DIRS}
)
target_link_libraries(step_wise_fixed_lag_smoother_node
${PROJECT_NAME}
${catkin_LIBRARIES}
)
set_target_properties(step_wise_fixed_lag_smoother_node
PROPERTIES
CXX_STANDARD 14
CXX_STANDARD_REQUIRED YES
)
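The new src/step_wise_fixed_lag_smoother_node.cpp built by this target is not shown in the excerpt. By analogy with the existing fixed_lag_smoother_node, it presumably amounts to something close to the following sketch (the fuse_graphs::HashGraph choice and the node name are assumptions, not taken from the PR):

#include <fuse_graphs/hash_graph.h>
#include <fuse_optimizers/step_wise_fixed_lag_smoother.h>
#include <ros/ros.h>

#include <utility>

int main(int argc, char** argv)
{
  ros::init(argc, argv, "step_wise_fixed_lag_smoother_node");

  // Construct the graph implementation and hand it to the step-wise optimizer
  fuse_graphs::HashGraph::UniquePtr graph(new fuse_graphs::HashGraph());
  fuse_optimizers::StepWiseFixedLagSmoother optimizer(std::move(graph));

  ros::spin();
  return 0;
}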

#############
## Install ##
#############
@@ -114,6 +137,7 @@ install(
TARGETS
batch_optimizer_node
fixed_lag_smoother_node
step_wise_fixed_lag_smoother_node
RUNTIME DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}
)

fuse_optimizers/include/fuse_optimizers/batch_optimizer_params.h
@@ -50,7 +50,7 @@ namespace fuse_optimizers
{

/**
* @brief Defines the set of parameters required by the fuse_optimizers::FixedLagSmoother class
* @brief Defines the set of parameters required by the fuse_optimizers::BatchOptimizer class
*/
struct BatchOptimizerParams
{
2 changes: 2 additions & 0 deletions fuse_optimizers/include/fuse_optimizers/fixed_lag_smoother.h
@@ -126,6 +126,8 @@ class FixedLagSmoother : public Optimizer
*/
virtual ~FixedLagSmoother();

virtual void startOptimization();
Review comment (Contributor):
Can you add a function description comment for startOptimization()?
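One possible wording for the requested description (a sketch only; the rationale sentence is inferred from the diff, not taken from the PR):

/**
 * @brief Starts the optimization thread and the periodic optimization timer
 *
 * Separated from the constructor so that derived optimizers can control when
 * the optimization loop begins running.
 */
virtual void startOptimization();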


protected:
/**
* Structure containing the information required to process a transaction after it was received.
175 changes: 175 additions & 0 deletions fuse_optimizers/include/fuse_optimizers/step_wise_fixed_lag_smoother.h
@@ -0,0 +1,175 @@
/*
* Software License Agreement (BSD License)
*
* Copyright (c) 2022, ARTI - Autonomous Robot Technology GmbH
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution.
* * Neither the name of the copyright holder nor the names of its
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
* ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef FUSE_OPTIMIZERS_STEP_WISE_FIXED_LAG_SMOOTHER_H
#define FUSE_OPTIMIZERS_STEP_WISE_FIXED_LAG_SMOOTHER_H

#include <fuse_core/graph.h>
#include <fuse_core/transaction.h>
#include <fuse_optimizers/step_wise_fixed_lag_smoother_params.h>
#include <fuse_optimizers/fixed_lag_smoother.h>
#include <fuse_optimizers/variable_stamp_index.h>
#include <ros/ros.h>
#include <std_srvs/Empty.h>

#include <atomic>
#include <condition_variable>
#include <functional>
#include <mutex>
#include <string>
#include <thread>
#include <vector>


namespace fuse_optimizers
{

/**
* @brief A step-wise fixed-lag smoother implementation that marginalizes out variables older than a defined lag time
* and optimizes in successive steps, each step adding constraints from additional sources
*
* This implementation assumes that all added variable types are either derived from the fuse_variables::Stamped class,
* or are directly connected to at least one fuse_variables::Stamped variable via a constraint. The current time of
* the fixed-lag smoother is determined by the newest stamp of all added fuse_variables::Stamped variables.
*
* During optimization:
* (1) a set of new variables and constraints is computed
* (2) each optimization step adds the constraints from its configured source(s) to the graph
* (3) the augmented graph is optimized and the variable values are updated
* (4) steps 2 and 3 are repeated until all optimization steps are completed
* (5) all motion models, sensors, and publishers are notified of the updated graph
* (6) all variables older than "current time - lag duration" are marginalized out.
*
* Optimization is performed at a fixed frequency, controlled by the \p optimization_frequency parameter. Received
* sensor transactions are queued while the optimization is processing, then applied to the graph at the start of the
* next optimization cycle. If the previous optimization is not yet complete when the optimization period elapses,
* then a warning will be logged but a new optimization will *not* be started. The previous optimization will run to
* completion, and the next optimization will not begin until the next scheduled optimization period.
*
* Parameters:
* - lag_duration (float, default: 5.0) The duration of the smoothing window in seconds
* - motion_models (struct array) The set of motion model plugins to load
* @code{.yaml}
* - name: string (A unique name for this motion model)
* type: string (The plugin loader class string for the desired motion model type)
* - ...
* @endcode
* - optimization_frequency (float, default: 10.0) The target frequency for optimization cycles. If an optimization
* takes longer than expected, an optimization cycle may be skipped.
* - publishers (struct array) The set of publisher plugins to load
* @code{.yaml}
* - name: string (A unique name for this publisher)
* type: string (The plugin loader class string for the desired publisher type)
* - ...
* @endcode
* - sensor_models (struct array) The set of sensor model plugins to load
* @code{.yaml}
* - name: string (A unique name for this sensor model)
* type: string (The plugin loader class string for the desired sensor model type)
* motion_models: [name1, name2, ...] (An optional list of motion model names that should be applied)
* - ...
* @endcode
* - transaction_timeout (float, default: 0.10) The maximum time to wait for motion models to be generated for a
* received transaction. Transactions are processed sequentially, so
* no new transactions will be added to the graph while waiting for
* motion models to be generated. Once the timeout expires, that
* transaction will be deleted from the queue.
* - optimization_steps (struct array) The ordered list of optimization steps, each defining the source(s) whose constraints are added in that step.
* It is recommended to always add the ignition source(s) and motion model source(s)
* to the first optimization step. The constraints and variables of the marginalization
* are always added to the first optimization step.
* @code{.yaml}
* - sources: string array (The names of the constraint sources to add in this step.
* The names must match those used in the sensor_models and motion_models parameters.)
* - ...
* @endcode
* full example:
* @code{.yaml}
* optimization_steps:
* -
* sources:
* - "initial_localization_sensor"
* - "unicycle_motion_model"
* -
* sources:
* - "odometry_sensor"
* - "imu_sensor"
* -
* sources:
* - "scan_to_scan"
* -
* sources:
* - "scan_to_map"
* @endcode
*/
class StepWiseFixedLagSmoother : public FixedLagSmoother
{
public:
FUSE_SMART_PTR_DEFINITIONS(StepWiseFixedLagSmoother);
using ParameterType = StepWiseFixedLagSmootherParams;

/**
* @brief Constructor
*
* @param[in] graph The derived graph object. This allows different graph implementations to be used
* with the same optimizer code.
* @param[in] node_handle A node handle in the global namespace
* @param[in] private_node_handle A node handle in the node's private namespace
*/
StepWiseFixedLagSmoother(
fuse_core::Graph::UniquePtr graph,
const ros::NodeHandle& node_handle = ros::NodeHandle(),
const ros::NodeHandle& private_node_handle = ros::NodeHandle("~"));

/**
* @brief Destructor
*/
virtual ~StepWiseFixedLagSmoother();

/**
* @brief Function that optimizes all constraints, designed to be run in a separate thread.
*
* This function waits for an optimization or shutdown signal, then either calls optimize() or exits appropriately.
*/
void optimizationLoop();

void startOptimization() override;

protected:
// Read-only after construction
ParameterType params_; //!< Configuration settings for this fixed-lag smoother
};

} // namespace fuse_optimizers

#endif // FUSE_OPTIMIZERS_STEP_WISE_FIXED_LAG_SMOOTHER_H
106 changes: 106 additions & 0 deletions fuse_optimizers/include/fuse_optimizers/step_wise_fixed_lag_smoother_params.h
@@ -0,0 +1,106 @@
/*
* Software License Agreement (BSD License)
*
* Copyright (c) 2022, ARTI - Autonomous Robot Technology GmbH
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution.
* * Neither the name of the copyright holder nor the names of its
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
* ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef FUSE_OPTIMIZERS_STEP_WISE_FIXED_LAG_SMOOTHER_PARAMS_H
#define FUSE_OPTIMIZERS_STEP_WISE_FIXED_LAG_SMOOTHER_PARAMS_H

#include <fuse_core/ceres_options.h>
#include <fuse_core/parameter.h>
#include <ros/duration.h>
#include <ros/node_handle.h>

#include <fuse_optimizers/fixed_lag_smoother_params.h>

#include <ceres/solver.h>

#include <algorithm>
#include <string>
#include <vector>


namespace fuse_optimizers
{

/**
* @brief Defines the set of parameters required by the fuse_optimizers::StepWiseFixedLagSmoother class
*/
struct StepWiseFixedLagSmootherParams : FixedLagSmootherParams
{
public:
/**
* @brief The ordered list of optimization steps, each holding the names of its constraint sources
*
* The optimization is performed sequentially where each step adds
* more constraints from different sources to the graph.
*/
std::vector<std::vector<std::string>> optimization_steps;

/**
* @brief Method for loading parameter values from ROS.
*
* @param[in] nh - The ROS node handle with which to load parameters
*/
void loadFromROS(const ros::NodeHandle& nh)
{
FixedLagSmootherParams::loadFromROS(nh);

// load optimization steps
XmlRpc::XmlRpcValue optimization_steps_raw;
Review comment (Contributor):
Can you add an include for XmlRpcValue?
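One way to address this, assuming a ROS distribution where the xmlrpcpp package installs its headers under an xmlrpcpp/ prefix (older distributions ship the header as <XmlRpcValue.h>), is to add the include alongside the other headers at the top of the file:

#include <xmlrpcpp/XmlRpcValue.h>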

fuse_core::getParamRequired<XmlRpc::XmlRpcValue>(nh, "optimization_steps", optimization_steps_raw);

ROS_ASSERT(optimization_steps_raw.getType() == XmlRpc::XmlRpcValue::TypeArray);

for (int step_i = 0; step_i < optimization_steps_raw.size(); ++step_i)
{
ROS_ASSERT(optimization_steps_raw[step_i].getType() == XmlRpc::XmlRpcValue::TypeStruct);

ROS_ASSERT(optimization_steps_raw[step_i]["sources"].getType() == XmlRpc::XmlRpcValue::TypeArray);

std::vector<std::string> optimization_step_sources;

for (int source_i = 0; source_i < optimization_steps_raw[step_i]["sources"].size(); ++source_i)
{
ROS_ASSERT(optimization_steps_raw[step_i]["sources"][source_i].getType() == XmlRpc::XmlRpcValue::TypeString);

optimization_step_sources.push_back(
static_cast<std::string>(optimization_steps_raw[step_i]["sources"][source_i]));
}

optimization_steps.push_back(optimization_step_sources);
}
}
};

} // namespace fuse_optimizers

#endif // FUSE_OPTIMIZERS_STEP_WISE_FIXED_LAG_SMOOTHER_PARAMS_H
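For the full YAML example documented in step_wise_fixed_lag_smoother.h, loadFromROS() leaves optimization_steps holding the equivalent of the following (illustrative only, assuming that example configuration):

std::vector<std::vector<std::string>> optimization_steps = {
  { "initial_localization_sensor", "unicycle_motion_model" },
  { "odometry_sensor", "imu_sensor" },
  { "scan_to_scan" },
  { "scan_to_map" }
};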
15 changes: 9 additions & 6 deletions fuse_optimizers/src/fixed_lag_smoother.cpp
@@ -93,6 +93,15 @@ FixedLagSmoother::FixedLagSmoother(
// Test for auto-start
autostart();

// Advertise a service that resets the optimizer to its initial state
reset_service_server_ = node_handle_.advertiseService(
ros::names::resolve(params_.reset_service),
&FixedLagSmoother::resetServiceCallback,
this);
}

void FixedLagSmoother::startOptimization()
Review comment (Contributor); a sketch of this suggestion follows after the function below:
We should probably initialize the optimization_running_ flag to false in the constructor, and then set it to true in the startOptimization() method. Just to adhere to the principle of least surprise.

{
// Start the optimization thread
optimization_thread_ = std::thread(&FixedLagSmoother::optimizationLoop, this);

@@ -101,12 +110,6 @@ FixedLagSmoother::FixedLagSmoother(
params_.optimization_period,
&FixedLagSmoother::optimizerTimerCallback,
this);

// Advertise a service that resets the optimizer to its initial state
reset_service_server_ = node_handle_.advertiseService(
ros::names::resolve(params_.reset_service),
&FixedLagSmoother::resetServiceCallback,
this);
}
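A minimal sketch of the reviewer's suggestion above, assuming optimization_running_ is (or becomes) a std::atomic<bool> member of FixedLagSmoother; its declaration is not part of this diff:

// Initialize the flag during construction so the loop does not run before being started
FixedLagSmoother::FixedLagSmoother(/* ... */)  // : ..., optimization_running_(false)
{
  // ... existing constructor body ...
}

// ... and only set it to true once the optimization thread is actually being started
void FixedLagSmoother::startOptimization()
{
  optimization_running_ = true;

  optimization_thread_ = std::thread(&FixedLagSmoother::optimizationLoop, this);
  // ... create the optimization timer as in the diff above ...
}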

FixedLagSmoother::~FixedLagSmoother()