
Commit

no-std
strasdat committed Oct 27, 2024
1 parent fc19f20 commit ba92425
Showing 84 changed files with 646 additions and 538 deletions.
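The commit moves the workspace toward no_std: `std::` imports are rewritten to their `core::` equivalents, heap types are pulled in through `alloc`, and the crates gain an empty `std` cargo feature. As orientation only, here is a minimal sketch of the crate-root setup such a port typically assumes; the `cfg_attr` line and the example function are illustrative and not part of this commit (only the `std = []` features and `extern crate alloc;` are visible in the diff):

```rust
// Illustrative only: a typical crate root for an optionally-no_std library.
#![cfg_attr(not(feature = "std"), no_std)]

// With std disabled, heap-allocated containers (Vec, String, ...) come from `alloc`.
extern crate alloc;

use core::ops::Add;

/// Generic addition using only `core` traits; no standard library needed.
pub fn sum3<T: Add<Output = T> + Copy>(a: T, b: T, c: T) -> T {
    a + b + c
}
```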
8 changes: 4 additions & 4 deletions .github/workflows/nightly.yml
@@ -20,21 +20,21 @@ jobs:
run: ${SCCACHE_PATH} --show-stats
- uses: actions-rs/toolchain@v1
with:
toolchain: nightly-2024-09-01
toolchain: nightly
- uses: extractions/setup-just@v1
- name: cargo build
env:
SCCACHE_GHA_ENABLED: "true"
RUSTC_WRAPPER: "sccache"
run: |
rustup default nightly-2024-09-01
rustup default nightly
cargo build --release --all-targets --all-features
- name: cargo test
env:
SCCACHE_GHA_ENABLED: "true"
RUSTC_WRAPPER: "sccache"
run: |
rustup default nightly-2024-09-01
cargo test --release --all-features
just test
- name: Run sccache stat for check after
shell: bash
run: ${SCCACHE_PATH} --show-stats
2 changes: 2 additions & 0 deletions .github/workflows/stable.yml
@@ -18,6 +18,7 @@ jobs:
- name: Run sccache stat for check before
shell: bash
run: ${SCCACHE_PATH} --show-stats
- uses: extractions/setup-just@v1
- uses: actions-rs/toolchain@v1
with:
toolchain: stable
@@ -27,6 +28,7 @@
RUSTC_WRAPPER: "sccache"
run: |
cargo build --release --all-targets
just build-std
- name: cargo test
env:
SCCACHE_GHA_ENABLED: "true"
10 changes: 1 addition & 9 deletions Cargo.toml
@@ -34,28 +34,20 @@ sophus_sensor = {path = "crates/sophus_sensor", version = "0.10.0"}

approx = "0.5"
as-any = "0.3"
assertables = "8.3"
async-trait = "0.1"
bytemuck = {version = "1.14", features = ["derive"]}
concat-arrays = "0.1"
dyn-clone = "1.0"
env_logger = "0.11"
faer = "0.19"
image = {version = "0.25", features = [
"jpeg",
"png",
"tiff",
]}
linked-hash-map = "0.5"
log = "0.4"
nalgebra = {version = "0.33", features = ["rand", "bytemuck"]}
ndarray = {version = "0.15", features = ["approx-0_5"]}
num-traits = "0.2"
numpy = "0.21"
png = "0.17"
rand = "0.8"
thiserror = "1.0"
tiff = "0.9.0"
snafu = "0.8.5"
typenum = {version = "1.17", features = ["const-generics"]}
winit = {version = "0.30", features = ["android-native-activity"]}

1 change: 1 addition & 0 deletions crates/sophus/Cargo.toml
@@ -32,3 +32,4 @@ simd = [
"sophus_opt/simd",
"sophus_sensor/simd",
]
std = []
4 changes: 2 additions & 2 deletions crates/sophus_core/Cargo.toml
@@ -12,16 +12,16 @@ version.workspace = true

[dependencies]
approx.workspace = true
assertables.workspace = true
concat-arrays.workspace = true
log.workspace = true
nalgebra.workspace = true
ndarray.workspace = true
num-traits.workspace = true
thiserror.workspace = true
snafu.workspace = true
typenum.workspace = true

sleef = {version = "0.3", optional = true}

[features]
simd = ["sleef"]
std = []
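Both the workspace and sophus_core manifests replace `thiserror` with `snafu` (which, unlike thiserror 1.x, can be used without the standard library) and introduce an empty `std = []` feature. An empty feature like this is just a switch that source code can test with `cfg`; a hypothetical sketch, not taken from the commit, with made-up function names:

```rust
// Hypothetical example of code gated on the new `std` feature.
extern crate alloc;

use alloc::format;
use alloc::string::String;

/// Building a message only needs an allocator, so `alloc` is enough.
pub fn describe(name: &str, value: f64) -> String {
    format!("{name} = {value}")
}

/// Writing to stdout genuinely requires std, so it is feature-gated.
#[cfg(feature = "std")]
pub fn print_description(name: &str, value: f64) {
    println!("{}", describe(name, value));
}
```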
18 changes: 9 additions & 9 deletions crates/sophus_core/src/calculus/dual/dual_batch_matrix.rs
@@ -10,15 +10,15 @@ use crate::tensor::mut_tensor::MutTensorDDR
use crate::tensor::mut_tensor::MutTensorDDRC;
use approx::AbsDiffEq;
use approx::RelativeEq;
use core::fmt::Debug;
use core::ops::Add;
use core::ops::Mul;
use core::ops::Neg;
use core::ops::Sub;
use core::simd::LaneCount;
use core::simd::Mask;
use core::simd::SupportedLaneCount;
use num_traits::Zero;
use std::fmt::Debug;
use std::ops::Add;
use std::ops::Mul;
use std::ops::Neg;
use std::ops::Sub;
use std::simd::LaneCount;
use std::simd::Mask;
use std::simd::SupportedLaneCount;

use crate::calculus::dual::dual_matrix::DijPairM;

@@ -686,7 +686,7 @@ impl<const ROWS: usize, const COLS: usize, const BATCH: usize> Debug
where
LaneCount<BATCH>: SupportedLaneCount,
{
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
if self.dij_part.is_some() {
f.debug_struct("DualScalarLike")
.field("val", &self.real_part)
27 changes: 15 additions & 12 deletions crates/sophus_core/src/calculus/dual/dual_batch_scalar.rs
@@ -9,22 +9,25 @@ use crate::linalg::EPS_F64;
use crate::prelude::*;
use crate::tensor::mut_tensor::InnerScalarToVec;
use crate::tensor::mut_tensor::MutTensorDD;
use alloc::vec::Vec;
use approx::assert_abs_diff_eq;
use approx::AbsDiffEq;
use approx::RelativeEq;
use core::fmt::Debug;
use core::ops::Add;
use core::ops::AddAssign;
use core::ops::Div;
use core::ops::Mul;
use core::ops::Neg;
use core::ops::Sub;
use core::ops::SubAssign;
use core::simd::LaneCount;
use core::simd::Mask;
use core::simd::SupportedLaneCount;
use num_traits::One;
use num_traits::Zero;
use std::fmt::Debug;
use std::ops::Add;
use std::ops::AddAssign;
use std::ops::Div;
use std::ops::Mul;
use std::ops::Neg;
use std::ops::Sub;
use std::ops::SubAssign;
use std::simd::LaneCount;
use std::simd::Mask;
use std::simd::SupportedLaneCount;

extern crate alloc;

/// Dual number - a real number and an infinitesimal number (batch version)
#[derive(Clone)]
@@ -267,7 +270,7 @@ where
BatchScalarF64<BATCH>: IsCoreScalar,
LaneCount<BATCH>: SupportedLaneCount,
{
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
if self.dij_part.is_some() {
f.debug_struct("DualScalar")
.field("val", &self.real_part)
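The batch dual types keep their SIMD imports but take them from `core::simd` instead of `std::simd`; portable SIMD lives in core, so it remains usable in a no_std build, although it still needs a nightly compiler, which matches the nightly workflow above. A standalone sketch of that pattern, not code from the commit:

```rust
// Illustrative sketch: portable SIMD from `core::simd` works without std.
#![feature(portable_simd)] // nightly-only, as in the CI nightly workflow
#![no_std]

use core::simd::{LaneCount, Simd, SupportedLaneCount};

/// Lane-wise sum of two f64 batches, generic over the batch size.
pub fn add_batches<const BATCH: usize>(
    a: Simd<f64, BATCH>,
    b: Simd<f64, BATCH>,
) -> Simd<f64, BATCH>
where
    LaneCount<BATCH>: SupportedLaneCount,
{
    a + b
}
```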
14 changes: 7 additions & 7 deletions crates/sophus_core/src/calculus/dual/dual_batch_vector.rs
@@ -9,13 +9,13 @@ use crate::tensor::mut_tensor::MutTensorDD
use crate::tensor::mut_tensor::MutTensorDDR;
use approx::AbsDiffEq;
use approx::RelativeEq;
use std::fmt::Debug;
use std::ops::Add;
use std::ops::Neg;
use std::ops::Sub;
use std::simd::LaneCount;
use std::simd::Mask;
use std::simd::SupportedLaneCount;
use core::fmt::Debug;
use core::ops::Add;
use core::ops::Neg;
use core::ops::Sub;
use core::simd::LaneCount;
use core::simd::Mask;
use core::simd::SupportedLaneCount;

/// Dual vector (batch version)
#[derive(Clone, Debug)]
12 changes: 6 additions & 6 deletions crates/sophus_core/src/calculus/dual/dual_matrix.rs
@@ -8,12 +8,12 @@ use crate::tensor::mut_tensor::MutTensorDDR
use crate::tensor::mut_tensor::MutTensorDDRC;
use approx::AbsDiffEq;
use approx::RelativeEq;
use core::fmt::Debug;
use core::ops::Add;
use core::ops::Mul;
use core::ops::Neg;
use core::ops::Sub;
use num_traits::Zero;
use std::fmt::Debug;
use std::ops::Add;
use std::ops::Mul;
use std::ops::Neg;
use std::ops::Sub;

/// DualScalarLike matrix
#[derive(Clone)]
Expand All @@ -25,7 +25,7 @@ pub struct DualMatrix<const ROWS: usize, const COLS: usize> {
}

impl<const ROWS: usize, const COLS: usize> Debug for DualMatrix<ROWS, COLS> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
if self.dij_part.is_some() {
f.debug_struct("DualScalarLike")
.field("val", &self.real_part)
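Every `Debug` implementation in these files now takes `core::fmt::Formatter` instead of `std::fmt::Formatter`; `std::fmt` merely re-exports `core::fmt`, so behaviour is unchanged while the std dependency disappears. A self-contained sketch of the same shape of impl, with a stand-in type rather than the real `DualMatrix`:

```rust
use core::fmt;

/// Stand-in for a dual number: a value plus an optional derivative part.
pub struct Dual {
    real_part: f64,
    dij_part: Option<f64>,
}

impl fmt::Debug for Dual {
    // `core::fmt::Formatter` offers the same API that the std path did.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let mut s = f.debug_struct("Dual");
        s.field("val", &self.real_part);
        if let Some(dij) = &self.dij_part {
            s.field("dij_val", dij);
        }
        s.finish()
    }
}
```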
26 changes: 12 additions & 14 deletions crates/sophus_core/src/calculus/dual/dual_scalar.rs
@@ -1,28 +1,26 @@
use super::dual_matrix::DualMatrix;
use super::dual_vector::DualVector;

use crate::linalg::scalar::NumberCategory;

use crate::linalg::MatF64;
use crate::linalg::VecF64;
use crate::linalg::EPS_F64;
use crate::prelude::*;
use crate::tensor::mut_tensor::InnerScalarToVec;
use crate::tensor::mut_tensor::MutTensorDD;

use approx::AbsDiffEq;
use approx::RelativeEq;
use core::fmt::Debug;
use core::ops::Add;
use core::ops::AddAssign;
use core::ops::Div;
use core::ops::Mul;
use core::ops::Neg;
use core::ops::Sub;
use core::ops::SubAssign;
use num_traits::One;
use num_traits::Zero;

use std::fmt::Debug;
use std::ops::Add;
use std::ops::AddAssign;
use std::ops::Div;
use std::ops::Mul;
use std::ops::Neg;
use std::ops::Sub;
use std::ops::SubAssign;
extern crate alloc;

/// Trait for dual numbers
pub trait IsDual {}
@@ -234,7 +232,7 @@ impl PartialEq for DualScalar {
}

impl PartialOrd for DualScalar {
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
fn partial_cmp(&self, other: &Self) -> Option<core::cmp::Ordering> {
self.real_part.partial_cmp(&other.real_part)
}
}
Expand All @@ -246,7 +244,7 @@ impl From<f64> for DualScalar {
}

impl Debug for DualScalar {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
if self.dij_part.is_some() {
f.debug_struct("DualScalar")
.field("val", &self.real_part)
@@ -467,7 +465,7 @@ impl IsScalar<1> for DualScalar {
}
}

fn scalar_examples() -> Vec<Self> {
fn scalar_examples() -> alloc::vec::Vec<Self> {
[1.0, 2.0, 3.0].iter().map(|&v| Self::from_f64(v)).collect()
}

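`scalar_examples()` now spells its return type as `alloc::vec::Vec<Self>`, and `PartialOrd` uses `core::cmp::Ordering`; both are the no_std paths to the same types. A small sketch of the Vec-from-`alloc` pattern, with made-up contents mirroring the shape of the function in the diff:

```rust
// Sketch only: returning a Vec in a no_std crate via the `alloc` crate.
extern crate alloc;

use alloc::vec::Vec;

/// A few example values, echoing `scalar_examples()` from the diff.
fn scalar_examples() -> Vec<f64> {
    [1.0, 2.0, 3.0].iter().copied().collect()
}
```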
10 changes: 5 additions & 5 deletions crates/sophus_core/src/calculus/dual/dual_vector.rs
@@ -7,10 +7,10 @@ use crate::tensor::mut_tensor::MutTensorDD
use crate::tensor::mut_tensor::MutTensorDDR;
use approx::AbsDiffEq;
use approx::RelativeEq;
use std::fmt::Debug;
use std::ops::Add;
use std::ops::Neg;
use std::ops::Sub;
use core::fmt::Debug;
use core::ops::Add;
use core::ops::Neg;
use core::ops::Sub;

/// Dual vector
#[derive(Clone)]
@@ -228,7 +228,7 @@ impl<const ROWS: usize> Add for DualVector<ROWS> {
}

impl<const ROWS: usize> Debug for DualVector<ROWS> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
if self.dij_part.is_some() {
f.debug_struct("DualScalarLike")
.field("val", &self.real_part)
6 changes: 3 additions & 3 deletions crates/sophus_core/src/calculus/maps/curves.rs
@@ -8,7 +8,7 @@ use nalgebra::SVector;
///
/// f: ℝ -> ℝ
pub struct ScalarValuedCurve<S: IsScalar<BATCH>, const BATCH: usize> {
phantom: std::marker::PhantomData<S>,
phantom: core::marker::PhantomData<S>,
}

impl<S: IsScalar<BATCH>, const BATCH: usize> ScalarValuedCurve<S, BATCH> {
@@ -44,7 +44,7 @@ impl<D: IsDualScalar<BATCH>, const BATCH: usize> ScalarValuedCurve<D, BATCH> {
///
/// f: ℝ -> ℝʳ
pub struct VectorValuedCurve<S: IsScalar<BATCH>, const BATCH: usize> {
phantom: std::marker::PhantomData<S>,
phantom: core::marker::PhantomData<S>,
}

impl<S: IsScalar<BATCH>, const BATCH: usize> VectorValuedCurve<S, BATCH> {
@@ -79,7 +79,7 @@ impl<D: IsDualScalar<BATCH>, const BATCH: usize> VectorValuedCurve<D, BATCH> {
/// This is a function which takes a scalar and returns a matrix:
/// f: ℝ -> ℝʳ x ℝᶜ
pub struct MatrixValuedCurve<S: IsScalar<BATCH>, const BATCH: usize> {
phantom: std::marker::PhantomData<S>,
phantom: core::marker::PhantomData<S>,
}

impl<S: IsScalar<BATCH>, const BATCH: usize> MatrixValuedCurve<S, BATCH> {
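The curve types store no data; they carry the scalar type parameter purely at the type level, which is why the only change needed is `std::marker::PhantomData` becoming `core::marker::PhantomData`. A standalone sketch of that zero-sized pattern (names are illustrative, not the actual sophus API):

```rust
use core::marker::PhantomData;

/// Zero-sized, type-level-only map in the style of ScalarValuedCurve<S, BATCH>.
pub struct Curve<S> {
    phantom: PhantomData<S>,
}

impl<S> Curve<S> {
    /// No allocation and no std: PhantomData is a zero-sized marker from core.
    pub fn new() -> Self {
        Curve { phantom: PhantomData }
    }
}
```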
6 changes: 3 additions & 3 deletions crates/sophus_core/src/calculus/maps/matrix_valued_maps.rs
@@ -2,8 +2,8 @@ use crate::linalg::SMat
use crate::prelude::*;
use crate::tensor::mut_tensor::MutTensorDDRC;
use crate::tensor::mut_tensor::MutTensorDRC;
use core::marker::PhantomData;
use nalgebra::SMatrix;
use std::marker::PhantomData;

/// Matrix-valued map on a vector space.
///
Expand All @@ -12,7 +12,7 @@ use std::marker::PhantomData;
/// f: ℝᵐ -> ℝʳ x ℝᶜ
///
pub struct MatrixValuedMapFromVector<S: IsScalar<BATCH>, const BATCH: usize> {
phantom: std::marker::PhantomData<S>,
phantom: core::marker::PhantomData<S>,
}

impl<S: IsRealScalar<BATCH, RealScalar = S>, const BATCH: usize>
@@ -82,7 +82,7 @@ impl<D: IsDualScalar<BATCH, DualScalar = D>, const BATCH: usize>
/// f: ℝᵐ x ℝⁿ -> ℝʳ x ℝᶜ
///
pub struct MatrixValuedMapFromMatrix<S: IsScalar<BATCH>, const BATCH: usize> {
phantom: std::marker::PhantomData<S>,
phantom: core::marker::PhantomData<S>,
}

impl<S: IsRealScalar<BATCH, RealScalar = S>, const BATCH: usize>
4 changes: 2 additions & 2 deletions crates/sophus_core/src/calculus/maps/scalar_valued_maps.rs
@@ -10,7 +10,7 @@ use crate::tensor::mut_tensor::MutTensorDD
/// These functions are also called a scalar fields (on vector spaces).
///
pub struct ScalarValuedMapFromVector<S: IsScalar<BATCH>, const BATCH: usize> {
phantom: std::marker::PhantomData<S>,
phantom: core::marker::PhantomData<S>,
}

impl<S: IsRealScalar<BATCH>, const BATCH: usize> ScalarValuedMapFromVector<S, BATCH> {
@@ -73,7 +73,7 @@ impl<D: IsDualScalar<BATCH>, const BATCH: usize> ScalarValuedMapFromVector<D, BA
///
/// f: ℝᵐ x ℝⁿ -> ℝ
pub struct ScalarValuedMapFromMatrix<S: IsScalar<BATCH>, const BATCH: usize> {
phantom: std::marker::PhantomData<S>,
phantom: core::marker::PhantomData<S>,
}

impl<S: IsRealScalar<BATCH>, const BATCH: usize> ScalarValuedMapFromMatrix<S, BATCH> {
