/*! Assorted helpers */
use std::rc::Rc;
use crate::float_ord::FloatOrd;
use std::borrow::Borrow;
use std::cmp::{Ordering, PartialOrd};
use std::hash::{ Hash, Hasher };
use std::ops::Mul;
pub mod c {
use super::*;
use std::cell::RefCell;
use std::ffi::{ CStr, CString };
use std::os::raw::c_char;
use std::rc::Rc;
use std::str::Utf8Error;
use std::sync::{Arc, Mutex};
// traits
use std::borrow::ToOwned;
// The lifetime on input limits the existence of the result
pub fn as_str(s: &*const c_char) -> Result<Option<&str>, Utf8Error> {
if s.is_null() {
Ok(None)
} else {
unsafe {CStr::from_ptr(*s)}
.to_str()
.map(Some)
}
}
pub fn as_cstr(s: &*const c_char) -> Option<&CStr> {
if s.is_null() {
None
} else {
Some(unsafe {CStr::from_ptr(*s)})
}
}
pub fn into_cstring(s: *const c_char) -> Result<Option<CString>, std::ffi::NulError> {
if s.is_null() {
Ok(None)
} else {
CString::new(
unsafe {CStr::from_ptr(s)}.to_bytes()
).map(Some)
}
}
#[cfg(test)]
mod tests {
use super::*;
use std::ptr;
#[test]
fn test_null_cstring() {
assert_eq!(into_cstring(ptr::null()), Ok(None))
}
#[test]
fn test_null_str() {
assert_eq!(as_str(&ptr::null()), Ok(None))
}
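// Illustrative additions, not part of the original test set:
// exercise the non-null paths with a pointer borrowed from a Rust-owned CString.
// The test names are new.
#[test]
fn test_valid_str() {
    use std::ffi::CString;
    let owned = CString::new("example").unwrap();
    let ptr = owned.as_ptr();
    // The returned &str borrows from the buffer behind `ptr`.
    assert_eq!(as_str(&ptr), Ok(Some("example")));
}
#[test]
fn test_valid_cstring() {
    use std::ffi::CString;
    let owned = CString::new("example").unwrap();
    // `into_cstring` copies the bytes, so the result outlives `owned`.
    assert_eq!(
        into_cstring(owned.as_ptr()),
        Ok(Some(CString::new("example").unwrap()))
    );
}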
}
/// Marker trait for values that can be transferred to/received from C.
/// They must be either *const or *mut or repr(transparent).
pub trait COpaquePtr {}
/// Wraps structures to pass them safely to/from C
/// Since C doesn't respect borrowing rules,
/// RefCell will enforce them dynamically (only 1 writer/many readers)
/// Rc is implied and will ensure timely dropping
#[repr(transparent)]
pub struct Wrapped<T>(*const RefCell<T>);
// It would be nice to implement `Borrow`
// directly on the raw pointer to avoid one conversion call,
// but the `borrow()` call needs to extract an `Rc<RefCell<T>>`,
// and at the same time return a reference to it (`std::cell::Ref`)
// to take advantage of `RefCell::borrow()` runtime checks.
// Unfortunately, that needs a `Ref` struct with self-referential fields,
// which is a bit too complex for now.
impl<T> Wrapped<T> {
pub fn new(value: T) -> Wrapped<T> {
Wrapped::wrap(Rc::new(RefCell::new(value)))
}
pub fn wrap(state: Rc<RefCell<T>>) -> Wrapped<T> {
Wrapped(Rc::into_raw(state))
}
/// Extracts the reference to the data.
/// It may cause problems if attempted in more than one place
pub unsafe fn unwrap(self) -> Rc<RefCell<T>> {
Rc::from_raw(self.0)
}
/// Creates a new Rc reference to the same data.
/// Use for accessing the underlying data as a reference.
pub fn clone_ref(&self) -> Rc<RefCell<T>> {
// A bit dangerous: the Rc may be in use elsewhere
let used_rc = unsafe { Rc::from_raw(self.0) };
let rc = used_rc.clone();
let _ = Rc::into_raw(used_rc); // prevent dropping the original reference
rc
}
}
impl<T> Clone for Wrapped<T> {
fn clone(&self) -> Wrapped<T> {
Wrapped::wrap(self.clone_ref())
}
}
/// ToOwned won't work here
/// because it's really difficult to implement Borrow on Wrapped
/// with the Rc<RefCell<T>> chain on the way to the data
impl<T: Clone> CloneOwned for Wrapped<T> {
type Owned = T;
fn clone_owned(&self) -> T {
let rc = self.clone_ref();
let r = RefCell::borrow(&rc);
r.to_owned()
}
}
impl<T> COpaquePtr for Wrapped<T> {}
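// Illustrative sketch, not part of the original module: the round trip that
// C-facing glue code is expected to perform with `Wrapped`.
// `ExampleState` and the module name are hypothetical, invented for this test.
#[cfg(test)]
mod wrapped_example {
    use super::*;
    use super::super::CloneOwned;
    use std::cell::RefCell;

    #[derive(Clone)]
    struct ExampleState {
        count: u32,
    }

    #[test]
    fn test_wrapped_roundtrip() {
        // What a constructor exported to C would do.
        let wrapped = Wrapped::new(ExampleState { count: 7 });
        // What an accessor called from C would do: take a fresh Rc,
        // then let the RefCell check the borrow at runtime.
        let rc = wrapped.clone_ref();
        RefCell::borrow_mut(&rc).count += 1;
        drop(rc);
        // `clone_owned` copies the data out without consuming the wrapper.
        assert_eq!(wrapped.clone_owned().count, 8);
        // What a destructor exported to C would do: reclaim the Rc
        // so the data finally gets dropped.
        let rc = unsafe { wrapped.unwrap() };
        assert_eq!(RefCell::borrow(&rc).count, 8);
    }
}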
/// Similar to Wrapped, except thread-safe.
#[repr(transparent)]
pub struct ArcWrapped<T>(*const Mutex<T>);
impl<T> ArcWrapped<T> {
pub fn new(value: T) -> Self {
Self::wrap(Arc::new(Mutex::new(value)))
}
pub fn wrap(state: Arc<Mutex<T>>) -> Self {
Self(Arc::into_raw(state))
}
/// Extracts the reference to the data.
/// It may cause problems if attempted in more than one place
pub unsafe fn unwrap(self) -> Arc<Mutex<T>> {
Arc::from_raw(self.0)
}
/// Creates a new Arc reference to the same data.
/// Use for accessing the underlying data as a reference.
pub fn clone_ref(&self) -> Arc<Mutex<T>> {
// A bit dangerous: the Arc may be in use elsewhere
let used_rc = unsafe { Arc::from_raw(self.0) };
let rc = used_rc.clone();
let _ = Arc::into_raw(used_rc); // prevent dropping the original reference
rc
}
}
impl<T> Clone for ArcWrapped<T> {
fn clone(&self) -> Self {
Self::wrap(self.clone_ref())
}
}
/// ToOwned won't work here
impl<T: Clone> CloneOwned for ArcWrapped<T> {
type Owned = T;
fn clone_owned(&self) -> T {
let rc = self.clone_ref();
// FIXME: this panic here is inelegant.
// It will only happen if the lock got poisoned by a panic elsewhere, but still.
let r = rc.lock().unwrap();
r.to_owned()
}
}
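// Illustrative sketch, not part of the original module: the same round trip
// as the `Wrapped` example above, but with the state shared across threads.
// `Counter` and the module name are hypothetical, invented for this test.
#[cfg(test)]
mod arc_wrapped_example {
    use super::*;
    use std::thread;

    struct Counter {
        count: u32,
    }

    #[test]
    fn test_arc_wrapped_across_threads() {
        let wrapped = ArcWrapped::new(Counter { count: 0 });
        // A fresh Arc can be sent to another thread;
        // the Mutex takes over the role RefCell plays in `Wrapped`.
        let arc = wrapped.clone_ref();
        let worker = thread::spawn(move || {
            arc.lock().unwrap().count += 1;
        });
        worker.join().unwrap();
        // Reclaim the Arc and read the result.
        let arc = unsafe { wrapped.unwrap() };
        assert_eq!(arc.lock().unwrap().count, 1);
    }
}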
}
/// Clones the underlying data structure, like ToOwned.
pub trait CloneOwned {
type Owned;
fn clone_owned(&self) -> Self::Owned;
}
pub fn find_max_double<T, I, F>(iterator: I, get: F)
-> f64
where I: Iterator<Item=T>,
F: Fn(&T) -> f64
{
iterator.map(|value| FloatOrd(get(&value)))
.max().unwrap_or(FloatOrd(0f64))
.0
}
pub trait DivCeil<Rhs = Self>
{
type Output;
fn div_ceil(self, rhs: Rhs) -> Self::Output;
}
/// Newer Rust introduces this natively,
/// but we don't always have newer Rust.
impl DivCeil for i32 {
type Output = Self;
fn div_ceil(self, other: i32) -> Self::Output {
let d = self / other;
let m = self % other;
// Round towards positive infinity (like the native `i32::div_ceil`),
// also when the operands have mixed signs.
if m != 0 && (m > 0) == (other > 0) { d + 1 } else { d }
}
}
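// Illustrative check, not part of the original tests, assuming the intent is
// to match the rounding of the native `i32::div_ceil`: towards positive
// infinity for every sign combination. The module and test names are new.
#[cfg(test)]
mod div_ceil_tests {
    use super::*;

    #[test]
    fn check_div_ceil() {
        assert_eq!(DivCeil::div_ceil(7i32, 2), 4);
        assert_eq!(DivCeil::div_ceil(8i32, 2), 4);
        assert_eq!(DivCeil::div_ceil(-7i32, 2), -3);
        assert_eq!(DivCeil::div_ceil(7i32, -2), -3);
    }
}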
#[derive(Debug, Clone, Copy)]
pub struct Rational<T> {
pub numerator: T,
pub denominator: u32,
}
impl<U, T: DivCeil<i32, Output = U>> Rational<T> {
pub fn ceil(self) -> U {
self.numerator.div_ceil(self.denominator as i32)
}
}
impl<T: Mul<i32, Output = T>> Mul<i32> for Rational<T> {
type Output = Self;
fn mul(self, m: i32) -> Self {
Self {
numerator: self.numerator * m,
denominator: self.denominator,
}
}
}
impl<T: Mul<T, Output = T>> Mul<Rational<T>> for Rational<T> {
type Output = Self;
fn mul(self, m: Rational<T>) -> Self {
Self {
numerator: self.numerator * m.numerator,
denominator: self.denominator * m.denominator,
}
}
}
impl PartialEq for Rational<i32> {
fn eq(&self, other: &Self) -> bool {
(self.denominator as i64).saturating_mul(other.numerator as i64)
== (other.denominator as i64).saturating_mul(self.numerator as i64)
}
}
impl Eq for Rational<i32> {}
impl Ord for Rational<i32> {
fn cmp(&self, other: &Self) -> Ordering {
// Using 64-bit values to avoid overflow:
// even i32::MIN * u32::MAX still fits in an i64,
// so the cross-multiplication never saturates and the ordering stays total.
// Saturating mul used just to avoid propagating mistakes.
(other.denominator as i64).saturating_mul(self.numerator as i64)
.cmp(
&(self.denominator as i64).saturating_mul(other.numerator as i64)
)
}
}
impl PartialOrd for Rational<i32> {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
Some(self.cmp(other))
}
}
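// Illustrative sketch, not part of the original tests: scaling an integer
// by a rational factor and rounding up, and multiplying two ratios.
// The values, the module name, and the test name are made up for the example.
#[cfg(test)]
mod rational_tests {
    use super::*;

    #[test]
    fn check_rational_scaling() {
        let scale: Rational<i32> = Rational { numerator: 3, denominator: 2 };
        // 3/2 * 5 = 15/2, which rounds up to 8.
        assert_eq!((scale * 5).ceil(), 8);
        // Multiplying two ratios multiplies numerators and denominators;
        // equality compares cross products, so 3/6 == 1/2.
        assert_eq!(
            Rational { numerator: 1, denominator: 3 }
                * Rational { numerator: 3, denominator: 2 },
            Rational { numerator: 1, denominator: 2 },
        );
    }
}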
/// Compares pointers but not internal values of Rc
pub struct Pointer<T>(pub Rc<T>);
impl<T> Pointer<T> {
pub fn new(value: T) -> Self {
Pointer(Rc::new(value))
}
}
impl<T> Hash for Pointer<T> {
fn hash<H: Hasher>(&self, state: &mut H) {
(&*self.0 as *const T).hash(state);
}
}
impl<T> PartialEq for Pointer<T> {
fn eq(&self, other: &Pointer<T>) -> bool {
Rc::ptr_eq(&self.0, &other.0)
}
}
impl<T> Eq for Pointer<T> {}
impl<T> Clone for Pointer<T> {
fn clone(&self) -> Self {
Pointer(self.0.clone())
}
}
impl<T> Borrow<Rc<T>> for Pointer<T> {
fn borrow(&self) -> &Rc<T> {
&self.0
}
}
pub trait WarningHandler {
/// Handle a warning
fn handle(&mut self, warning: &str);
}
/// Removes the first matching item.
pub fn vec_remove<T, F: Fn(&T) -> bool>(v: &mut Vec<T>, pred: F) -> Option<T> {
let idx = v.iter().position(pred);
idx.map(|idx| v.remove(idx))
}
/// Repeats all the items of the iterator forever,
/// but returns the cycle number alongside.
/// Inefficient due to all the vectors, but doesn't have to be fast.
pub fn cycle_count<T, I: Clone + Iterator<Item=T>>(iter: I)
-> impl Iterator<Item=(T, usize)>
{
let numbered_copies = vec![iter].into_iter()
.cycle()
.enumerate();
numbered_copies.flat_map(|(idx, cycle)|
// Pair each element from the cycle with a copy of the index.
cycle.zip(
vec![idx].into_iter().cycle() // Repeat the index forever.
)
)
}
#[cfg(test)]
mod tests {
use super::*;
use std::collections::HashSet;
#[test]
fn check_set() {
let mut s = HashSet::new();
let first = Rc::new(1u32);
s.insert(Pointer(first.clone()));
assert_eq!(s.insert(Pointer(Rc::new(2u32))), true);
assert_eq!(s.remove(&Pointer(first)), true);
}
#[test]
fn check_count() {
assert_eq!(
cycle_count(5..8).take(7).collect::<Vec<_>>(),
vec![(5, 0), (6, 0), (7, 0), (5, 1), (6, 1), (7, 1), (5, 2)]
);
}
#[test]
fn check_rational_cmp() {
assert_eq!(
Rational { numerator: 1, denominator: 1 },
Rational { numerator: 1, denominator: 1 },
);
assert_eq!(
Rational { numerator: 1, denominator: 1 },
Rational { numerator: 2, denominator: 2 },
);
assert!(
Rational { numerator: 1, denominator: 1 }
< Rational { numerator: 2, denominator: 1 }
);
}
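// Illustrative additions, not part of the original test set,
// covering the remaining small helpers in this module. The test names are new.
#[test]
fn check_find_max_double() {
    let widths = vec![1.5f64, 3.25, 2.0];
    assert_eq!(find_max_double(widths.into_iter(), |w| *w), 3.25);
    // An empty iterator falls back to 0.
    assert_eq!(find_max_double(std::iter::empty::<f64>(), |w| *w), 0.0);
}
#[test]
fn check_vec_remove() {
    let mut v = vec![1u32, 2, 3, 2];
    // Only the first matching item is removed and returned.
    assert_eq!(vec_remove(&mut v, |x| *x == 2), Some(2));
    assert_eq!(v, vec![1, 3, 2]);
    assert_eq!(vec_remove(&mut v, |x| *x == 9), None);
}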
}