types: use slab allocator for type bounds #231

Merged: 10 commits, Jul 4, 2024
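For orientation, here is a minimal sketch of what node construction looks like after this change, assembled from the updated tests in the diff below. The function name add_two_and_sixteen is made up for illustration; the paths (crate::types, crate::node) and constructor calls are copied from the diff itself, so treat this as a sketch of the new API rather than documented usage.

use crate::jet::Core;
use crate::node::{ConstructNode, CoreConstructible, JetConstructible};
use crate::types;
use crate::Value;
use std::sync::Arc;

fn add_two_and_sixteen() -> Arc<ConstructNode<Core>> {
    // One inference context per program; its slab allocator owns all the type bounds.
    let ctx = types::Context::new();
    let args = Arc::<ConstructNode<_>>::pair(
        &Arc::<ConstructNode<_>>::const_word(&ctx, Value::u32(2)),
        &Arc::<ConstructNode<_>>::const_word(&ctx, Value::u32(16)),
    )
    .unwrap();
    // `comp` is unchanged in this PR: it takes no context argument of its own.
    Arc::<ConstructNode<_>>::comp(&args, &Arc::<ConstructNode<_>>::jet(&ctx, Core::Add32)).unwrap()
}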
3 changes: 2 additions & 1 deletion src/bit_encoding/bitwriter.rs
@@ -117,12 +117,13 @@ mod tests {
use super::*;
use crate::jet::Core;
use crate::node::CoreConstructible;
use crate::types;
use crate::ConstructNode;
use std::sync::Arc;

#[test]
fn vec() {
let program = Arc::<ConstructNode<Core>>::unit();
let program = Arc::<ConstructNode<Core>>::unit(&types::Context::new());
let _ = write_to_vec(|w| program.encode(w));
}

14 changes: 8 additions & 6 deletions src/bit_encoding/decode.rs
@@ -12,6 +12,7 @@ use crate::node::{
ConstructNode, CoreConstructible, DisconnectConstructible, JetConstructible, NoWitness,
WitnessConstructible,
};
use crate::types;
use crate::{BitIter, FailEntropy, Value};
use std::collections::HashSet;
use std::sync::Arc;
@@ -178,6 +179,7 @@ pub fn decode_expression<I: Iterator<Item = u8>, J: Jet>(
return Err(Error::TooManyNodes(len));
}

let inference_context = types::Context::new();
let mut nodes = Vec::with_capacity(len);
for _ in 0..len {
let new_node = decode_node(bits, nodes.len())?;
@@ -195,8 +197,8 @@
}

let new = match nodes[data.node.0] {
DecodeNode::Unit => Node(ArcNode::unit()),
DecodeNode::Iden => Node(ArcNode::iden()),
DecodeNode::Unit => Node(ArcNode::unit(&inference_context)),
DecodeNode::Iden => Node(ArcNode::iden(&inference_context)),
DecodeNode::InjL(i) => Node(ArcNode::injl(converted[i].get()?)),
DecodeNode::InjR(i) => Node(ArcNode::injr(converted[i].get()?)),
DecodeNode::Take(i) => Node(ArcNode::take(converted[i].get()?)),
@@ -222,16 +224,16 @@
converted[i].get()?,
&Some(Arc::clone(converted[j].get()?)),
)?),
DecodeNode::Witness => Node(ArcNode::witness(NoWitness)),
DecodeNode::Fail(entropy) => Node(ArcNode::fail(entropy)),
DecodeNode::Witness => Node(ArcNode::witness(&inference_context, NoWitness)),
DecodeNode::Fail(entropy) => Node(ArcNode::fail(&inference_context, entropy)),
DecodeNode::Hidden(cmr) => {
if !hidden_set.insert(cmr) {
return Err(Error::SharingNotMaximal);
}
Hidden(cmr)
}
DecodeNode::Jet(j) => Node(ArcNode::jet(j)),
DecodeNode::Word(ref w) => Node(ArcNode::const_word(Arc::clone(w))),
DecodeNode::Jet(j) => Node(ArcNode::jet(&inference_context, j)),
DecodeNode::Word(ref w) => Node(ArcNode::const_word(&inference_context, Arc::clone(w))),
};
converted.push(new);
}
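One detail worth calling out in the decode hunk above (my reading of the diff, not a claim made in the PR): only the leaf variants — Unit, Iden, Witness, Fail, Jet and Word — gained an explicit context argument, while combinators such as InjL, Take or Case are still built solely from already-converted children and presumably pick their context up from them. A condensed, hypothetical sketch of that split:

use crate::jet::Core;
use crate::node::{ConstructNode, CoreConstructible};
use crate::types;
use std::sync::Arc;

fn leaf_vs_combinator() -> Arc<ConstructNode<Core>> {
    let ctx = types::Context::new();
    let unit = Arc::<ConstructNode<Core>>::unit(&ctx); // leaf: takes the context explicitly
    Arc::<ConstructNode<Core>>::injl(&unit)            // combinator: unchanged signature
}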
22 changes: 13 additions & 9 deletions src/human_encoding/named_node.rs
@@ -116,6 +116,7 @@ impl<J: Jet> NamedCommitNode<J> {
struct Populator<'a, J: Jet> {
witness_map: &'a HashMap<Arc<str>, Arc<Value>>,
disconnect_map: &'a HashMap<Arc<str>, Arc<NamedCommitNode<J>>>,
inference_context: types::Context,
phantom: PhantomData<J>,
}

@@ -153,17 +154,16 @@ impl<J: Jet> NamedCommitNode<J> {
// Like witness nodes (see above), disconnect nodes may be pruned later.
// The finalization will detect missing branches and throw an error.
let maybe_commit = self.disconnect_map.get(hole_name);
// FIXME: Recursive call of to_witness_node
// We cannot introduce a stack
// because we are implementing methods of the trait Converter
// which are used Marker::convert().
// FIXME: recursive call to convert
// We cannot introduce a stack because we are implementing the Converter
// trait and do not have access to the actual algorithm used for conversion
// in order to save its state.
//
// OTOH, if a user writes a program with so many disconnected expressions
// that there is a stack overflow, it's his own fault :)
// This would fail in a fuzz test.
let witness = maybe_commit.map(|commit| {
commit.to_witness_node(self.witness_map, self.disconnect_map)
});
// This may fail in a fuzz test.
let witness = maybe_commit
.map(|commit| commit.convert::<InternalSharing, _, _>(self).unwrap());
Ok(witness)
}
}
@@ -181,13 +181,15 @@ impl<J: Jet> NamedCommitNode<J> {
let inner = inner
.map(|node| node.cached_data())
.map_witness(|maybe_value| maybe_value.clone());
Ok(WitnessData::from_inner(inner).expect("types are already finalized"))
Ok(WitnessData::from_inner(&self.inference_context, inner)
.expect("types are already finalized"))
}
}

self.convert::<InternalSharing, _, _>(&mut Populator {
witness_map: witness,
disconnect_map: disconnect,
inference_context: types::Context::new(),
phantom: PhantomData,
})
.unwrap()
@@ -245,13 +247,15 @@ pub struct NamedConstructData<J> {
impl<J: Jet> NamedConstructNode<J> {
/// Construct a named construct node from parts.
pub fn new(
inference_context: &types::Context,
name: Arc<str>,
position: Position,
user_source_types: Arc<[types::Type]>,
user_target_types: Arc<[types::Type]>,
inner: node::Inner<Arc<Self>, J, Arc<Self>, WitnessOrHole>,
) -> Result<Self, types::Error> {
let construct_data = ConstructData::from_inner(
inference_context,
inner
.as_ref()
.map(|data| &data.cached_data().internal)
4 changes: 3 additions & 1 deletion src/human_encoding/parse/mod.rs
@@ -7,7 +7,7 @@ mod ast;
use crate::dag::{Dag, DagLike, InternalSharing};
use crate::jet::Jet;
use crate::node;
use crate::types::Type;
use crate::types::{self, Type};
use std::collections::HashMap;
use std::mem;
use std::sync::atomic::{AtomicUsize, Ordering};
@@ -419,6 +419,7 @@ pub fn parse<J: Jet + 'static>(
drop(unresolved_map);

// ** Step 3: convert each DAG of names/expressions into a DAG of NamedNodes.
let inference_context = types::Context::new();
let mut roots = HashMap::<Arc<str>, Arc<NamedCommitNode<J>>>::new();
for (name, expr) in &resolved_map {
if expr.in_degree.load(Ordering::SeqCst) > 0 {
@@ -485,6 +486,7 @@
.unwrap_or_else(|| Arc::from(namer.assign_name(inner.as_ref()).as_str()));

let node = NamedConstructNode::new(
&inference_context,
Arc::clone(&name),
data.node.position,
Arc::clone(&data.node.user_source_types),
3 changes: 2 additions & 1 deletion src/jet/elements/tests.rs
@@ -5,6 +5,7 @@ use std::sync::Arc;
use crate::jet::elements::{ElementsEnv, ElementsUtxo};
use crate::jet::Elements;
use crate::node::{ConstructNode, JetConstructible};
use crate::types;
use crate::{BitMachine, Cmr, Value};
use elements::secp256k1_zkp::Tweak;
use elements::taproot::ControlBlock;
@@ -99,7 +100,7 @@ fn test_ffi_env() {
BlockHash::all_zeros(),
);

let prog = Arc::<ConstructNode<_>>::jet(Elements::LockTime);
let prog = Arc::<ConstructNode<_>>::jet(&types::Context::new(), Elements::LockTime);
assert_eq!(
BitMachine::test_exec(prog, &env).expect("executing"),
Value::u32(100),
13 changes: 8 additions & 5 deletions src/jet/mod.rs
@@ -93,18 +93,20 @@ pub trait Jet:
mod tests {
use crate::jet::Core;
use crate::node::{ConstructNode, CoreConstructible, JetConstructible};
use crate::types;
use crate::{BitMachine, Value};
use std::sync::Arc;

#[test]
fn test_ffi_jet() {
let ctx = types::Context::new();
let two_words = Arc::<ConstructNode<_>>::comp(
&Arc::<ConstructNode<_>>::pair(
&Arc::<ConstructNode<_>>::const_word(Value::u32(2)),
&Arc::<ConstructNode<_>>::const_word(Value::u32(16)),
&Arc::<ConstructNode<_>>::const_word(&ctx, Value::u32(2)),
&Arc::<ConstructNode<_>>::const_word(&ctx, Value::u32(16)),
)
.unwrap(),
&Arc::<ConstructNode<_>>::jet(Core::Add32),
&Arc::<ConstructNode<_>>::jet(&ctx, Core::Add32),
)
.unwrap();
assert_eq!(
@@ -118,9 +120,10 @@

#[test]
fn test_simple() {
let ctx = types::Context::new();
let two_words = Arc::<ConstructNode<Core>>::pair(
&Arc::<ConstructNode<_>>::const_word(Value::u32(2)),
&Arc::<ConstructNode<_>>::const_word(Value::u16(16)),
&Arc::<ConstructNode<_>>::const_word(&ctx, Value::u32(2)),
&Arc::<ConstructNode<_>>::const_word(&ctx, Value::u16(16)),
)
.unwrap();
assert_eq!(
4 changes: 3 additions & 1 deletion src/merkle/amr.rs
@@ -291,11 +291,13 @@ mod tests {

use crate::jet::Core;
use crate::node::{ConstructNode, JetConstructible};
use crate::types;
use std::sync::Arc;

#[test]
fn fixed_amr() {
let node = Arc::<ConstructNode<_>>::jet(Core::Verify)
let ctx = types::Context::new();
let node = Arc::<ConstructNode<_>>::jet(&ctx, Core::Verify)
.finalize_types_non_program()
.unwrap();
// Checked against C implementation