feat-decompress spl
ananas-block committed Mar 29, 2024
1 parent fdb9fff commit a85d4e7
Showing 5 changed files with 139 additions and 21 deletions.
19 changes: 8 additions & 11 deletions programs/compressed-pda/src/de_compression.rs
@@ -20,7 +20,7 @@ pub struct InitializeCompressedSolPda<'info> {
#[account(
init,
payer = fee_payer,
- seeds = [&COMPRESSED_SOL_PDA_SEED],
+ seeds = [COMPRESSED_SOL_PDA_SEED],
bump,
space = CompressedSolPda::LEN,
)]
@@ -86,26 +86,23 @@ pub fn compress_lamports<'a, 'b, 'c: 'info, 'info>(
}

pub fn transfer_lamports<'info>(
- sender: &AccountInfo<'info>,
- receiver: &AccountInfo<'info>,
+ from: &AccountInfo<'info>,
+ to: &AccountInfo<'info>,
authority: &AccountInfo<'info>,
lamports: u64,
) -> Result<()> {
msg!("transfer_lamports {}", lamports);
- msg!("sender lamports: {}", sender.lamports());
- msg!("receiver lamports: {}", receiver.lamports());
- let instruction = anchor_lang::solana_program::system_instruction::transfer(
- sender.key,
- receiver.key,
- lamports,
- );
+ msg!("from lamports: {}", from.lamports());
+ msg!("to lamports: {}", to.lamports());
+ let instruction =
+ anchor_lang::solana_program::system_instruction::transfer(from.key, to.key, lamports);
let (seed, bump) = get_seeds(&crate::ID, &authority.key())?;
let bump = &[bump];
let seeds = &[&[b"cpi_authority", seed.as_slice(), bump][..]];

anchor_lang::solana_program::program::invoke_signed(
&instruction,
- &[authority.clone(), sender.clone(), receiver.clone()],
+ &[authority.clone(), from.clone(), to.clone()],
seeds,
)?;
Ok(())
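The system-program CPI above is signed by the program's cpi_authority PDA rather than by a user key. As an illustrative aside (not part of this commit), a client can derive that signer address with the same seed layout; this sketch assumes get_seeds returns the seed slice that pairs with b"cpi_authority", with the bump being the one find_program_address yields:

use solana_sdk::pubkey::Pubkey;

// Sketch: off-chain derivation of the signer PDA used by transfer_lamports.
// Must line up with the on-chain signer seeds [b"cpi_authority", seed, bump].
fn derive_cpi_authority(program_id: &Pubkey, seed: &[u8]) -> (Pubkey, u8) {
    let prefix: &[u8] = b"cpi_authority";
    Pubkey::find_program_address(&[prefix, seed], program_id)
}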
2 changes: 1 addition & 1 deletion programs/compressed-pda/tests/test.rs
@@ -1,4 +1,4 @@
- // #![cfg(feature = "test-sbf")]
+ #![cfg(feature = "test-sbf")]

use std::{assert_eq, println, vec::Vec};

90 changes: 90 additions & 0 deletions programs/compressed-token/src/de_compress.rs
@@ -0,0 +1,90 @@
use crate::{CompressedTokenInstructionDataTransfer, TransferInstruction};
use anchor_lang::{prelude::*, solana_program::account_info::AccountInfo};
use anchor_spl::token::Transfer;
use psp_compressed_pda::append_state::get_seeds;

pub fn de_compress_amount<'a, 'info>(
inputs: &'a CompressedTokenInstructionDataTransfer,
ctx: &Context<'_, '_, '_, 'info, TransferInstruction<'info>>,
) -> Result<()> {
if inputs.is_compress {
compress_spl_tokens(inputs, ctx)
} else if inputs.de_compress_amount.is_some() {
decompress_spl_tokens(inputs, ctx)
} else {
Ok(())
}
}

pub fn decompress_spl_tokens<'a, 'info>(
inputs: &'a CompressedTokenInstructionDataTransfer,
ctx: &Context<'_, '_, '_, 'info, TransferInstruction<'info>>,
) -> Result<()> {
let recipient = match ctx.accounts.decompress_token_account.as_ref() {
Some(de_compress_recipient) => de_compress_recipient.to_account_info(),
None => return err!(crate::ErrorCode::DecompressRecipientUndefinedForDecompress),
};
let token_pool_pda = match ctx.accounts.token_pool_pda.as_ref() {
Some(token_pool_pda) => token_pool_pda.to_account_info(),
None => return err!(crate::ErrorCode::CompressedPdaUndefinedForDecompress),
};
let lamports = match inputs.de_compress_amount {
Some(lamports) => lamports,
None => return err!(crate::ErrorCode::DeCompressAmountUndefinedForDecompress),
};
token_pool_pda.sub_lamports(lamports)?;
recipient.add_lamports(lamports)?;
Ok(())
}

pub fn compress_spl_tokens<'a, 'info>(
inputs: &'a CompressedTokenInstructionDataTransfer,
ctx: &Context<'_, '_, '_, 'info, TransferInstruction<'info>>,
) -> Result<()> {
let recipient = match ctx.accounts.token_pool_pda.as_ref() {
Some(token_pool_pda) => token_pool_pda.to_account_info(),
None => return err!(crate::ErrorCode::CompressedPdaUndefinedForCompress),
};
let lamports = match inputs.de_compress_amount {
Some(lamports) => lamports,
None => return err!(crate::ErrorCode::DeCompressAmountUndefinedForCompress),
};

transfer(
&ctx.accounts
.decompress_token_account
.as_ref()
.unwrap()
.to_account_info(),
&recipient,
&ctx.accounts
.psp_account_compression_authority
.to_account_info(),
&ctx.accounts
.token_program
.as_ref()
.unwrap()
.to_account_info(),
lamports,
)
}

pub fn transfer<'info>(
from: &AccountInfo<'info>,
to: &AccountInfo<'info>,
authority: &AccountInfo<'info>,
token_program: &AccountInfo<'info>,
amount: u64,
) -> Result<()> {
let (seed, bump) = get_seeds(&crate::ID, &authority.key())?;
let bump = &[bump];
let seeds = &[&[b"cpi_authority", seed.as_slice(), bump][..]];

let accounts = Transfer {
from: from.to_account_info(),
to: to.to_account_info(),
authority: authority.to_account_info(),
};
let cpi_ctx = CpiContext::new_with_signer(token_program.to_account_info(), accounts, seeds);
anchor_spl::token::transfer(cpi_ctx, amount)
}
11 changes: 11 additions & 0 deletions programs/compressed-token/src/lib.rs
@@ -1,5 +1,6 @@
use anchor_lang::prelude::*;

pub mod de_compress;
pub mod process_mint;
pub mod process_transfer;

@@ -211,4 +212,14 @@ pub enum ErrorCode {
ComputeDecompressSumFailed,
#[msg("SumCheckFailed")]
SumCheckFailed,
#[msg("DecompressRecipientUndefinedForDecompress")]
DecompressRecipientUndefinedForDecompress,
#[msg("CompressedPdaUndefinedForDecompress")]
CompressedPdaUndefinedForDecompress,
#[msg("DeCompressAmountUndefinedForDecompress")]
DeCompressAmountUndefinedForDecompress,
#[msg("CompressedPdaUndefinedForCompress")]
CompressedPdaUndefinedForCompress,
#[msg("DeCompressAmountUndefinedForCompress")]
DeCompressAmountUndefinedForCompress,
}
38 changes: 29 additions & 9 deletions programs/compressed-token/src/process_transfer.rs
@@ -1,4 +1,5 @@
use anchor_lang::{prelude::*, AnchorDeserialize};
use anchor_spl::token::{Token, TokenAccount};
use light_hasher::{errors::HasherError, DataHasher, Hasher, Poseidon};
use light_utils::hash_to_bn254_field_size_le;
use psp_compressed_pda::{
@@ -9,7 +10,7 @@ use psp_compressed_pda::{
InstructionDataTransfer as PspCompressedPdaInstructionDataTransfer,
};

- use crate::ErrorCode;
+ use crate::{de_compress::de_compress_amount, ErrorCode};

/// Process a token transfer instruction
///
@@ -46,6 +47,7 @@ pub fn process_transfer<'a, 'b, 'c, 'info: 'b + 'c>(
None,
true,
)?;
de_compress_amount(&inputs, &ctx)?;

let output_compressed_accounts = crate::create_output_compressed_accounts(
mint,
@@ -259,18 +261,25 @@ pub struct TransferInstruction<'info> {
pub account_compression_program:
Program<'info, account_compression::program::AccountCompression>,
pub self_program: Program<'info, crate::program::PspCompressedToken>,
+ #[account(mut)]
+ pub token_pool_pda: Option<Account<'info, TokenAccount>>,
+ #[account(mut)]
+ pub decompress_token_account: Option<Account<'info, TokenAccount>>,
+ pub token_program: Option<Program<'info, Token>>,
}

// TODO: parse compressed_accounts a more efficient way, since owner is sent multiple times this way
// This struct is equivalent to the InstructionDataTransfer, but uses the imported types from the psp_compressed_pda
#[derive(Debug, Clone, AnchorSerialize, AnchorDeserialize)]
pub struct CompressedTokenInstructionDataTransfer {
- proof: Option<CompressedProof>,
- root_indices: Vec<u16>,
- input_compressed_accounts_with_merkle_context: Vec<CompressedAccountWithMerkleContext>,
- input_token_data: Vec<TokenData>,
- output_compressed_accounts: Vec<TokenTransferOutputData>,
- output_state_merkle_tree_account_indices: Vec<u8>,
+ pub proof: Option<CompressedProof>,
+ pub root_indices: Vec<u16>,
+ pub input_compressed_accounts_with_merkle_context: Vec<CompressedAccountWithMerkleContext>,
+ pub input_token_data: Vec<TokenData>,
+ pub output_compressed_accounts: Vec<TokenTransferOutputData>,
+ pub output_state_merkle_tree_account_indices: Vec<u8>,
+ pub is_compress: bool,
+ pub de_compress_amount: Option<u64>,
}

#[derive(Clone, Copy, Debug, PartialEq, Eq, AnchorSerialize, AnchorDeserialize)]
@@ -392,15 +401,16 @@ pub fn get_cpi_authority_pda() -> (Pubkey, u8) {
pub mod transfer_sdk {
use std::collections::HashMap;

- use crate::{CompressedTokenInstructionDataTransfer, TokenTransferOutputData};
use account_compression::{AccountMeta, NOOP_PROGRAM_ID};
- use anchor_lang::{AnchorDeserialize, AnchorSerialize, InstructionData, ToAccountMetas};
+ use anchor_lang::{AnchorDeserialize, AnchorSerialize, Id, InstructionData, ToAccountMetas};
+ use anchor_spl::token::Token;
use psp_compressed_pda::{
compressed_account::{CompressedAccount, CompressedAccountWithMerkleContext},
utils::CompressedProof,
};
use solana_sdk::{instruction::Instruction, pubkey::Pubkey};

+ use crate::{CompressedTokenInstructionDataTransfer, TokenTransferOutputData};
#[allow(clippy::too_many_arguments)]
pub fn create_transfer_instruction(
fee_payer: &Pubkey,
@@ -413,6 +423,10 @@ root_indices: &[u16],
root_indices: &[u16],
leaf_indices: &[u32],
proof: &CompressedProof,
is_compress: bool,
de_compress_amount: Option<u64>,
token_pool_pda: Option<Pubkey>,
decompress_token_account: Option<Pubkey>,
) -> Instruction {
let mut remaining_accounts = HashMap::<Pubkey, usize>::new();
let mut input_compressed_accounts_with_merkle_context: Vec<
@@ -493,6 +507,8 @@ pub mod transfer_sdk {
input_token_data: input_compressed_account_token_data,
// TODO: support multiple output state merkle trees
output_state_merkle_tree_account_indices,
is_compress,
de_compress_amount,
};
let mut inputs = Vec::new();
CompressedTokenInstructionDataTransfer::serialize(&inputs_struct, &mut inputs).unwrap();
@@ -514,6 +530,10 @@ ),
),
account_compression_program: account_compression::ID,
self_program: crate::ID,
token_pool_pda: token_pool_pda.map(|p| crate::get_token_pool_pda(&p)),
decompress_token_account: decompress_token_account
.map(|p| crate::get_token_pool_pda(&p)),
token_program: token_pool_pda.map(|_| Token::id()),
};

Instruction {
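On the SDK side, callers that neither compress nor decompress simply append false, None, None, None for the four new parameters; all three optional accounts then stay unset and no SPL Token program account is needed. A tiny standalone check of that mapping (a sketch, not part of the commit; Token::id() is the SPL Token program id exposed through anchor_spl):

use anchor_lang::Id;
use anchor_spl::token::Token;
use solana_sdk::pubkey::Pubkey;

// Mirrors the accounts struct above: token_program is supplied only when a pool account is.
fn token_program_for(token_pool_pda: Option<Pubkey>) -> Option<Pubkey> {
    token_pool_pda.map(|_| Token::id())
}

fn main() {
    // Plain compressed transfer: no pool account, so no SPL Token CPI is required.
    assert!(token_program_for(None).is_none());
}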
