diff --git a/Cargo.lock b/Cargo.lock index fd83c0617b..41deb2712a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1,6 +1,6 @@ # This file is automatically @generated by Cargo. # It is not intended for manual editing. -version = 4 +version = 3 [[package]] name = "Inflector" @@ -3373,6 +3373,44 @@ dependencies = [ "zerocopy", ] +[[package]] +name = "light-compressed-token-client" +version = "0.1.0" +dependencies = [ + "account-compression", + "anchor-lang", + "anchor-spl", + "borsh 0.10.4", + "light-compressed-account", + "light-compressed-token", + "light-system-program-anchor", + "num-bigint 0.4.6", + "num-traits", + "solana-account", + "solana-hash", + "solana-instruction", + "solana-program", + "solana-pubkey", + "solana-sdk", + "solana-signature", + "solana-transaction", + "thiserror 2.0.12", + "tokio", +] + +[[package]] +name = "light-compressed-token-sdk" +version = "0.1.0" +dependencies = [ + "anchor-lang", + "borsh 0.10.4", + "light-compressed-account", + "light-heap", + "num-bigint 0.4.6", + "solana-program", + "solana-sdk", +] + [[package]] name = "light-concurrent-merkle-tree" version = "2.1.0" diff --git a/Cargo.toml b/Cargo.toml index 3b5c520456..3ceb2d205b 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -23,6 +23,8 @@ members = [ "sdk-libs/sdk", "sdk-libs/sdk-pinocchio", "sdk-libs/sdk-types", + "sdk-libs/compressed-token-sdk", + "sdk-libs/compressed-token-client", "sdk-libs/photon-api", "sdk-libs/program-test", "xtask", @@ -175,6 +177,10 @@ account-compression = { path = "programs/account-compression", version = "2.0.0" light-compressed-token = { path = "programs/compressed-token", version = "2.0.0", features = [ "cpi", ] } +light-compressed-token-client = { path = "sdk-libs/compressed-token-client", version = "0.1.0" } +light-system-program = { path = "anchor-programs/system", version = "2.0.0", features = [ + "cpi", +] } light-system-program-anchor = { path = "anchor-programs/system", version = "2.0.0", features = [ "cpi", ] } diff --git a/cli/package.json b/cli/package.json index 20263f0311..9d5fd8065d 100644 --- a/cli/package.json +++ b/cli/package.json @@ -1,6 +1,6 @@ { "name": "@lightprotocol/zk-compression-cli", - "version": "0.25.0", + "version": "0.27.0", "description": "ZK Compression: Secure Scaling on Solana", "maintainers": [ { @@ -17,54 +17,30 @@ "/accounts", "/bin", "!bin/cargo-generate", + "!/bin/**/*.vkey", + "/bin/proving-keys/combined_26_1_1.key", + "/bin/proving-keys/combined_26_1_2.key", + "/bin/proving-keys/combined_26_2_1.key", + "/bin/proving-keys/combined_32_40_1_1.key", + "/bin/proving-keys/combined_32_40_1_2.key", + "/bin/proving-keys/combined_32_40_2_1.key", + "/bin/proving-keys/inclusion_32_1.key", + "/bin/proving-keys/inclusion_32_2.key", + "/bin/proving-keys/inclusion_32_3.key", + "/bin/proving-keys/inclusion_32_4.key", + "/bin/proving-keys/mainnet_inclusion_26_1.key", + "/bin/proving-keys/mainnet_inclusion_26_2.key", + "/bin/proving-keys/mainnet_inclusion_26_3.key", + "/bin/proving-keys/mainnet_inclusion_26_4.key", + "/bin/proving-keys/non-inclusion_26_1.key", + "/bin/proving-keys/non-inclusion_26_2.key", + "/bin/proving-keys/non-inclusion_40_1.key", + "/bin/proving-keys/non-inclusion_40_2.key", "/dist", "/test_bin", "./config.json", "/npm-shrinkwrap.json", - "/oclif.manifest.json", - "bin/proving-keys/combined_26_1_1.key", - "bin/proving-keys/combined_26_1_1.vkey", - "bin/proving-keys/combined_26_1_10.key", - "bin/proving-keys/combined_26_1_10.vkey", - "bin/proving-keys/combined_26_10_1.key", - "bin/proving-keys/combined_26_10_1.vkey", - 
"bin/proving-keys/combined_26_10_10.key", - "bin/proving-keys/combined_26_10_10.vkey", - "bin/proving-keys/inclusion_26_1.key", - "bin/proving-keys/inclusion_26_1.vkey", - "bin/proving-keys/inclusion_26_10.key", - "bin/proving-keys/inclusion_26_10.vkey", - "bin/proving-keys/non-inclusion_26_1.key", - "bin/proving-keys/non-inclusion_26_1.vkey", - "bin/proving-keys/non-inclusion_26_10.key", - "bin/proving-keys/non-inclusion_26_10.vkey", - "!bin/proving-keys/update_26_10.key", - "!bin/proving-keys/update_26_10.vkey", - "!bin/proving-keys/address-append_40_1.key", - "!bin/proving-keys/address-append_40_1.vkey", - "!bin/proving-keys/address-append_40_10.key", - "!bin/proving-keys/address-append_40_10.vkey", - "!bin/proving-keys/append-with-proofs_26_10.key", - "!bin/proving-keys/append-with-proofs_26_10.vkey", - "!bin/proving-keys/append-with-subtrees_26_10.key", - "!bin/proving-keys/append-with-subtrees_26_10.vkey", - "!bin/proving-keys/non-inclusion_40_1.vkey", - "!bin/proving-keys/non-inclusion_40_2.vkey", - "!bin/proving-keys/non-inclusion_40_3.vkey", - "!bin/proving-keys/non-inclusion_40_4.vkey", - "!bin/proving-keys/non-inclusion_40_8.vkey", - "!bin/proving-keys/mainnet_inclusion_26_1.key", - "!bin/proving-keys/mainnet_inclusion_26_1.vkey", - "!bin/proving-keys/mainnet_inclusion_26_2.vkey", - "!bin/proving-keys/mainnet_inclusion_26_3.vkey", - "!bin/proving-keys/mainnet_inclusion_26_4.vkey", - "!bin/proving-keys/mainnet_inclusion_26_8.vkey", - "!bin/proving-keys/inclusion_32_1.key", - "!bin/proving-keys/inclusion_32_1.vkey", - "!bin/proving-keys/inclusion_32_2.vkey", - "!bin/proving-keys/inclusion_32_3.vkey", - "!bin/proving-keys/inclusion_32_4.vkey", - "!bin/proving-keys/inclusion_32_8.vkey" + "/oclif.manifest.json" ], "dependencies": { "@coral-xyz/anchor": "0.29.0", diff --git a/cli/scripts/buildProver.sh b/cli/scripts/buildProver.sh index d71ec31974..79f8f31909 100755 --- a/cli/scripts/buildProver.sh +++ b/cli/scripts/buildProver.sh @@ -9,6 +9,7 @@ build_prover() { root_dir="$(git rev-parse --show-toplevel)" gnark_dir="${root_dir}/prover/server" out_dir="${root_dir}/cli/bin" +cli_dir="${root_dir}/cli" if [ ! -e "$out_dir" ]; then mkdir -p "$out_dir" @@ -21,7 +22,27 @@ if [ ! 
-d "${gnark_dir}/proving-keys" ] || [ -z "$(ls -A "${gnark_dir}/proving-k exit 1 fi -cp -r "${gnark_dir}/proving-keys" "$out_dir" +# Create proving-keys directory in output +mkdir -p "$out_dir/proving-keys" + +# Dynamically read .key files from package.json files field +# Extract all lines containing "/bin/proving-keys/" and ".key" +key_files=$(node -e " +const pkg = require('${cli_dir}/package.json'); +const keyFiles = pkg.files + .filter(f => f.includes('/bin/proving-keys/') && f.endsWith('.key')) + .map(f => f.split('/').pop()); +console.log(keyFiles.join(' ')); +") + +# Copy only the specified .key files +for key_file in $key_files; do + if [ -f "${gnark_dir}/proving-keys/${key_file}" ]; then + cp "${gnark_dir}/proving-keys/${key_file}" "$out_dir/proving-keys/${key_file}" + else + echo "WARNING: ${key_file} not found in ${gnark_dir}/proving-keys" + fi +done cd "$gnark_dir" diff --git a/cli/src/commands/start-prover/index.ts b/cli/src/commands/start-prover/index.ts index 792e75f935..de6f5b4abb 100644 --- a/cli/src/commands/start-prover/index.ts +++ b/cli/src/commands/start-prover/index.ts @@ -17,8 +17,15 @@ class StartProver extends Command { }), "run-mode": Flags.string({ description: - "Specify the running mode (forester, forester-test, rpc, full, or full-test)", - options: ["rpc", "forester", "forester-test", "full", "full-test"], + "Specify the running mode (local-rpc, forester, forester-test, rpc, or full). Default: local-rpc", + options: [ + "local-rpc", + "rpc", + "forester", + "forester-test", + "full", + "full-test", + ], required: false, }), circuit: Flags.string({ @@ -63,13 +70,16 @@ class StartProver extends Command { const proverPort = flags["prover-port"] || 3001; const force = flags["force"] || false; const redisUrl = flags["redisUrl"] || process.env.REDIS_URL || undefined; - await startProver( - proverPort, - flags["run-mode"], - flags["circuit"], - force, - redisUrl, - ); + + // TODO: remove this workaround. + // Force local-rpc mode when rpc is specified + let runMode = flags["run-mode"]; + if (runMode === "rpc") { + runMode = "local-rpc"; + this.log("Note: Running in local-rpc mode instead of rpc mode"); + } + + await startProver(proverPort, runMode, flags["circuit"], force, redisUrl); const healthy = await healthCheck(proverPort, 10, 1000); loader.stop(); diff --git a/cli/src/commands/test-validator/index.ts b/cli/src/commands/test-validator/index.ts index 6ef90b0481..f23522a801 100644 --- a/cli/src/commands/test-validator/index.ts +++ b/cli/src/commands/test-validator/index.ts @@ -77,8 +77,9 @@ class SetupCommand extends Command { }), "prover-run-mode": Flags.string({ description: - "Specify the running mode for the prover (forester, forester-test, rpc, or full)", + "Specify the running mode for the prover (local-rpc, forester, forester-test, rpc, or full). Default: local-rpc", options: [ + "local-rpc", "rpc", "forester", "forester-test", diff --git a/cli/src/utils/constants.ts b/cli/src/utils/constants.ts index 4324d95b2a..ef6f140ef2 100644 --- a/cli/src/utils/constants.ts +++ b/cli/src/utils/constants.ts @@ -19,7 +19,12 @@ export const SOLANA_VALIDATOR_PROCESS_NAME = "solana-test-validator"; export const LIGHT_PROVER_PROCESS_NAME = "light-prover"; export const INDEXER_PROCESS_NAME = "photon"; -export const PHOTON_VERSION = "0.50.0"; +export const PHOTON_VERSION = "0.50.1"; + +// Set these to override Photon requirements with a specific git commit: +export const USE_PHOTON_FROM_GIT = true; // If true, will show git install command instead of crates.io. 
+export const PHOTON_GIT_REPO = "https://github.com/lightprotocol/photon.git"; +export const PHOTON_GIT_COMMIT = "49b7e7f0d668babbc4d65fe8a0a7236df76f75a8"; // If empty, will use main branch. export const LIGHT_PROTOCOL_PROGRAMS_DIR_ENV = "LIGHT_PROTOCOL_PROGRAMS_DIR"; export const BASE_PATH = "../../bin/"; @@ -29,7 +34,7 @@ export const SOLANA_SDK_VERSION = "2.2"; export const ANCHOR_VERSION = "0.31.1"; export const COMPRESSED_PROGRAM_TEMPLATE_TAG = "v0.3.1"; export const TOKIO_VERSION = "1.36.0"; -export const LIGHT_CLI_VERSION = "0.26.0"; +export const LIGHT_CLI_VERSION = "0.27.0"; export const SOLANA_CLI_VERSION = "2.2.15"; export const LIGHT_HASHER_VERSION = "3.1.0"; diff --git a/cli/src/utils/initTestEnv.ts b/cli/src/utils/initTestEnv.ts index a2356de889..b500ef82a7 100644 --- a/cli/src/utils/initTestEnv.ts +++ b/cli/src/utils/initTestEnv.ts @@ -109,6 +109,7 @@ export async function initTestEnv({ photonDatabaseUrl?: string; limitLedgerSize?: number; proverRunMode?: + | "local-rpc" | "inclusion" | "non-inclusion" | "forester" diff --git a/cli/src/utils/processPhotonIndexer.ts b/cli/src/utils/processPhotonIndexer.ts index 8a875a0a11..1945818bc1 100644 --- a/cli/src/utils/processPhotonIndexer.ts +++ b/cli/src/utils/processPhotonIndexer.ts @@ -1,8 +1,40 @@ import which from "which"; import { killProcess, spawnBinary, waitForServers } from "./process"; -import { INDEXER_PROCESS_NAME, PHOTON_VERSION } from "./constants"; +import { + INDEXER_PROCESS_NAME, + PHOTON_VERSION, + USE_PHOTON_FROM_GIT, + PHOTON_GIT_REPO, + PHOTON_GIT_COMMIT, +} from "./constants"; import { exec } from "node:child_process"; import * as util from "node:util"; +import { exit } from "node:process"; + +const execAsync = util.promisify(exec); + +async function isExpectedPhotonVersion( + requiredVersion: string, +): Promise { + try { + const { stdout } = await execAsync("photon --version"); + const version = stdout.trim(); + return version.includes(requiredVersion); + } catch (error) { + console.error("Error checking Photon version:", error); + return false; + } +} + +function getPhotonInstallMessage(): string { + if (USE_PHOTON_FROM_GIT && PHOTON_GIT_COMMIT) { + return `\nLatest Photon indexer not found. Please install it by running: "cargo install --git ${PHOTON_GIT_REPO} --rev ${PHOTON_GIT_COMMIT} --locked"`; + } else if (USE_PHOTON_FROM_GIT) { + return `\nLatest Photon indexer not found. Please install it by running: "cargo install --git ${PHOTON_GIT_REPO} --locked"`; + } else { + return `\nLatest Photon indexer not found. Please install it by running: "cargo install photon-indexer --version ${PHOTON_VERSION} --locked"`; + } +} export async function startIndexer( rpcUrl: string, @@ -16,9 +48,8 @@ export async function startIndexer( resolvedOrNull === null || (checkPhotonVersion && !(await isExpectedPhotonVersion(PHOTON_VERSION))) ) { - const message = `Photon indexer not found. 
Please install it by running "cargo install photon-indexer --version ${PHOTON_VERSION} --locked"`; - console.log(message); - throw new Error(message); + console.log(getPhotonInstallMessage()); + return exit(1); } else { console.log("Starting indexer..."); const args: string[] = [ @@ -39,17 +70,3 @@ export async function startIndexer( export async function killIndexer() { await killProcess(INDEXER_PROCESS_NAME); } - -const execAsync = util.promisify(exec); -async function isExpectedPhotonVersion( - requiredVersion: string, -): Promise { - try { - const { stdout } = await execAsync("photon --version"); - const version = stdout.trim(); - return version.includes(requiredVersion); - } catch (error) { - console.error("Error checking Photon version:", error); - return false; - } -} diff --git a/cli/src/utils/processProverServer.ts b/cli/src/utils/processProverServer.ts index bdd7f0b84c..af85ca26a5 100644 --- a/cli/src/utils/processProverServer.ts +++ b/cli/src/utils/processProverServer.ts @@ -122,7 +122,7 @@ export async function startProver( } if ((!circuits || circuits.length === 0) && runMode == null) { - runMode = "rpc"; + runMode = "local-rpc"; args.push("--run-mode", runMode); console.log(`Starting prover with fallback ${runMode} mode...`); } diff --git a/js/compressed-token/package.json b/js/compressed-token/package.json index f8fc8bad4c..c142f89db7 100644 --- a/js/compressed-token/package.json +++ b/js/compressed-token/package.json @@ -1,6 +1,6 @@ { "name": "@lightprotocol/compressed-token", - "version": "0.21.0", + "version": "0.22.0", "description": "JS client to interact with the compressed-token program", "sideEffects": false, "main": "dist/cjs/node/index.cjs", @@ -85,7 +85,7 @@ "test:unit:all:v1": "LIGHT_PROTOCOL_VERSION=V1 vitest run tests/unit --reporter=verbose", "test:unit:all:v2": "LIGHT_PROTOCOL_VERSION=V2 vitest run tests/unit --reporter=verbose", "test-all:verbose": "vitest run --reporter=verbose", - "test-validator": "./../../cli/test_bin/run test-validator --prover-run-mode rpc", + "test-validator": "./../../cli/test_bin/run test-validator", "test-validator-skip-prover": "./../../cli/test_bin/run test-validator --skip-prover", "test:e2e:create-mint": "pnpm test-validator && NODE_OPTIONS='--trace-deprecation' vitest run tests/e2e/create-mint.test.ts --reporter=verbose", "test:e2e:layout": "vitest run tests/e2e/layout.test.ts --reporter=verbose --bail=1", diff --git a/js/compressed-token/src/actions/create-mint.ts b/js/compressed-token/src/actions/create-mint.ts index 8c24fd6277..a107dec2be 100644 --- a/js/compressed-token/src/actions/create-mint.ts +++ b/js/compressed-token/src/actions/create-mint.ts @@ -14,6 +14,7 @@ import { import { Rpc, buildAndSignTx, + dedupeSigner, sendAndConfirmTx, } from '@lightprotocol/stateless.js'; @@ -59,26 +60,27 @@ export async function createMint( feePayer: payer.publicKey, mint: keypair.publicKey, decimals, - authority: getPublicKey(mintAuthority)!, - freezeAuthority: getPublicKey(freezeAuthority), + authority: + 'secretKey' in mintAuthority + ? mintAuthority.publicKey + : mintAuthority, + freezeAuthority: + freezeAuthority && 'secretKey' in freezeAuthority + ? freezeAuthority.publicKey + : (freezeAuthority ?? null), rentExemptBalance, tokenProgramId: resolvedTokenProgramId, }); const { blockhash } = await rpc.getLatestBlockhash(); - // Get required additional signers that are Keypairs, not the payer. 
- const additionalSigners = [mintAuthority, freezeAuthority] - .filter( + const additionalSigners = dedupeSigner( + payer, + [mintAuthority, freezeAuthority].filter( (signer): signer is Signer => - signer instanceof Keypair && - !signer.publicKey.equals(payer.publicKey), - ) - .filter( - (signer, index, array) => - array.findIndex(s => s.publicKey.equals(signer.publicKey)) === - index, - ); + signer != undefined && 'secretKey' in signer, + ), + ); const tx = buildAndSignTx(ixs, payer, blockhash, [ ...additionalSigners, @@ -88,8 +90,3 @@ export async function createMint( return { mint: keypair.publicKey, transactionSignature: txId }; } - -const getPublicKey = ( - signer: PublicKey | Signer | undefined, -): PublicKey | null => - signer instanceof PublicKey ? signer : signer?.publicKey || null; diff --git a/js/compressed-token/src/program.ts b/js/compressed-token/src/program.ts index 22f111feb4..ac19ee2c1f 100644 --- a/js/compressed-token/src/program.ts +++ b/js/compressed-token/src/program.ts @@ -739,6 +739,7 @@ export class CompressedTokenProgram { programId: tokenProgram, space: mintSize ?? MINT_SIZE, }); + const initializeMintInstruction = createInitializeMint2Instruction( mint, decimals, diff --git a/js/stateless.js/CHANGELOG.md b/js/stateless.js/CHANGELOG.md index 231faf672a..92fa94e707 100644 --- a/js/stateless.js/CHANGELOG.md +++ b/js/stateless.js/CHANGELOG.md @@ -1,5 +1,31 @@ # Changelog +## [0.22.0] - 2025-06-16 + +### Breaking Changes + +(stateless.js) SOL transfers with zkcompression don't accept `stateTreeInfo` as param anymore. + +```typescript +// old +await transfer( + connection, + fromKeypair, + 1, + fromKeypair, + fromKeypair.publicKey, + stateTreeInfo, + { + skipPreflight: false, + }, +); + +// new +await transfer(connection, fromKeypair, 1, fromKeypair, fromKeypair.publicKey, { + skipPreflight: false, +}); +``` + ## [0.21.0] - 2025-04-08 This release has several breaking changes which are necessary for protocol diff --git a/js/stateless.js/package.json b/js/stateless.js/package.json index b8a18226a2..411cd02345 100644 --- a/js/stateless.js/package.json +++ b/js/stateless.js/package.json @@ -1,6 +1,6 @@ { "name": "@lightprotocol/stateless.js", - "version": "0.21.0", + "version": "0.22.0", "description": "JavaScript API for Light & ZK Compression", "sideEffects": false, "main": "dist/cjs/node/index.cjs", @@ -94,7 +94,7 @@ "test:unit:all:v2": "LIGHT_PROTOCOL_VERSION=V2 vitest run tests/unit --reporter=verbose", "test:unit:tree-info": "vitest run tests/unit/utils/tree-info.test.ts --reporter=verbose", "test:conversions": "vitest run tests/unit/utils/conversion.test.ts --reporter=verbose", - "test-validator": "./../../cli/test_bin/run test-validator --prover-run-mode rpc", + "test-validator": "./../../cli/test_bin/run test-validator", "test-validator-skip-prover": "./../../cli/test_bin/run test-validator --skip-prover", "test:e2e:transfer": "pnpm test-validator && vitest run tests/e2e/transfer.test.ts --reporter=verbose", "test:e2e:compress": "pnpm test-validator && vitest run tests/e2e/compress.test.ts --reporter=verbose", diff --git a/js/stateless.js/src/rpc.ts b/js/stateless.js/src/rpc.ts index 9bc3660e5f..d0b08b26c6 100644 --- a/js/stateless.js/src/rpc.ts +++ b/js/stateless.js/src/rpc.ts @@ -629,7 +629,10 @@ export class Rpc extends Connection implements CompressionApiInterface { this.compressionApiEndpoint = compressionApiEndpoint; this.proverEndpoint = proverEndpoint; } - + /** + * @deprecated Use {@link getStateTreeInfos} instead + */ + async 
getCachedActiveStateTreeInfo() {} /** * @deprecated Use {@link getStateTreeInfos} instead */ diff --git a/js/stateless.js/src/test-helpers/test-rpc/test-rpc.ts b/js/stateless.js/src/test-helpers/test-rpc/test-rpc.ts index 68cf299bd3..a523002893 100644 --- a/js/stateless.js/src/test-helpers/test-rpc/test-rpc.ts +++ b/js/stateless.js/src/test-helpers/test-rpc/test-rpc.ts @@ -161,6 +161,10 @@ export class TestRpc extends Connection implements CompressionApiInterface { this.log = log ?? false; } + /** + * @deprecated Use {@link getStateTreeInfos} instead + */ + async getCachedActiveStateTreeInfo() {} /** * @deprecated Use {@link getStateTreeInfos} instead */ diff --git a/prover/server/main.go b/prover/server/main.go index 33724a9d12..162f44be09 100644 --- a/prover/server/main.go +++ b/prover/server/main.go @@ -890,11 +890,14 @@ func runCli() { } func parseRunMode(runModeString string) (prover.RunMode, error) { - runMode := prover.Rpc + runMode := prover.LocalRpc switch runModeString { case "rpc": logging.Logger().Info().Msg("Running in rpc mode") runMode = prover.Rpc + case "local-rpc": + logging.Logger().Info().Msg("Running in local-rpc mode") + runMode = prover.LocalRpc case "forester": logging.Logger().Info().Msg("Running in forester mode") runMode = prover.Forester diff --git a/prover/server/prover/proving_keys_utils.go b/prover/server/prover/proving_keys_utils.go index 8d83314b14..63c656eae3 100644 --- a/prover/server/prover/proving_keys_utils.go +++ b/prover/server/prover/proving_keys_utils.go @@ -20,6 +20,7 @@ const ( Rpc RunMode = "rpc" Full RunMode = "full" FullTest RunMode = "full-test" + LocalRpc RunMode = "local-rpc" ) // Trusted setup utility functions @@ -136,6 +137,28 @@ func GetKeys(keysDir string, runMode RunMode, circuits []string) []string { keysDir + "combined_32_40_4_2.key", } + // Keys for local-rpc mode - matching the 18 keys in cli/package.json + var localRpcKeys []string = []string{ + keysDir + "combined_26_1_1.key", + keysDir + "combined_26_1_2.key", + keysDir + "combined_26_2_1.key", + keysDir + "combined_32_40_1_1.key", + keysDir + "combined_32_40_1_2.key", + keysDir + "combined_32_40_2_1.key", + keysDir + "inclusion_32_1.key", + keysDir + "inclusion_32_2.key", + keysDir + "inclusion_32_3.key", + keysDir + "inclusion_32_4.key", + keysDir + "mainnet_inclusion_26_1.key", + keysDir + "mainnet_inclusion_26_2.key", + keysDir + "mainnet_inclusion_26_3.key", + keysDir + "mainnet_inclusion_26_4.key", + keysDir + "non-inclusion_26_1.key", + keysDir + "non-inclusion_26_2.key", + keysDir + "non-inclusion_40_1.key", + keysDir + "non-inclusion_40_2.key", + } + var appendWithProofsKeys []string = []string{ keysDir + "append-with-proofs_32_500.key", } @@ -192,7 +215,10 @@ func GetKeys(keysDir string, runMode RunMode, circuits []string) []string { keys = append(keys, updateTestKeys...) keys = append(keys, appendWithProofsTestKeys...) keys = append(keys, addressAppendTestKeys...) + case LocalRpc: + keys = append(keys, localRpcKeys...) } + for _, circuit := range circuits { switch circuit { diff --git a/scripts/build.sh b/scripts/build.sh index c353935861..089eaca8ca 100755 --- a/scripts/build.sh +++ b/scripts/build.sh @@ -3,8 +3,6 @@ command -v pnpm >/dev/null 2>&1 || { echo >&2 "pnpm is not installed. Aborting."; exit 1; } command -v npx >/dev/null 2>&1 || { echo >&2 "npx is not installed. Aborting."; exit 1; } -. "./scripts/devenv.sh" || { echo >&2 "Failed to source devenv.sh. Aborting."; exit 1; } - set -eux pnpm install || { echo >&2 "Failed to install dependencies. 
Aborting."; exit 1; } diff --git a/scripts/bump-versions-and-publish-npm.sh b/scripts/bump-versions-and-publish-npm.sh index a0a622e962..5995c1be4c 100755 --- a/scripts/bump-versions-and-publish-npm.sh +++ b/scripts/bump-versions-and-publish-npm.sh @@ -37,12 +37,12 @@ publish_package() { sleep 5 if [ "$version_type" == "alpha" ]; then - if ! (cd "${package_dir}" && pnpm version prerelease --preid alpha && pnpm publish --tag alpha --access private --no-git-checks); then + if ! (cd "${package_dir}" && pnpm version prerelease --preid alpha && pnpm publish --tag alpha --access private --no-git-checks --verbose); then echo "Error occurred while publishing ${package_name}." return 1 fi else - if ! (cd "${package_dir}" && pnpm version "${version_type}" && pnpm publish --access public --no-git-checks); then + if ! (cd "${package_dir}" && pnpm version "${version_type}" && pnpm publish --access public --no-git-checks --verbose); then echo "Error occurred while publishing ${package_name}." return 1 fi @@ -58,12 +58,12 @@ error_occurred=0 if [ "$#" -eq 0 ]; then echo "Bumping ${version_type} version for all packages..." if [ "$version_type" == "alpha" ]; then - if ! pnpm -r exec -- pnpm version prerelease --preid alpha || ! pnpm -r exec -- pnpm publish --tag alpha --access private; then + if ! pnpm -r exec -- pnpm version prerelease --preid alpha || ! pnpm -r exec -- pnpm publish --tag alpha --access private --verbose; then echo "Error occurred during bulk version bump and publish." error_occurred=1 fi else - if ! pnpm -r exec -- pnpm version "${version_type}" || ! pnpm -r exec -- pnpm publish --access public; then + if ! pnpm -r exec -- pnpm version "${version_type}" || ! pnpm -r exec -- pnpm publish --access public --verbose; then echo "Error occurred during bulk version bump and publish." error_occurred=1 fi diff --git a/scripts/install.sh b/scripts/install.sh index 7e2a6cfa38..2902a6754c 100755 --- a/scripts/install.sh +++ b/scripts/install.sh @@ -107,7 +107,8 @@ install_rust() { export PATH="${PREFIX}/cargo/bin:${PATH}" rustup component add --toolchain 1.86-x86_64-unknown-linux-gnu clippy cargo install cargo-expand --locked - cargo install --git https://github.com/helius-labs/photon.git --rev dbeb89e639bda78f0e135b9b1aa75bfe16618cb4 --locked + # temp - commit hash from PR as of 2025-06-16 + cargo install --git https://github.com/lightprotocol/photon.git --rev 49b7e7f0d668babbc4d65fe8a0a7236df76f75a8 --locked log "rust" else echo "Rust already installed, skipping..." 
@@ -424,4 +425,4 @@ main() { fi } -main "$@" +main "$@" \ No newline at end of file diff --git a/sdk-libs/compressed-token-client/Cargo.toml b/sdk-libs/compressed-token-client/Cargo.toml new file mode 100644 index 0000000000..038fe9565b --- /dev/null +++ b/sdk-libs/compressed-token-client/Cargo.toml @@ -0,0 +1,40 @@ +[package] +name = "light-compressed-token-client" +version = "0.1.0" +edition = "2021" +license = "Apache-2.0" +repository = "https://github.com/lightprotocol/light-protocol" +description = "Client library for Light Protocol Compressed Token Program" + +[features] +default = [] + +[dependencies] +# Light Protocol dependencies +light-compressed-token = { workspace = true } +light-compressed-account = { workspace = true, features = ["anchor"] } +light-system-program-anchor = { workspace = true } +account-compression = { workspace = true } + +# Solana dependencies +solana-sdk = { workspace = true } +solana-pubkey = { workspace = true } +solana-instruction = { workspace = true } +solana-program = { workspace = true } +solana-transaction = { workspace = true } +solana-signature = { workspace = true } +solana-hash = { workspace = true } +solana-account = { workspace = true } + +# Anchor +anchor-lang = { workspace = true } +anchor-spl = { workspace = true } + +# External dependencies +borsh = { workspace = true } +num-bigint = { workspace = true } +num-traits = { workspace = true } +thiserror = { workspace = true } + +[dev-dependencies] +tokio = { workspace = true, features = ["rt", "macros"] } \ No newline at end of file diff --git a/sdk-libs/compressed-token-client/README.md b/sdk-libs/compressed-token-client/README.md new file mode 100644 index 0000000000..1dda037422 --- /dev/null +++ b/sdk-libs/compressed-token-client/README.md @@ -0,0 +1,86 @@ +# Light Compressed Token Client + +A Rust client library for interacting with the Compressed Token Program on Solana. + +## Overview + +- **Compress**: Compress SPL tokens to a recipient. +- **Decompress**: Decompress compressed tokens back to an SPL token account +- **Batch Compress**: Compress SPL tokens to multiple recipients in a single instruction. + +## Installation + +Add to your `Cargo.toml`: + +```toml +[dependencies] +light-compressed-token-client = "0.1.0" +``` + +## Usage + +### Compress to one recipient + +```rust +use light_compressed_token_client::{compress, CompressParams}; +use solana_pubkey::Pubkey; + +let instruction = compress( + payer, + owner, + source_token_account, + mint, + 1000, // amount + recipient, + output_state_tree, +)?; +``` + +### Compress to multiple recipients + +```rust +use light_compressed_token_client::batch_compress; + +// Compress to multiple recipients +let recipients = vec![ + (recipient1, 500), + (recipient2, 300), + (recipient3, 200), +]; + +let instruction = batch_compress( + payer, + owner, + source_token_account, + mint, + recipients, + output_state_tree, +)?; +``` + +### Decompress to SPL token account. + +```rust +use light_compressed_token_client::{DecompressParams, create_decompress_instruction}; + +let params = DecompressParams { + payer, + input_compressed_token_accounts: vec![(account, token_data, merkle_context)], + to_address: destination_token_account, // SPL token account, eg ATA. 
+ amount: 1000, + recent_input_state_root_indices: vec![Some(0)], // fetch via get_validity_proof + recent_validity_proof: Some(proof), // fetch via get_validity_proof + output_state_tree: Some(state_tree), + token_program_id: None, // defaults to SPL Token +}; + +let instruction = create_decompress_instruction(params)?; +``` + +## Features + +- Supports SPL-token and Token-22 mints. + +## License + +Apache-2.0 diff --git a/sdk-libs/compressed-token-client/src/instructions.rs b/sdk-libs/compressed-token-client/src/instructions.rs new file mode 100644 index 0000000000..f0ff302ffb --- /dev/null +++ b/sdk-libs/compressed-token-client/src/instructions.rs @@ -0,0 +1,311 @@ +//! Instruction builders for compressed token operations + +use anchor_spl::token_interface::spl_token_2022; +use light_compressed_token::{process_transfer::TokenTransferOutputData, TokenData}; +use solana_sdk::instruction::Instruction; +use solana_sdk::pubkey::Pubkey; + +use crate::{transfer_sdk, CompressedAccount, CompressedProof, MerkleContext}; + +/// Error type for instruction builder operations +#[derive(Debug, thiserror::Error)] +pub enum CompressedTokenError { + #[error("Invalid parameters: {0}")] + InvalidParams(String), + #[error("Serialization error: {0}")] + SerializationError(String), +} + +/// Parameters for creating a compress instruction +#[derive(Debug, Clone)] +pub struct CompressParams { + /// The payer of the transaction + pub payer: Pubkey, + /// Owner of the uncompressed token account + pub owner: Pubkey, + /// Source token account address + pub source: Pubkey, + /// Owner of the compressed token account + pub to_address: Pubkey, + /// Mint address of the token to compress + pub mint: Pubkey, + /// Amount of tokens to compress + pub amount: u64, + /// The state tree that the output should be inserted into + pub output_state_tree: Pubkey, + /// Optional: The token program ID. Default: SPL Token Program ID + pub token_program_id: Option, + /// Optional: Multiple recipients and amounts for batch compression + pub batch_recipients: Option>, +} + +/// Parameters for creating a decompress instruction +#[derive(Debug, Clone)] +pub struct DecompressParams { + /// The payer of the transaction + pub payer: Pubkey, + /// Input compressed token accounts to be consumed + pub input_compressed_token_accounts: Vec<(CompressedAccount, TokenData, MerkleContext)>, + /// Address of uncompressed destination token account + pub to_address: Pubkey, + /// Amount of tokens to decompress + pub amount: u64, + /// The recent state root indices of the input state + pub recent_input_state_root_indices: Vec>, + /// The recent validity proof for state inclusion + pub recent_validity_proof: Option, + /// The state tree that the change output should be inserted into + pub output_state_tree: Option, + /// Optional: The token program ID. Default: SPL Token Program ID + pub token_program_id: Option, +} + +/// Create a compress instruction +/// +/// This instruction compresses tokens from an SPL token account to N recipients. 
+pub fn create_compress_instruction( + params: CompressParams, +) -> Result { + let token_program = params.token_program_id.unwrap_or(anchor_spl::token::ID); + + let output_compressed_accounts = if let Some(ref batch_recipients) = params.batch_recipients { + batch_recipients + .iter() + .map(|(recipient, amount)| TokenTransferOutputData { + owner: *recipient, + amount: *amount, + lamports: None, + merkle_tree: params.output_state_tree, + }) + .collect() + } else { + vec![TokenTransferOutputData { + owner: params.to_address, + amount: params.amount, + lamports: None, + merkle_tree: params.output_state_tree, + }] + }; + let total_amount: u64 = output_compressed_accounts.iter().map(|x| x.amount).sum(); + + // TODO: refactor. + let ix = match transfer_sdk::create_transfer_instruction( + ¶ms.payer, + ¶ms.owner, + &[], + &output_compressed_accounts, + &[], + &None, + &[], + &[], + params.mint, + None, + true, + Some(total_amount), + Some(crate::get_token_pool_pda(¶ms.mint)), + Some(params.source), + false, + None, + None, + token_program == spl_token_2022::ID, + &[], + false, + ) { + Ok(ix) => ix, + Err(e) => { + return Err(CompressedTokenError::SerializationError(format!( + "Failed to create instruction: {:?}", + e + ))) + } + }; + + Ok(ix) +} + +/// Create a decompress instruction +/// +/// This instruction decompresses compressed tokens to an SPL token account. +pub fn create_decompress_instruction( + params: DecompressParams, +) -> Result { + if params.input_compressed_token_accounts.is_empty() { + return Err(CompressedTokenError::InvalidParams( + "No input compressed token accounts provided".to_string(), + )); + } + + let token_program = params.token_program_id.unwrap_or(anchor_spl::token::ID); + + let (compressed_accounts, token_data, merkle_contexts): (Vec<_>, Vec<_>, Vec<_>) = params + .input_compressed_token_accounts + .into_iter() + .map(|(account, data, context)| (account, data, context)) + .fold( + (Vec::new(), Vec::new(), Vec::new()), + |(mut accounts, mut data, mut contexts), (account, token_data, context)| { + accounts.push(account); + data.push(token_data); + contexts.push(context); + (accounts, data, contexts) + }, + ); + + let mint = token_data[0].mint; + let owner = token_data[0].owner; + + let input_total: u64 = token_data.iter().map(|td| td.amount).sum(); + let remaining_amount = input_total.saturating_sub(params.amount); + + let output_compressed_accounts = if remaining_amount > 0 { + vec![TokenTransferOutputData { + owner, + amount: remaining_amount, + lamports: None, + merkle_tree: params + .output_state_tree + .unwrap_or(merkle_contexts[0].merkle_tree_pubkey.into()), + }] + } else { + vec![] + }; + + // TODO: refactor. + transfer_sdk::create_transfer_instruction( + ¶ms.payer, + &owner, + &merkle_contexts, + &output_compressed_accounts, + ¶ms.recent_input_state_root_indices, + ¶ms.recent_validity_proof, + &token_data, + &compressed_accounts, + mint, + None, + false, + Some(params.amount), + Some(crate::get_token_pool_pda(&mint)), + Some(params.to_address), + false, + None, + None, + token_program == spl_token_2022::ID, + &[], + false, + ) + .map_err(|e| { + CompressedTokenError::SerializationError(format!("Failed to create instruction: {:?}", e)) + }) +} + +/// Create a compress instruction with a single recipient. 
+pub fn compress( + payer: Pubkey, + owner: Pubkey, + source_token_account: Pubkey, + mint: Pubkey, + amount: u64, + to_address: Pubkey, + output_state_tree: Pubkey, +) -> Result { + create_compress_instruction(CompressParams { + payer, + owner, + source: source_token_account, + to_address, + mint, + amount, + output_state_tree, + token_program_id: None, + batch_recipients: None, + }) +} + +/// Creates a compress instruction to compress tokens to multiple recipients. +pub fn batch_compress( + payer: Pubkey, + owner: Pubkey, + source_token_account: Pubkey, + mint: Pubkey, + recipients: Vec, + amounts: Vec, + output_state_tree: Pubkey, +) -> Result { + if recipients.len() != amounts.len() { + return Err(CompressedTokenError::InvalidParams( + "Recipients and amounts must have the same length".to_string(), + )); + } + + create_compress_instruction(CompressParams { + payer, + owner, + source: source_token_account, + to_address: Pubkey::default(), + mint, + amount: 0, + output_state_tree, + token_program_id: None, + batch_recipients: Some(recipients.into_iter().zip(amounts).collect()), + }) +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::PROGRAM_ID; + + #[test] + fn test_compress_instruction() { + let payer = Pubkey::new_unique(); + let owner = Pubkey::new_unique(); + let source = Pubkey::new_unique(); + let to_address = Pubkey::new_unique(); + let mint = Pubkey::new_unique(); + let output_state_tree = Pubkey::new_unique(); + + let result = compress( + payer, + owner, + source, + mint, + 1000, + to_address, + output_state_tree, + ); + + assert!(result.is_ok()); + let instruction = result.unwrap(); + assert_eq!(instruction.program_id, PROGRAM_ID); + } + + #[test] + fn test_batch_compress_instruction() { + let payer = Pubkey::new_unique(); + let owner = Pubkey::new_unique(); + let source = Pubkey::new_unique(); + let mint = Pubkey::new_unique(); + let output_state_tree = Pubkey::new_unique(); + + let recipients = vec![ + Pubkey::new_unique(), + Pubkey::new_unique(), + Pubkey::new_unique(), + ]; + let amounts = vec![500, 300, 200]; + + let result = batch_compress( + payer, + owner, + source, + mint, + recipients, + amounts, + output_state_tree, + ); + + assert!(result.is_ok()); + let instruction = result.unwrap(); + assert_eq!(instruction.program_id, PROGRAM_ID); + } +} diff --git a/sdk-libs/compressed-token-client/src/lib.rs b/sdk-libs/compressed-token-client/src/lib.rs new file mode 100644 index 0000000000..920120dbaf --- /dev/null +++ b/sdk-libs/compressed-token-client/src/lib.rs @@ -0,0 +1,27 @@ +//! Client library for interacting with the Compressed Token Program + +pub mod instructions; + +// We're also re-exporting helpers from the compressed-token program. 
+pub use instructions::{ + batch_compress, compress, create_compress_instruction, create_decompress_instruction, + CompressParams, CompressedTokenError, DecompressParams, +}; +pub use light_compressed_account::{ + compressed_account::{CompressedAccount, MerkleContext}, + instruction_data::compressed_proof::CompressedProof, + TreeType, +}; +pub use light_compressed_token::instruction; +pub use light_compressed_token::ErrorCode; +pub use light_compressed_token::{ + burn::sdk as burn_sdk, delegation::sdk as delegation_sdk, freeze::sdk as freeze_sdk, + process_compress_spl_token_account::sdk as compress_spl_token_account_sdk, + process_mint::mint_sdk, process_transfer::transfer_sdk, +}; +pub use light_compressed_token::{get_token_pool_pda, ID as PROGRAM_ID}; +pub use light_compressed_token::{ + process_transfer::{get_cpi_authority_pda, TokenTransferOutputData}, + token_data::{AccountState, TokenData}, +}; +pub use light_system_program::ID as LIGHT_SYSTEM_PROGRAM_ID; diff --git a/sdk-libs/compressed-token-client/tests/integration_test.rs b/sdk-libs/compressed-token-client/tests/integration_test.rs new file mode 100644 index 0000000000..ab9d1695ab --- /dev/null +++ b/sdk-libs/compressed-token-client/tests/integration_test.rs @@ -0,0 +1,225 @@ +#[cfg(test)] +mod tests { + use light_compressed_token_client::{ + batch_compress, compress, create_decompress_instruction, AccountState, CompressedAccount, + DecompressParams, MerkleContext, TokenData, TreeType, + }; + use solana_sdk::pubkey::Pubkey; + + #[test] + fn test_simple_compress() { + let payer = Pubkey::new_unique(); + let owner = Pubkey::new_unique(); + let source_token_account = Pubkey::new_unique(); + let mint = Pubkey::new_unique(); + let recipient = Pubkey::new_unique(); + let output_state_tree = Pubkey::new_unique(); + + let instruction = compress( + payer, + owner, + source_token_account, + mint, + 1000, // amount + recipient, + output_state_tree, + ) + .expect("Failed to create compress instruction"); + + assert_eq!( + instruction.program_id, + light_compressed_token_client::PROGRAM_ID + ); + assert!(!instruction.accounts.is_empty()); + + let account_keys: Vec<_> = instruction.accounts.iter().map(|a| a.pubkey).collect(); + assert!(account_keys.contains(&payer)); + assert!(account_keys.contains(&owner)); + assert!(account_keys.contains(&source_token_account)); + assert!(account_keys.contains(&output_state_tree)); + } + + #[test] + fn test_batch_compress() { + let payer = Pubkey::new_unique(); + let owner = Pubkey::new_unique(); + let source_token_account = Pubkey::new_unique(); + let mint = Pubkey::new_unique(); + let output_state_tree = Pubkey::new_unique(); + + let recipients = vec![ + Pubkey::new_unique(), + Pubkey::new_unique(), + Pubkey::new_unique(), + ]; + let amounts = vec![500, 300, 200]; + + let total_amount: u64 = amounts.iter().sum(); + assert_eq!(total_amount, 1000); + + let instruction = batch_compress( + payer, + owner, + source_token_account, + mint, + recipients, + amounts, + output_state_tree, + ) + .expect("Failed to create batch compress instruction"); + + assert_eq!( + instruction.program_id, + light_compressed_token_client::PROGRAM_ID + ); + assert!(!instruction.accounts.is_empty()); + + let account_keys: Vec<_> = instruction.accounts.iter().map(|a| a.pubkey).collect(); + assert!(account_keys.contains(&payer)); + assert!(account_keys.contains(&owner)); + assert!(account_keys.contains(&source_token_account)); + + assert!(account_keys.contains(&output_state_tree)); + } + + #[test] + fn test_decompress() { + let payer = 
Pubkey::new_unique(); + let owner = Pubkey::new_unique(); + let mint = Pubkey::new_unique(); + let destination_token_account = Pubkey::new_unique(); + let merkle_tree = Pubkey::new_unique(); + let queue = Pubkey::new_unique(); + + let compressed_account = CompressedAccount { + owner: light_compressed_token_client::PROGRAM_ID, + lamports: 0, + data: None, + address: None, + }; + + let token_data = TokenData { + mint, + owner, + amount: 1000, + delegate: None, + state: AccountState::Initialized, + tlv: None, + }; + + let merkle_context = MerkleContext { + merkle_tree_pubkey: merkle_tree, + queue_pubkey: queue, + leaf_index: 0, + prove_by_index: false, + tree_type: TreeType::StateV2, + }; + + let params = DecompressParams { + payer, + input_compressed_token_accounts: vec![( + compressed_account.clone(), + token_data.clone(), + merkle_context.clone(), + )], + to_address: destination_token_account, + amount: 500, + recent_input_state_root_indices: vec![Some(0)], + recent_validity_proof: None, + output_state_tree: Some(merkle_tree), + token_program_id: None, + }; + + let instruction = + create_decompress_instruction(params).expect("Failed to create decompress instruction"); + + assert_eq!( + instruction.program_id, + light_compressed_token_client::PROGRAM_ID + ); + assert!(!instruction.accounts.is_empty()); + + let account_keys: Vec<_> = instruction.accounts.iter().map(|a| a.pubkey).collect(); + assert!(account_keys.contains(&payer)); + assert!(account_keys.contains(&destination_token_account)); + assert!(account_keys.contains(&merkle_tree)); + assert!(account_keys.contains(&queue)); + + assert_eq!(token_data.amount, 1000); + assert_eq!(token_data.owner, owner); + assert_eq!(token_data.mint, mint); + assert_eq!(token_data.state, AccountState::Initialized); + + assert_eq!( + compressed_account.owner, + light_compressed_token_client::PROGRAM_ID + ); + assert_eq!(compressed_account.lamports, 0); + + assert_eq!(merkle_context.merkle_tree_pubkey, merkle_tree); + assert_eq!(merkle_context.queue_pubkey, queue); + assert_eq!(merkle_context.leaf_index, 0); + assert!(!merkle_context.prove_by_index); + assert_eq!(merkle_context.tree_type, TreeType::StateV2); + } + + #[test] + fn test_decompress_partial_amount() { + let payer = Pubkey::new_unique(); + let owner = Pubkey::new_unique(); + let mint = Pubkey::new_unique(); + let destination_token_account = Pubkey::new_unique(); + let merkle_tree = Pubkey::new_unique(); + let queue = Pubkey::new_unique(); + + let total_amount = 1000u64; + let decompress_amount = 500u64; + + let compressed_account = CompressedAccount { + owner: light_compressed_token_client::PROGRAM_ID, + lamports: 0, + data: None, + address: None, + }; + + let token_data = TokenData { + mint, + owner, + amount: total_amount, + delegate: None, + state: AccountState::Initialized, + tlv: None, + }; + + let merkle_context = MerkleContext { + merkle_tree_pubkey: merkle_tree, + queue_pubkey: queue, + leaf_index: 0, + prove_by_index: false, + tree_type: TreeType::StateV2, + }; + + let params = DecompressParams { + payer, + input_compressed_token_accounts: vec![( + compressed_account, + token_data.clone(), + merkle_context, + )], + to_address: destination_token_account, + amount: decompress_amount, + recent_input_state_root_indices: vec![Some(0)], + recent_validity_proof: None, + output_state_tree: Some(merkle_tree), + token_program_id: None, + }; + + let instruction = + create_decompress_instruction(params).expect("Failed to create decompress instruction"); + + assert!(instruction.accounts.len() > 0); 
+ assert_eq!(token_data.amount, total_amount); + assert!(decompress_amount < total_amount); + assert_eq!(total_amount - decompress_amount, 500); + } +} diff --git a/sdk-libs/compressed-token-sdk/Cargo.toml b/sdk-libs/compressed-token-sdk/Cargo.toml new file mode 100644 index 0000000000..33d60e1e42 --- /dev/null +++ b/sdk-libs/compressed-token-sdk/Cargo.toml @@ -0,0 +1,38 @@ +[package] +name = "light-compressed-token-sdk" +version = "0.1.0" +description = "Rust SDK for interacting with the Compressed Token Program on Solana" +repository = "https://github.com/Lightprotocol/light-protocol" +license = "Apache-2.0" +edition = "2021" + +[lib] +crate-type = ["cdylib", "lib"] +name = "light_compressed_token_sdk" + +[features] +default = ["custom-heap"] +no-entrypoint = [] +no-idl = [] +no-log-ix-name = [] +cpi = ["no-entrypoint"] +custom-heap = ["light-heap"] +mem-profiling = [] +test-sbf = [] +bench-sbf = [] +idl-build = ["anchor-lang/idl-build"] +anchor = ["light-compressed-account/anchor"] + +[dependencies] +light-compressed-account = { workspace = true, features = ["anchor"] } +solana-program = { workspace = true } +anchor-lang = { workspace = true } +num-bigint = { workspace = true } +borsh = { workspace = true } +light-heap = { workspace = true, optional = true } + +[target.'cfg(not(target_os = "solana"))'.dependencies] +solana-sdk = { workspace = true } + +[dev-dependencies] +num-bigint = { workspace = true } diff --git a/sdk-libs/compressed-token-sdk/Xargo.toml b/sdk-libs/compressed-token-sdk/Xargo.toml new file mode 100644 index 0000000000..1744f098ae --- /dev/null +++ b/sdk-libs/compressed-token-sdk/Xargo.toml @@ -0,0 +1,2 @@ +[target.bpfel-unknown-unknown.dependencies.std] +features = [] \ No newline at end of file diff --git a/sdk-libs/compressed-token-sdk/readme.md b/sdk-libs/compressed-token-sdk/readme.md new file mode 100644 index 0000000000..263a582587 --- /dev/null +++ b/sdk-libs/compressed-token-sdk/readme.md @@ -0,0 +1,11 @@ +# Light Compressed Token SDK + +Rust SDK with helpers to interact with the Compressed Token Program on Solana. + +Documentation is available at https://zkcompression.com + +Program Source code: https://github.com/Lightprotocol/light-protocol/tree/main/programs/compressed-token + +## Audit + +This code is unaudited. Use at your own risk. diff --git a/sdk-libs/compressed-token-sdk/src/cpi/account_info.rs b/sdk-libs/compressed-token-sdk/src/cpi/account_info.rs new file mode 100644 index 0000000000..22d6a9d128 --- /dev/null +++ b/sdk-libs/compressed-token-sdk/src/cpi/account_info.rs @@ -0,0 +1,33 @@ +use crate::state::InputTokenDataWithContext; +use light_compressed_account::compressed_account::PackedMerkleContext; + +/// Get an existing compressed token account from token_data in optimized +/// format. 
+/// +/// Example: +/// ```rust +/// let data = InstructionData::try_from_slice(instruction_data) +/// .map_err(|_| ProgramError::InvalidInstructionData)?; +/// +/// let compressed_token_account = get_compressed_token_account_info( +/// data.amount, +/// data.merkle_context, +/// data.root_index, +/// None +/// ); +/// ``` +pub fn get_compressed_token_account_info( + merkle_context: PackedMerkleContext, + root_index: u16, + amount: u64, + lamports: Option, +) -> InputTokenDataWithContext { + InputTokenDataWithContext { + amount, + delegate_index: None, + merkle_context, + root_index, + lamports, + tlv: None, + } +} diff --git a/sdk-libs/compressed-token-sdk/src/cpi/accounts.rs b/sdk-libs/compressed-token-sdk/src/cpi/accounts.rs new file mode 100644 index 0000000000..4d32f0621e --- /dev/null +++ b/sdk-libs/compressed-token-sdk/src/cpi/accounts.rs @@ -0,0 +1,20 @@ +use solana_program::account_info::AccountInfo; + +/// CPI Accounts for decompressing compressed token accounts. +pub struct CompressedTokenDecompressCpiAccounts<'a> { + pub fee_payer: AccountInfo<'a>, + pub authority: AccountInfo<'a>, + pub cpi_authority_pda: AccountInfo<'a>, + pub light_system_program: AccountInfo<'a>, + pub registered_program_pda: AccountInfo<'a>, + pub noop_program: AccountInfo<'a>, + pub account_compression_authority: AccountInfo<'a>, + pub account_compression_program: AccountInfo<'a>, + pub self_program: AccountInfo<'a>, + pub token_pool_pda: AccountInfo<'a>, + pub decompress_destination: AccountInfo<'a>, + pub token_program: AccountInfo<'a>, + pub system_program: AccountInfo<'a>, + pub state_merkle_tree: AccountInfo<'a>, + pub queue: AccountInfo<'a>, +} diff --git a/sdk-libs/compressed-token-sdk/src/cpi/instruction.rs b/sdk-libs/compressed-token-sdk/src/cpi/instruction.rs new file mode 100644 index 0000000000..4bc94c6386 --- /dev/null +++ b/sdk-libs/compressed-token-sdk/src/cpi/instruction.rs @@ -0,0 +1,92 @@ +use crate::cpi::accounts::CompressedTokenDecompressCpiAccounts; +#[cfg(feature = "anchor")] +use anchor_lang::AnchorSerialize; +#[cfg(not(feature = "anchor"))] +use borsh::BorshSerialize as AnchorSerialize; +use light_compressed_account::instruction_data::{ + compressed_proof::CompressedProof, cpi_context::CompressedCpiContext, +}; +use solana_program::{ + instruction::{AccountMeta, Instruction}, + program_error::ProgramError, + pubkey::Pubkey, +}; + +use crate::state::{CompressedTokenInstructionDataTransfer, InputTokenDataWithContext}; + +/// Return Instruction to decompress compressed token accounts. +/// Proof can be None if prove_by_index is used. 
+pub fn decompress( + mint: &Pubkey, + compressed_token_accounts: Vec, + proof: &Option, + light_cpi_accounts: &CompressedTokenDecompressCpiAccounts, + cpi_context: Option<&CompressedCpiContext>, +) -> Result { + let data = + decompress_token_instruction_data(mint, proof, compressed_token_accounts, cpi_context); + + let accounts = vec![ + AccountMeta::new(*light_cpi_accounts.fee_payer.key, true), + AccountMeta::new_readonly(*light_cpi_accounts.authority.key, true), + AccountMeta::new_readonly(*light_cpi_accounts.cpi_authority_pda.key, false), + AccountMeta::new_readonly(*light_cpi_accounts.light_system_program.key, false), + AccountMeta::new_readonly(*light_cpi_accounts.registered_program_pda.key, false), + AccountMeta::new_readonly(*light_cpi_accounts.noop_program.key, false), + AccountMeta::new_readonly(*light_cpi_accounts.account_compression_authority.key, false), + AccountMeta::new_readonly(*light_cpi_accounts.account_compression_program.key, false), + AccountMeta::new_readonly(*light_cpi_accounts.self_program.key, false), + AccountMeta::new(*light_cpi_accounts.token_pool_pda.key, false), + AccountMeta::new(*light_cpi_accounts.decompress_destination.key, false), + AccountMeta::new_readonly(*light_cpi_accounts.token_program.key, false), + AccountMeta::new_readonly(*light_cpi_accounts.system_program.key, false), + AccountMeta::new(*light_cpi_accounts.state_merkle_tree.key, false), + AccountMeta::new(*light_cpi_accounts.queue.key, false), + ]; + + Ok(Instruction { + program_id: *light_cpi_accounts.self_program.key, + accounts, + data, + }) +} + +/// Return Instruction Data to decompress compressed token accounts. +pub fn decompress_token_instruction_data( + mint: &Pubkey, + proof: &Option, + compressed_token_accounts: Vec, + cpi_context: Option<&CompressedCpiContext>, +) -> Vec { + let amount = compressed_token_accounts + .iter() + .map(|data| data.amount) + .sum(); + + let compressed_token_instruction_data_transfer = CompressedTokenInstructionDataTransfer { + proof: *proof, + mint: *mint, + delegated_transfer: None, + input_token_data_with_context: compressed_token_accounts, + output_compressed_accounts: Vec::new(), + is_compress: false, + compress_or_decompress_amount: Some(amount), + cpi_context: cpi_context.copied(), + lamports_change_account_merkle_tree_index: None, + with_transaction_hash: false, + }; + + let mut inputs = Vec::new(); + // transfer discriminator + inputs.extend_from_slice(&[163, 52, 200, 231, 140, 3, 69, 186]); + + let mut serialized_data = Vec::new(); + compressed_token_instruction_data_transfer + .serialize(&mut serialized_data) + .unwrap(); + + // Add length buffer + inputs.extend_from_slice(&(serialized_data.len() as u32).to_le_bytes()); + inputs.extend_from_slice(&serialized_data); + inputs +} diff --git a/sdk-libs/compressed-token-sdk/src/cpi/mod.rs b/sdk-libs/compressed-token-sdk/src/cpi/mod.rs new file mode 100644 index 0000000000..c5694ecdb1 --- /dev/null +++ b/sdk-libs/compressed-token-sdk/src/cpi/mod.rs @@ -0,0 +1,3 @@ +pub mod account_info; +pub mod accounts; +pub mod instruction; diff --git a/sdk-libs/compressed-token-sdk/src/lib.rs b/sdk-libs/compressed-token-sdk/src/lib.rs new file mode 100644 index 0000000000..b9d16ad135 --- /dev/null +++ b/sdk-libs/compressed-token-sdk/src/lib.rs @@ -0,0 +1,2 @@ +pub mod cpi; +pub mod state; diff --git a/sdk-libs/compressed-token-sdk/src/state.rs b/sdk-libs/compressed-token-sdk/src/state.rs new file mode 100644 index 0000000000..c4abdec1db --- /dev/null +++ b/sdk-libs/compressed-token-sdk/src/state.rs @@ -0,0 +1,89 
@@ +#[cfg(feature = "anchor")] +use anchor_lang::{AnchorDeserialize, AnchorSerialize}; +#[cfg(not(feature = "anchor"))] +use borsh::{BorshDeserialize as AnchorDeserialize, BorshSerialize as AnchorSerialize}; +use light_compressed_account::{ + compressed_account::{CompressedAccountWithMerkleContext, PackedMerkleContext}, + instruction_data::{compressed_proof::CompressedProof, cpi_context::CompressedCpiContext}, +}; + +use solana_program::pubkey::Pubkey; + +#[derive(Clone, Debug, PartialEq, Eq, AnchorSerialize, AnchorDeserialize)] +pub struct PackedTokenTransferOutputData { + pub owner: Pubkey, + pub amount: u64, + pub lamports: Option, + pub merkle_tree_index: u8, + /// Placeholder for TokenExtension tlv data (unimplemented) + pub tlv: Option>, +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq, AnchorDeserialize, AnchorSerialize)] +#[repr(u8)] +pub enum AccountState { + Initialized, + Frozen, +} + +#[derive(Debug, PartialEq, Eq, AnchorDeserialize, AnchorSerialize, Clone)] +pub struct TokenData { + /// The mint associated with this account + pub mint: Pubkey, + /// The owner of this account. + pub owner: Pubkey, + /// The amount of tokens this account holds. + pub amount: u64, + /// If `delegate` is `Some` then `delegated_amount` represents + /// the amount authorized by the delegate + pub delegate: Option, + /// The account's state + pub state: AccountState, + /// Placeholder for TokenExtension tlv data (unimplemented) + pub tlv: Option>, +} + +#[derive(Debug, Clone)] +pub struct TokenDataWithMerkleContext { + pub token_data: TokenData, + pub compressed_account: CompressedAccountWithMerkleContext, +} + +#[derive(Debug, Clone, AnchorDeserialize, AnchorSerialize)] +pub struct CompressedTokenInstructionDataTransfer { + pub proof: Option, + pub mint: Pubkey, + /// Is required if the signer is delegate, + /// -> delegate is authority account, + /// owner = Some(owner) is the owner of the token account. + pub delegated_transfer: Option, + pub input_token_data_with_context: Vec, + pub output_compressed_accounts: Vec, + pub is_compress: bool, + pub compress_or_decompress_amount: Option, + pub cpi_context: Option, + pub lamports_change_account_merkle_tree_index: Option, + pub with_transaction_hash: bool, +} + +#[derive(Debug, Clone, AnchorSerialize, AnchorDeserialize)] +pub struct InputTokenDataWithContext { + pub amount: u64, + pub delegate_index: Option, + pub merkle_context: PackedMerkleContext, + pub root_index: u16, + pub lamports: Option, + /// Placeholder for TokenExtension tlv data (unimplemented) + pub tlv: Option>, +} + +/// Struct to provide the owner when the delegate is signer of the transaction. +#[derive(Debug, Clone, AnchorSerialize, AnchorDeserialize)] +pub struct DelegatedTransfer { + pub owner: Pubkey, + /// Index of change compressed account in output compressed accounts. In + /// case that the delegate didn't spend the complete delegated compressed + /// account balance the change compressed account will be delegated to her + /// as well. + pub delegate_change_account_index: Option, +}
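
For reference, a minimal client-side sketch of the `compress` / `batch_compress` builders added in `sdk-libs/compressed-token-client/src/instructions.rs`. The pubkeys are placeholders; in a real client they would come from your wallet, mint, and an active state tree fetched from an RPC that supports ZK Compression. Note that, unlike the tuple form shown in the crate README, the implemented `batch_compress` takes separate `recipients` and `amounts` vectors of equal length.

```rust
// Sketch only: placeholder pubkeys, no transaction sending.
use light_compressed_token_client::{batch_compress, compress};
use solana_sdk::pubkey::Pubkey;

fn build_compress_ixs() -> Result<(), Box<dyn std::error::Error>> {
    let payer = Pubkey::new_unique();
    let owner = Pubkey::new_unique();
    let source_token_account = Pubkey::new_unique();
    let mint = Pubkey::new_unique();
    let recipient = Pubkey::new_unique();
    let output_state_tree = Pubkey::new_unique();

    // Single recipient: compress 1_000 tokens from the SPL source account.
    let ix = compress(
        payer,
        owner,
        source_token_account,
        mint,
        1_000,
        recipient,
        output_state_tree,
    )?;

    // Multiple recipients: recipients and amounts are parallel vectors.
    let recipients = vec![Pubkey::new_unique(), Pubkey::new_unique()];
    let amounts = vec![600, 400];
    let batch_ix = batch_compress(
        payer,
        owner,
        source_token_account,
        mint,
        recipients,
        amounts,
        output_state_tree,
    )?;

    // Pack `ix` / `batch_ix` into a transaction signed by `payer` and `owner`.
    let _ = (ix, batch_ix);
    Ok(())
}
```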
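
And a sketch of the on-chain side, using the CPI helpers added in `sdk-libs/compressed-token-sdk` (`get_compressed_token_account_info`, `CompressedTokenDecompressCpiAccounts`, and `decompress`). How the `AccountInfo`s and the packed Merkle context reach the handler is an assumption here (they could come from explicit accounts or `remaining_accounts`), and a PDA authority would use `invoke_signed` instead of `invoke`.

```rust
// Sketch: decompress a compressed token account to an SPL destination via CPI.
use light_compressed_account::{
    compressed_account::PackedMerkleContext,
    instruction_data::compressed_proof::CompressedProof,
};
use light_compressed_token_sdk::cpi::{
    account_info::get_compressed_token_account_info,
    accounts::CompressedTokenDecompressCpiAccounts,
    instruction::decompress,
};
use solana_program::{
    account_info::AccountInfo, entrypoint::ProgramResult, program::invoke, pubkey::Pubkey,
};

pub fn cpi_decompress<'info>(
    mint: Pubkey,
    amount: u64,
    merkle_context: PackedMerkleContext,
    root_index: u16,
    proof: Option<CompressedProof>,
    cpi_accounts: CompressedTokenDecompressCpiAccounts<'info>,
    account_infos: &[AccountInfo<'info>],
) -> ProgramResult {
    // Describe the compressed input token account to be consumed.
    // Proof can be None when the account is proven by index.
    let input = get_compressed_token_account_info(merkle_context, root_index, amount, None);

    // Build the transfer instruction that decompresses `amount` to
    // `cpi_accounts.decompress_destination`.
    let ix = decompress(&mint, vec![input], &proof, &cpi_accounts, None)?;

    // `account_infos` must contain every account referenced by `ix`,
    // including fee payer and authority as signers.
    invoke(&ix, account_infos)
}
```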